speccrew 0.7.6 → 0.7.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.speccrew/skills/speccrew-knowledge-bizs-dispatch/scripts/batch-orchestrator.js +294 -0
- package/.speccrew/skills/speccrew-knowledge-bizs-dispatch/workflow.agentflow.xml +4 -0
- package/.speccrew/skills/speccrew-knowledge-bizs-identify-entries/workflow.agentflow.xml +6 -2
- package/package.json +1 -1
|
@@ -0,0 +1,294 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
* batch-orchestrator.js - Batch management for Stage 2 feature processing
|
|
4
|
+
*
|
|
5
|
+
* Subcommands:
|
|
6
|
+
* get-batch - Get next batch of pending features
|
|
7
|
+
* process-results - Process completed batch results and merge graph data
|
|
8
|
+
*
|
|
9
|
+
* Usage:
|
|
10
|
+
* node batch-orchestrator.js get-batch --syncStatePath <path> --batchSize <number>
|
|
11
|
+
* node batch-orchestrator.js process-results --syncStatePath <path> --graphRoot <path> --completedDir <path>
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
const fs = require('fs');
|
|
15
|
+
const path = require('path');
|
|
16
|
+
|
|
17
|
+
// Parse process.argv into an options object.
// The first positional token is the subcommand; the remainder are
// `--flag value` pairs. Unknown flags are silently ignored, and a flag
// with no following value is skipped.
function parseArgs() {
  const argv = process.argv.slice(2);

  const args = {
    command: argv[0],
    syncStatePath: null,
    batchSize: 5,        // default batch size when flag absent or non-numeric
    graphRoot: null,
    completedDir: null
  };

  let i = 1;
  while (i < argv.length) {
    const flag = argv[i];
    const hasValue = i + 1 < argv.length;

    if (flag === '--syncStatePath' && hasValue) {
      i += 1;
      args.syncStatePath = argv[i];
    } else if (flag === '--batchSize' && hasValue) {
      i += 1;
      // Non-numeric values fall back to the default of 5.
      args.batchSize = parseInt(argv[i], 10) || 5;
    } else if (flag === '--graphRoot' && hasValue) {
      i += 1;
      args.graphRoot = argv[i];
    } else if (flag === '--completedDir' && hasValue) {
      i += 1;
      args.completedDir = argv[i];
    }

    i += 1;
  }

  return args;
}
|
|
44
|
+
|
|
45
|
+
// Create a directory (and any missing parents) if it does not already exist.
// fs.mkdirSync with { recursive: true } is a no-op for existing directories,
// so the previous existsSync-then-mkdir pattern (a TOCTOU race: the directory
// could appear or vanish between the check and the call) is unnecessary.
function ensureDir(dirPath) {
  fs.mkdirSync(dirPath, { recursive: true });
}
|
|
51
|
+
|
|
52
|
+
// Parse a JSON file, returning the parsed value, or null when the file is
// missing, unreadable, or contains invalid JSON. Never throws.
function readJsonSafe(filePath) {
  if (!fs.existsSync(filePath)) {
    return null;
  }
  try {
    return JSON.parse(fs.readFileSync(filePath, 'utf8'));
  } catch (e) {
    // Invalid JSON or a read error — treat the file as absent.
    return null;
  }
}
|
|
64
|
+
|
|
65
|
+
// Collect every feature listed in features-*.json files under syncStatePath.
// Each feature is returned with a synthetic `id` of the form
// {platformId}-{module}-{fileName without extension}; the file name falls
// back to sourceFile, then to 'unknown'. Files with invalid JSON or without
// a `features` array are skipped.
function getAllFeatures(syncStatePath) {
  if (!fs.existsSync(syncStatePath)) {
    return [];
  }

  const featureFiles = fs
    .readdirSync(syncStatePath)
    .filter((name) => name.startsWith('features-') && name.endsWith('.json'));

  const collected = [];
  for (const name of featureFiles) {
    const parsed = readJsonSafe(path.join(syncStatePath, name));
    if (!parsed || !Array.isArray(parsed.features)) {
      continue;
    }

    for (const feature of parsed.features) {
      const sourceName = feature.fileName || feature.sourceFile || 'unknown';
      // Drop the final extension only (e.g. "Foo.vue" -> "Foo").
      const stem = sourceName.replace(/\.[^.]+$/, '');
      collected.push({
        id: `${feature.platformId}-${feature.module}-${stem}`,
        ...feature
      });
    }
  }

  return collected;
}
|
|
97
|
+
|
|
98
|
+
// Collect the set of feature IDs that already have a completion marker.
// A marker is a file named {featureId}.done.json inside completedDir.
// Returns an empty Set when the directory does not exist.
function getCompletedFeatureIds(completedDir) {
  const completedIds = new Set();

  if (!fs.existsSync(completedDir)) {
    return completedIds;
  }

  const suffix = '.done.json';
  for (const file of fs.readdirSync(completedDir)) {
    if (file.endsWith(suffix)) {
      // Strip only the trailing suffix. String#replace would remove the
      // FIRST occurrence, corrupting any feature ID that itself contains
      // ".done.json" in the middle of its name.
      completedIds.add(file.slice(0, -suffix.length));
    }
  }

  return completedIds;
}
|
|
117
|
+
|
|
118
|
+
// get-batch subcommand: print the next batch of not-yet-completed features
// as a single JSON line on stdout. Emits {action:'done', total, completed}
// when every feature has a completion marker, otherwise
// {action:'process', batch, remaining, total}.
function getBatch(args) {
  const { syncStatePath, batchSize } = args;

  ensureDir(syncStatePath);

  // Completion markers live in a 'completed' directory that is a sibling
  // of syncStatePath.
  const completedDir = path.join(path.dirname(syncStatePath), 'completed');
  ensureDir(completedDir);

  const allFeatures = getAllFeatures(syncStatePath);
  const completedIds = getCompletedFeatureIds(completedDir);
  const pendingFeatures = allFeatures.filter(
    (feature) => !completedIds.has(feature.id)
  );

  const total = allFeatures.length;
  const completed = completedIds.size;
  const remaining = pendingFeatures.length;

  if (remaining === 0) {
    console.log(JSON.stringify({ action: 'done', total, completed }));
    return;
  }

  console.log(JSON.stringify({
    action: 'process',
    batch: pendingFeatures.slice(0, batchSize),
    remaining,
    total
  }));
}
|
|
157
|
+
|
|
158
|
+
// process-results subcommand: tally completed-batch outcomes from
// *.done.json markers in completedDir, merge any *.graph.json payloads
// into graphRoot/nodes.json and graphRoot/edges.json (deduplicated), and
// print a {success, failed, graphUpdated} JSON summary on stdout.
function processResults(args) {
  // NOTE(review): syncStatePath is destructured but never used below.
  const { syncStatePath, graphRoot, completedDir } = args;

  ensureDir(completedDir);
  ensureDir(graphRoot);

  let success = 0;
  let failed = 0;
  let graphUpdated = false;

  // NOTE(review): this guard is effectively unreachable — ensureDir above
  // just created completedDir. Kept for defensiveness.
  if (!fs.existsSync(completedDir)) {
    console.log(JSON.stringify({ success, failed, graphUpdated }));
    return;
  }

  const files = fs.readdirSync(completedDir);

  // Count done files: classify each marker by its optional `status` field.
  const doneFiles = files.filter(f => f.endsWith('.done.json'));
  for (const file of doneFiles) {
    const filePath = path.join(completedDir, file);
    const data = readJsonSafe(filePath);
    if (data) {
      if (data.status === 'success' || data.status === 'completed') {
        success++;
      } else if (data.status === 'failed' || data.status === 'error') {
        failed++;
      } else {
        // Default to success if no status field
        success++;
      }
    } else {
      // Invalid JSON, assume success — the marker's existence is what counts.
      success++;
    }
  }

  // Process graph files and merge their nodes/edges into the shared graph.
  const graphFiles = files.filter(f => f.endsWith('.graph.json'));

  if (graphFiles.length > 0) {
    const nodesPath = path.join(graphRoot, 'nodes.json');
    const edgesPath = path.join(graphRoot, 'edges.json');

    // Load existing graph data; anything non-array is ignored.
    let existingNodes = [];
    let existingEdges = [];

    if (fs.existsSync(nodesPath)) {
      const data = readJsonSafe(nodesPath);
      if (Array.isArray(data)) {
        existingNodes = data;
      }
    }

    if (fs.existsSync(edgesPath)) {
      const data = readJsonSafe(edgesPath);
      if (Array.isArray(data)) {
        existingEdges = data;
      }
    }

    // Merge graph data with deduplication: nodes keyed by id, edges by the
    // composite "source-target-type" string.
    const nodeIds = new Set(existingNodes.map(n => n.id));
    const edgeKeys = new Set(existingEdges.map(e => `${e.source}-${e.target}-${e.type}`));

    for (const file of graphFiles) {
      const filePath = path.join(completedDir, file);
      const graphData = readJsonSafe(filePath);

      if (graphData) {
        // Add nodes not already present (entries without an id are skipped).
        if (Array.isArray(graphData.nodes)) {
          for (const node of graphData.nodes) {
            if (node.id && !nodeIds.has(node.id)) {
              existingNodes.push(node);
              nodeIds.add(node.id);
            }
          }
        }

        // Add edges not already present (source and target both required).
        if (Array.isArray(graphData.edges)) {
          for (const edge of graphData.edges) {
            const edgeKey = `${edge.source}-${edge.target}-${edge.type}`;
            if (edge.source && edge.target && !edgeKeys.has(edgeKey)) {
              existingEdges.push(edge);
              edgeKeys.add(edgeKey);
            }
          }
        }

        // NOTE(review): set even when the graph file contributed nothing
        // new — the flag means "valid graph files were seen", not "the
        // graph changed".
        graphUpdated = true;
      }
    }

    // Write merged graph data back (pretty-printed, overwriting in place).
    fs.writeFileSync(nodesPath, JSON.stringify(existingNodes, null, 2));
    fs.writeFileSync(edgesPath, JSON.stringify(existingEdges, null, 2));
  }

  console.log(JSON.stringify({
    success,
    failed,
    graphUpdated
  }));
}
|
|
266
|
+
|
|
267
|
+
// Entry point: validate the subcommand and its required flags, then dispatch.
// Every error is reported as a JSON object on stderr with exit code 1.
function main() {
  const args = parseArgs();

  const fail = (message) => {
    console.error(JSON.stringify({ error: message }));
    process.exit(1);
  };

  if (!args.command) {
    fail('No command specified');
  }

  switch (args.command) {
    case 'get-batch':
      if (!args.syncStatePath) {
        fail('--syncStatePath is required');
      }
      getBatch(args);
      break;
    case 'process-results':
      if (!args.syncStatePath || !args.graphRoot || !args.completedDir) {
        fail('--syncStatePath, --graphRoot, and --completedDir are required');
      }
      processResults(args);
      break;
    default:
      fail(`Unknown command: ${args.command}`);
  }
}

main();
|
|
@@ -44,6 +44,10 @@
|
|
|
44
44
|
<field name="text">DO NOT estimate workload and suggest breaking it into phases — execute ALL phases in sequence</field>
|
|
45
45
|
<field name="text">When many features need analysis, dispatch ALL of them — do NOT skip or defer any</field>
|
|
46
46
|
<field name="text">Context window management: if approaching limit, save progress to checkpoint file and resume — do NOT ask user for guidance</field>
|
|
47
|
+
<field name="text">WHEN batch-orchestrator.js returns action=process, you MUST dispatch ALL workers in the batch and continue to the next iteration — DO NOT stop to report progress or ask for guidance</field>
|
|
48
|
+
<field name="text">DO NOT estimate total workload (e.g., "52 batches needed") and present it as a reason to pause — just execute batch by batch until done</field>
|
|
49
|
+
<field name="text">DO NOT present "progress reports" with options to continue/skip/defer — the ONLY acceptable output between batches is a one-line log of completed count</field>
|
|
50
|
+
<field name="text">If a required script (e.g., batch-orchestrator.js) is missing, report the error and STOP — DO NOT attempt manual batch management as a workaround</field>
|
|
47
51
|
</block>
|
|
48
52
|
|
|
49
53
|
<!-- ============================================================
|
|
@@ -63,9 +63,10 @@
|
|
|
63
63
|
<field name="tech_stack" value="${platform.techStack}"/>
|
|
64
64
|
<field name="logic_module_scan" value="Read tech-stack-mappings.json for the techStack's module_scan configuration. Use module_scan.root as the scan starting point and module_scan.depth as the grouping level (depth=1 means first-level subdirectories = one module each)"/>
|
|
65
65
|
<field name="logic_backend" value="Find all directories containing *Controller.java or *Controller.kt files under module_scan.root. These are API entry directories. Module name = the business package name of the entry directory. Apply module_scan.depth for grouping level"/>
|
|
66
|
-
<field name="logic_frontend_vue_react" value="
|
|
66
|
+
<field name="logic_frontend_vue_react" value="STRICT DEPTH RULE: Read module_scan.depth from tech-stack-mappings.json. When depth=1, ONLY the first-level subdirectories directly under module_scan.root are modules. DO NOT recurse into subdirectories. Example for depth=1 with root=src/views: if directories are src/views/system/, src/views/bpm/, src/views/ai/, src/views/dashboard/ — output modules: system, bpm, ai, dashboard. WRONG: system-config, system-dept, bpm-category, bpm-form. Each module's entryDirs = ["src/views/{module_name}"]."/>
|
|
67
67
|
<field name="logic_mobile_uniapp" value="Find first-level subdirectories under module_scan.root (e.g., src/pages). Plus top-level pages-* directories (module name = directory name without pages- prefix)"/>
|
|
68
68
|
<field name="logic_mobile_miniprogram" value="Find first-level subdirectories under module_scan.root (e.g., pages) as modules"/>
|
|
69
|
+
<field name="depth_enforcement" value="CRITICAL: When module_scan.depth=1, the module name MUST be exactly the first-level directory name (e.g. 'system', 'bpm', 'monitor'). NEVER concatenate parent-child directory names with hyphens (e.g. NEVER 'system-config', 'bpm-form'). Count directory levels from module_scan.root: root/X is depth=1, root/X/Y is depth=2."/>
|
|
69
70
|
<field name="output" var="identified_entries"/>
|
|
70
71
|
</block>
|
|
71
72
|
|
|
@@ -103,7 +104,8 @@
|
|
|
103
104
|
"sourcePath": "${platform.sourcePath}",
|
|
104
105
|
"techStack": "${platform.techStack}",
|
|
105
106
|
"modules": [
|
|
106
|
-
|
|
107
|
+
IMPORTANT: When depth=1, each module = one first-level subdirectory.
|
|
108
|
+
Example: {"name": "system", "entryDirs": ["src/views/system"]}, NOT {"name": "system-config", "entryDirs": ["src/views/system/config"]}
|
|
107
109
|
]
|
|
108
110
|
}
|
|
109
111
|
</field>
|
|
@@ -143,6 +145,8 @@
|
|
|
143
145
|
- module names are business-meaningful (not technical terms like config, util)
|
|
144
146
|
- entryDirs paths are correct and accessible
|
|
145
147
|
- JSON format is valid
|
|
148
|
+
- DEPTH CHECK: If module_scan.depth=1, verify NO module name contains a hyphen that corresponds to parent-child directory structure (e.g., "system-config" is invalid if "system" is a first-level directory under module_scan.root)
|
|
149
|
+
- Module count should roughly match the number of first-level subdirectories under module_scan.root (not second-level)
|
|
146
150
|
</field>
|
|
147
151
|
<field name="output" var="validation_result"/>
|
|
148
152
|
</block>
|