spec-agent 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +256 -0
- package/bin/spec-agent.js +14 -0
- package/dist/commands/analyze.d.ts +16 -0
- package/dist/commands/analyze.d.ts.map +1 -0
- package/dist/commands/analyze.js +283 -0
- package/dist/commands/analyze.js.map +1 -0
- package/dist/commands/clean.d.ts +9 -0
- package/dist/commands/clean.d.ts.map +1 -0
- package/dist/commands/clean.js +109 -0
- package/dist/commands/clean.js.map +1 -0
- package/dist/commands/dispatch.d.ts +12 -0
- package/dist/commands/dispatch.d.ts.map +1 -0
- package/dist/commands/dispatch.js +232 -0
- package/dist/commands/dispatch.js.map +1 -0
- package/dist/commands/doctor.d.ts +9 -0
- package/dist/commands/doctor.d.ts.map +1 -0
- package/dist/commands/doctor.js +153 -0
- package/dist/commands/doctor.js.map +1 -0
- package/dist/commands/learn.d.ts +13 -0
- package/dist/commands/learn.d.ts.map +1 -0
- package/dist/commands/learn.js +234 -0
- package/dist/commands/learn.js.map +1 -0
- package/dist/commands/merge.d.ts +11 -0
- package/dist/commands/merge.d.ts.map +1 -0
- package/dist/commands/merge.js +335 -0
- package/dist/commands/merge.js.map +1 -0
- package/dist/commands/pipeline.d.ts +19 -0
- package/dist/commands/pipeline.d.ts.map +1 -0
- package/dist/commands/pipeline.js +266 -0
- package/dist/commands/pipeline.js.map +1 -0
- package/dist/commands/plan.d.ts +13 -0
- package/dist/commands/plan.d.ts.map +1 -0
- package/dist/commands/plan.js +314 -0
- package/dist/commands/plan.js.map +1 -0
- package/dist/commands/scan.d.ts +28 -0
- package/dist/commands/scan.d.ts.map +1 -0
- package/dist/commands/scan.js +488 -0
- package/dist/commands/scan.js.map +1 -0
- package/dist/commands/status.d.ts +8 -0
- package/dist/commands/status.d.ts.map +1 -0
- package/dist/commands/status.js +146 -0
- package/dist/commands/status.js.map +1 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +126 -0
- package/dist/index.js.map +1 -0
- package/dist/services/document-parser.d.ts +49 -0
- package/dist/services/document-parser.d.ts.map +1 -0
- package/dist/services/document-parser.js +499 -0
- package/dist/services/document-parser.js.map +1 -0
- package/dist/services/llm.d.ts +61 -0
- package/dist/services/llm.d.ts.map +1 -0
- package/dist/services/llm.js +716 -0
- package/dist/services/llm.js.map +1 -0
- package/dist/types.d.ts +159 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +4 -0
- package/dist/types.js.map +1 -0
- package/dist/utils/file.d.ts +10 -0
- package/dist/utils/file.d.ts.map +1 -0
- package/dist/utils/file.js +96 -0
- package/dist/utils/file.js.map +1 -0
- package/dist/utils/logger.d.ts +13 -0
- package/dist/utils/logger.d.ts.map +1 -0
- package/dist/utils/logger.js +55 -0
- package/dist/utils/logger.js.map +1 -0
- package/package.json +48 -0
- package/scripts/publish-npm.js +174 -0
- package/spec-agent-implementation.md +750 -0
- package/src/commands/analyze.ts +322 -0
- package/src/commands/clean.ts +88 -0
- package/src/commands/dispatch.ts +250 -0
- package/src/commands/doctor.ts +136 -0
- package/src/commands/learn.ts +261 -0
- package/src/commands/merge.ts +377 -0
- package/src/commands/pipeline.ts +306 -0
- package/src/commands/plan.ts +331 -0
- package/src/commands/scan.ts +568 -0
- package/src/commands/status.ts +129 -0
- package/src/index.ts +137 -0
- package/src/services/document-parser.ts +548 -0
- package/src/services/llm.ts +857 -0
- package/src/types.ts +161 -0
- package/src/utils/file.ts +60 -0
- package/src/utils/logger.ts +58 -0
- package/tsconfig.json +19 -0
|
@@ -0,0 +1,377 @@
|
|
|
1
|
+
import * as path from 'path';
|
|
2
|
+
import { Command } from 'commander';
|
|
3
|
+
import { Logger } from '../utils/logger';
|
|
4
|
+
import {
|
|
5
|
+
ensureDir,
|
|
6
|
+
fileExists,
|
|
7
|
+
readJson,
|
|
8
|
+
writeJson,
|
|
9
|
+
findFiles
|
|
10
|
+
} from '../utils/file';
|
|
11
|
+
import { ChunkSummary, SpecSummary, Feature, DataModel, Page, Api, DependencyGraph } from '../types';
|
|
12
|
+
|
|
13
|
+
interface MergeOptions {
|
|
14
|
+
summaries: string;
|
|
15
|
+
output: string;
|
|
16
|
+
strategy: string;
|
|
17
|
+
dryRun?: boolean;
|
|
18
|
+
yes?: boolean;
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
/**
 * CLI entry point for `spec-agent merge`.
 *
 * Loads every `chunk_*_summary.json` produced by the analyze phase, sorts them
 * by chunk id, deduplicates features/models/pages/APIs, builds a feature
 * dependency graph, detects conflicts, and writes a single SpecSummary JSON to
 * `options.output`.
 *
 * Exits the process with code 1 on any error (missing directory, no
 * summaries, or a thrown exception). `--dry-run` prints a merge plan and
 * returns without writing anything.
 *
 * @param options CLI options (summaries dir, output path, dedupe strategy,
 *                dry-run / yes flags).
 * @param command Commander command context — currently unused in this body.
 */
export async function mergeCommand(options: MergeOptions, command: Command): Promise<void> {
  const logger = new Logger();

  try {
    const summariesDir = path.resolve(options.summaries);

    // The analyze phase must have run first; bail out with guidance if not.
    if (!(await fileExists(summariesDir))) {
      logger.error(`Error: Summaries directory not found: ${options.summaries}`);
      logger.info('Run spec-agent analyze first to create summaries.');
      process.exit(1);
    }

    // Find all chunk summaries
    const summaryFiles = await findFiles('chunk_*_summary.json', summariesDir);

    if (summaryFiles.length === 0) {
      logger.error(`Error: No chunk summaries found in ${options.summaries}`);
      process.exit(1);
    }

    logger.info(`Found ${summaryFiles.length} chunk summaries`);

    // Load all summaries in parallel.
    const summaries: ChunkSummary[] = await Promise.all(
      summaryFiles.map(async (file) => {
        const summary = await readJson<ChunkSummary>(file);
        return summary;
      })
    );

    // Sort by chunk id so downstream output is deterministic regardless of
    // filesystem enumeration order.
    summaries.sort((a, b) => a.chunkId - b.chunkId);

    logger.info(`Loading summaries from ${summaries.length} chunks...`);

    // Preview mode: report what would be merged, then stop before any writes.
    if (options.dryRun) {
      logger.info('Dry run mode - merge plan:');
      const totalFeatures = summaries.reduce((sum, s) => sum + s.features.length, 0);
      logger.info(`  Total features to merge: ${totalFeatures}`);
      logger.info(`  Strategy: ${options.strategy}`);
      return;
    }

    // Merge logic
    const startTime = Date.now();

    // Collect all items from every chunk before deduplicating.
    const allFeatures: Feature[] = [];
    const allDataModels: DataModel[] = [];
    const allPages: Page[] = [];
    const allApis: Api[] = [];
    const allBusinessRules: string[] = [];

    for (const summary of summaries) {
      allFeatures.push(...summary.features);
      allDataModels.push(...summary.dataModels);
      allPages.push(...summary.pages);
      allApis.push(...summary.apis);
      allBusinessRules.push(...summary.businessRules);
    }

    logger.info(`Collected ${allFeatures.length} features, ${allDataModels.length} models, ${allPages.length} pages, ${allApis.length} APIs`);

    // Deduplication — strategy controls how loosely names are matched.
    const { features: dedupedFeatures, duplicates } = deduplicateFeatures(
      allFeatures,
      options.strategy as 'conservative' | 'aggressive'
    );

    logger.info(`Deduplication: ${allFeatures.length} → ${dedupedFeatures.length} features (${duplicates.length} removed)`);

    // Build dependency graph over the canonical (post-dedupe) feature ids.
    const dependencyGraph = buildDependencyGraph(dedupedFeatures);

    // Detect conflicts (e.g. circular dependencies); non-fatal, warn only.
    const conflicts = detectConflicts(dedupedFeatures, allDataModels);

    if (conflicts.length > 0) {
      logger.warn(`Detected ${conflicts.length} potential conflicts`);
      // Show at most the first five to keep console output readable.
      for (const conflict of conflicts.slice(0, 5)) {
        logger.warn(`  - ${conflict}`);
      }
    }

    // Create spec summary — the single merged artifact of this phase.
    const specSummary: SpecSummary = {
      version: '1.0.0',
      createdAt: new Date().toISOString(),
      sourceChunks: summaries.length,
      deduplicatedFeatures: dedupedFeatures.length,
      features: dedupedFeatures,
      dataModels: deduplicateDataModels(allDataModels),
      pages: deduplicatePages(allPages),
      apis: deduplicateApis(allApis),
      // Business rules are plain strings; Set removes exact duplicates only.
      businessRules: [...new Set(allBusinessRules)],
      dependencyGraph,
      // Omit the key entirely when there are no conflicts.
      conflicts: conflicts.length > 0 ? conflicts : undefined,
    };

    // Write output
    const outputPath = path.resolve(options.output);
    await ensureDir(path.dirname(outputPath));
    await writeJson(outputPath, specSummary);

    const duration = ((Date.now() - startTime) / 1000).toFixed(1);

    logger.success(`Merge complete in ${duration}s`);
    // Machine-readable result summary for callers parsing stdout.
    logger.json({
      status: 'success',
      sourceChunks: summaries.length,
      totalFeatures: dedupedFeatures.length,
      duplicatesRemoved: duplicates.length,
      dataModels: specSummary.dataModels.length,
      pages: specSummary.pages.length,
      apis: specSummary.apis.length,
      conflicts: conflicts.length,
      outputPath,
    });

  } catch (error) {
    logger.error(`Merge failed: ${error instanceof Error ? error.message : String(error)}`);
    process.exit(1);
  }
}
|
|
146
|
+
|
|
147
|
+
/**
 * Group near-duplicate features and collapse each group to one canonical
 * feature with a fresh sequential id (`F001`, `F002`, ...).
 *
 * Grouping:
 * - 'conservative': features match only when their normalized names are equal.
 * - 'aggressive': equal keys OR a Jaccard name-token similarity >= 0.72
 *   against the group's first member.
 *
 * Dependencies of all group members are remapped onto the new canonical ids;
 * self-references and references to unknown ids are dropped.
 *
 * @param features All features collected across chunks, in chunk order.
 * @param strategy Dedupe aggressiveness (see above).
 * @returns Canonical features plus the ids of the features folded away.
 */
function deduplicateFeatures(
  features: Feature[],
  strategy: 'conservative' | 'aggressive'
): { features: Feature[]; duplicates: string[] } {
  // A group holds the first-seen key plus every feature assigned to it.
  interface FeatureGroup {
    key: string;
    items: Feature[];
  }

  const groups: FeatureGroup[] = [];
  const duplicates: string[] = [];

  for (const feature of features) {
    const key = strategy === 'aggressive'
      ? generateAggressiveKey(feature)
      : generateConservativeKey(feature);
    const normalizedName = normalizeText(feature.name);

    // Linear scan over groups: exact key match always wins; in aggressive
    // mode also accept fuzzy name similarity vs. the group representative.
    let targetGroup = groups.find(group => {
      if (group.key === key) return true;
      if (strategy !== 'aggressive') return false;
      const rep = group.items[0];
      const repName = normalizeText(rep.name);
      const nameSimilarity = jaccardSimilarity(tokenize(repName), tokenize(normalizedName));
      return nameSimilarity >= 0.72;
    });

    if (!targetGroup) {
      targetGroup = { key, items: [] };
      groups.push(targetGroup);
    } else {
      // Every member after the first counts as a removed duplicate.
      duplicates.push(feature.id);
    }

    targetGroup.items.push(feature);
  }

  const oldIdAlias = new Map<string, string>();
  const dedupedFeatures: Feature[] = groups.map((group, idx) => {
    const representative = chooseRepresentativeFeature(group.items);
    // Canonical ids are zero-padded and ordered by group creation order.
    const newId = `F${String(idx + 1).padStart(3, '0')}`;

    // Build alias from old IDs to new canonical ID for dependency remapping.
    const uniqueOldIds = Array.from(new Set(group.items.map(item => item.id)));
    for (const oldId of uniqueOldIds) {
      oldIdAlias.set(oldId, newId);
    }

    return {
      ...representative,
      id: newId,
      // Cleared here; refilled in the remapping pass below once every old id
      // has an alias entry.
      dependencies: [] as string[],
    };
  });

  // Rebuild dependencies from grouped items and remap to canonical IDs.
  groups.forEach((group, idx) => {
    const currentId = dedupedFeatures[idx].id;
    const mappedDeps = new Set<string>();
    for (const item of group.items) {
      for (const dep of item.dependencies || []) {
        const mapped = oldIdAlias.get(dep);
        // Drop self-dependencies and deps pointing at ids we never saw.
        if (mapped && mapped !== currentId) {
          mappedDeps.add(mapped);
        }
      }
    }
    dedupedFeatures[idx].dependencies = Array.from(mappedDeps);
  });

  return { features: dedupedFeatures, duplicates };
}
|
|
219
|
+
|
|
220
|
+
function generateConservativeKey(feature: Feature): string {
|
|
221
|
+
// Only exact name match is considered duplicate
|
|
222
|
+
return normalizeText(feature.name);
|
|
223
|
+
}
|
|
224
|
+
|
|
225
|
+
function generateAggressiveKey(feature: Feature): string {
|
|
226
|
+
// Similar names might be duplicates
|
|
227
|
+
return normalizeText(feature.name)
|
|
228
|
+
.replace(/\b(功能|流程|管理|申请|支持|模块|客户)\b/g, '')
|
|
229
|
+
.replace(/\s+/g, ' ')
|
|
230
|
+
.trim();
|
|
231
|
+
}
|
|
232
|
+
|
|
233
|
+
function normalizeText(input: string): string {
|
|
234
|
+
return (input || '')
|
|
235
|
+
.toLowerCase()
|
|
236
|
+
.replace(/[^\u4e00-\u9fa5a-z0-9\s]/gi, ' ')
|
|
237
|
+
.replace(/\s+/g, ' ')
|
|
238
|
+
.trim();
|
|
239
|
+
}
|
|
240
|
+
|
|
241
|
+
function tokenize(input: string): string[] {
|
|
242
|
+
return normalizeText(input)
|
|
243
|
+
.split(' ')
|
|
244
|
+
.filter(token => token.length > 0);
|
|
245
|
+
}
|
|
246
|
+
|
|
247
|
+
function jaccardSimilarity(aTokens: string[], bTokens: string[]): number {
|
|
248
|
+
const a = new Set(aTokens);
|
|
249
|
+
const b = new Set(bTokens);
|
|
250
|
+
if (a.size === 0 || b.size === 0) return 0;
|
|
251
|
+
let intersection = 0;
|
|
252
|
+
for (const token of a) {
|
|
253
|
+
if (b.has(token)) intersection++;
|
|
254
|
+
}
|
|
255
|
+
const union = new Set([...a, ...b]).size;
|
|
256
|
+
return union === 0 ? 0 : intersection / union;
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
function chooseRepresentativeFeature(features: Feature[]): Feature {
|
|
260
|
+
const sorted = features.slice().sort((a, b) => {
|
|
261
|
+
// Prefer higher priority first.
|
|
262
|
+
const priorityRank: Record<Feature['priority'], number> = {
|
|
263
|
+
P0: 0,
|
|
264
|
+
P1: 1,
|
|
265
|
+
P2: 2,
|
|
266
|
+
P3: 3,
|
|
267
|
+
};
|
|
268
|
+
const pa = priorityRank[a.priority] ?? 3;
|
|
269
|
+
const pb = priorityRank[b.priority] ?? 3;
|
|
270
|
+
if (pa !== pb) return pa - pb;
|
|
271
|
+
// Prefer richer description.
|
|
272
|
+
if (a.description.length !== b.description.length) {
|
|
273
|
+
return b.description.length - a.description.length;
|
|
274
|
+
}
|
|
275
|
+
return a.sourceChunk - b.sourceChunk;
|
|
276
|
+
});
|
|
277
|
+
|
|
278
|
+
const representative = sorted[0];
|
|
279
|
+
const mergedDescription = mergeFeatureDescriptions(features);
|
|
280
|
+
return {
|
|
281
|
+
...representative,
|
|
282
|
+
description: mergedDescription,
|
|
283
|
+
};
|
|
284
|
+
}
|
|
285
|
+
|
|
286
|
+
function mergeFeatureDescriptions(features: Feature[]): string {
|
|
287
|
+
const descriptions = Array.from(
|
|
288
|
+
new Set(
|
|
289
|
+
features
|
|
290
|
+
.map(item => item.description?.trim())
|
|
291
|
+
.filter((item): item is string => Boolean(item))
|
|
292
|
+
)
|
|
293
|
+
);
|
|
294
|
+
|
|
295
|
+
if (descriptions.length === 0) {
|
|
296
|
+
return '';
|
|
297
|
+
}
|
|
298
|
+
|
|
299
|
+
descriptions.sort((a, b) => b.length - a.length);
|
|
300
|
+
return descriptions[0];
|
|
301
|
+
}
|
|
302
|
+
|
|
303
|
+
function buildDependencyGraph(features: Feature[]): DependencyGraph {
|
|
304
|
+
const graph: DependencyGraph = {};
|
|
305
|
+
|
|
306
|
+
for (const feature of features) {
|
|
307
|
+
graph[feature.id] = feature.dependencies || [];
|
|
308
|
+
}
|
|
309
|
+
|
|
310
|
+
return graph;
|
|
311
|
+
}
|
|
312
|
+
|
|
313
|
+
function detectConflicts(features: Feature[], dataModels: unknown[]): string[] {
|
|
314
|
+
const conflicts: string[] = [];
|
|
315
|
+
|
|
316
|
+
// Check for circular dependencies
|
|
317
|
+
const visited = new Set<string>();
|
|
318
|
+
const recursionStack = new Set<string>();
|
|
319
|
+
|
|
320
|
+
function hasCircularDependency(featureId: string, graph: DependencyGraph): boolean {
|
|
321
|
+
visited.add(featureId);
|
|
322
|
+
recursionStack.add(featureId);
|
|
323
|
+
|
|
324
|
+
const dependencies = graph[featureId] || [];
|
|
325
|
+
for (const depId of dependencies) {
|
|
326
|
+
if (!visited.has(depId)) {
|
|
327
|
+
if (hasCircularDependency(depId, graph)) {
|
|
328
|
+
return true;
|
|
329
|
+
}
|
|
330
|
+
} else if (recursionStack.has(depId)) {
|
|
331
|
+
return true;
|
|
332
|
+
}
|
|
333
|
+
}
|
|
334
|
+
|
|
335
|
+
recursionStack.delete(featureId);
|
|
336
|
+
return false;
|
|
337
|
+
}
|
|
338
|
+
|
|
339
|
+
const graph = buildDependencyGraph(features);
|
|
340
|
+
for (const feature of features) {
|
|
341
|
+
if (hasCircularDependency(feature.id, graph)) {
|
|
342
|
+
conflicts.push(`Circular dependency detected for feature ${feature.id}`);
|
|
343
|
+
}
|
|
344
|
+
}
|
|
345
|
+
|
|
346
|
+
return conflicts;
|
|
347
|
+
}
|
|
348
|
+
|
|
349
|
+
function deduplicateDataModels(models: DataModel[]): DataModel[] {
|
|
350
|
+
const seen = new Set<string>();
|
|
351
|
+
return models.filter((model) => {
|
|
352
|
+
const key = `${model.name}`;
|
|
353
|
+
if (seen.has(key)) return false;
|
|
354
|
+
seen.add(key);
|
|
355
|
+
return true;
|
|
356
|
+
});
|
|
357
|
+
}
|
|
358
|
+
|
|
359
|
+
function deduplicatePages(pages: Page[]): Page[] {
|
|
360
|
+
const seen = new Set<string>();
|
|
361
|
+
return pages.filter((page) => {
|
|
362
|
+
const key = `${page.route}`;
|
|
363
|
+
if (seen.has(key)) return false;
|
|
364
|
+
seen.add(key);
|
|
365
|
+
return true;
|
|
366
|
+
});
|
|
367
|
+
}
|
|
368
|
+
|
|
369
|
+
function deduplicateApis(apis: Api[]): Api[] {
|
|
370
|
+
const seen = new Set<string>();
|
|
371
|
+
return apis.filter((api) => {
|
|
372
|
+
const key = `${api.method}:${api.path}`;
|
|
373
|
+
if (seen.has(key)) return false;
|
|
374
|
+
seen.add(key);
|
|
375
|
+
return true;
|
|
376
|
+
});
|
|
377
|
+
}
|
|
@@ -0,0 +1,306 @@
|
|
|
1
|
+
import * as path from 'path';
|
|
2
|
+
import { Command } from 'commander';
|
|
3
|
+
import { Logger } from '../utils/logger';
|
|
4
|
+
import {
|
|
5
|
+
ensureDir,
|
|
6
|
+
fileExists,
|
|
7
|
+
readJson,
|
|
8
|
+
writeJson
|
|
9
|
+
} from '../utils/file';
|
|
10
|
+
import { scanCommand } from './scan';
|
|
11
|
+
import { analyzeCommand } from './analyze';
|
|
12
|
+
import { mergeCommand } from './merge';
|
|
13
|
+
import { planCommand } from './plan';
|
|
14
|
+
import { dispatchCommand } from './dispatch';
|
|
15
|
+
|
|
16
|
+
interface PipelineOptions {
|
|
17
|
+
input?: string;
|
|
18
|
+
output: string;
|
|
19
|
+
agents: string;
|
|
20
|
+
chunkSize: string;
|
|
21
|
+
minChunkSize: string;
|
|
22
|
+
analyzeRetries: string;
|
|
23
|
+
analyzeBudgetTokens: string;
|
|
24
|
+
framework: string;
|
|
25
|
+
strictLlm?: boolean;
|
|
26
|
+
stopAt?: string;
|
|
27
|
+
from?: string;
|
|
28
|
+
dryRun?: boolean;
|
|
29
|
+
yes?: boolean;
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
const PHASES = ['scan', 'analyze', 'merge', 'plan', 'dispatch'] as const;
|
|
33
|
+
type Phase = typeof PHASES[number];
|
|
34
|
+
|
|
35
|
+
export async function pipelineCommand(options: PipelineOptions, command: Command): Promise<void> {
|
|
36
|
+
const logger = new Logger();
|
|
37
|
+
const startedAt = new Date().toISOString();
|
|
38
|
+
|
|
39
|
+
try {
|
|
40
|
+
if (!options.input && !options.from) {
|
|
41
|
+
logger.error('Error: --input is required (unless using --from to resume)');
|
|
42
|
+
process.exit(1);
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
const workspacePath = path.resolve(options.output);
|
|
46
|
+
await ensureDir(workspacePath);
|
|
47
|
+
|
|
48
|
+
// Determine start and end phases
|
|
49
|
+
const startPhase: Phase = (options.from as Phase) || 'scan';
|
|
50
|
+
const endPhase: Phase = (options.stopAt as Phase) || 'dispatch';
|
|
51
|
+
|
|
52
|
+
const startIndex = PHASES.indexOf(startPhase);
|
|
53
|
+
const endIndex = PHASES.indexOf(endPhase);
|
|
54
|
+
|
|
55
|
+
if (startIndex === -1 || endIndex === -1) {
|
|
56
|
+
logger.error(`Error: Invalid phase. Valid phases: ${PHASES.join(', ')}`);
|
|
57
|
+
process.exit(1);
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
if (startIndex > endIndex) {
|
|
61
|
+
logger.error('Error: Start phase cannot be after end phase');
|
|
62
|
+
process.exit(1);
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
logger.info(`Pipeline: ${startPhase} → ${endPhase}`);
|
|
66
|
+
logger.info(`Workspace: ${workspacePath}`);
|
|
67
|
+
|
|
68
|
+
// Preview mode
|
|
69
|
+
if (options.dryRun) {
|
|
70
|
+
logger.info('Dry run mode - pipeline preview:');
|
|
71
|
+
for (let i = startIndex; i <= endIndex; i++) {
|
|
72
|
+
const phase = PHASES[i];
|
|
73
|
+
const phaseDone = await checkPhaseDone(workspacePath, phase);
|
|
74
|
+
logger.info(` ${phaseDone ? '✔' : '○'} ${phase}`);
|
|
75
|
+
}
|
|
76
|
+
return;
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
const startTime = Date.now();
|
|
80
|
+
const results: Array<{ phase: string; status: string; duration: string }> = [];
|
|
81
|
+
let failedPhase: string | null = null;
|
|
82
|
+
let failedReason = '';
|
|
83
|
+
|
|
84
|
+
// Execute phases
|
|
85
|
+
for (let i = startIndex; i <= endIndex; i++) {
|
|
86
|
+
const phase = PHASES[i];
|
|
87
|
+
const phaseStart = Date.now();
|
|
88
|
+
|
|
89
|
+
logger.info(`\n[${i - startIndex + 1}/${endIndex - startIndex + 1}] Running ${phase}...`);
|
|
90
|
+
|
|
91
|
+
// Check if phase already done
|
|
92
|
+
if (await checkPhaseDone(workspacePath, phase)) {
|
|
93
|
+
logger.info(` Phase ${phase} already completed, skipping`);
|
|
94
|
+
results.push({ phase, status: 'skipped', duration: '0s' });
|
|
95
|
+
continue;
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
try {
|
|
99
|
+
await executePhase(phase, options, workspacePath, logger);
|
|
100
|
+
|
|
101
|
+
// Mark phase as done
|
|
102
|
+
await markPhaseDone(workspacePath, phase);
|
|
103
|
+
|
|
104
|
+
const phaseDuration = ((Date.now() - phaseStart) / 1000).toFixed(1);
|
|
105
|
+
results.push({ phase, status: 'success', duration: `${phaseDuration}s` });
|
|
106
|
+
logger.success(` ${phase} complete (${phaseDuration}s)`);
|
|
107
|
+
|
|
108
|
+
} catch (error) {
|
|
109
|
+
const phaseDuration = ((Date.now() - phaseStart) / 1000).toFixed(1);
|
|
110
|
+
results.push({ phase, status: 'failed', duration: `${phaseDuration}s` });
|
|
111
|
+
failedPhase = phase;
|
|
112
|
+
failedReason = error instanceof Error ? error.message : String(error);
|
|
113
|
+
logger.error(` ${phase} failed: ${failedReason}`);
|
|
114
|
+
|
|
115
|
+
logger.info('\nPipeline stopped. To resume:');
|
|
116
|
+
logger.info(` spec-agent pipeline --from ${phase} --output ${options.output}`);
|
|
117
|
+
|
|
118
|
+
break;
|
|
119
|
+
}
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
const totalDuration = ((Date.now() - startTime) / 1000).toFixed(1);
|
|
123
|
+
|
|
124
|
+
logger.info('\n' + '='.repeat(50));
|
|
125
|
+
logger.success('Pipeline complete');
|
|
126
|
+
logger.info('='.repeat(50));
|
|
127
|
+
|
|
128
|
+
for (const result of results) {
|
|
129
|
+
const icon = result.status === 'success' ? '✔' : result.status === 'failed' ? '✖' : '○';
|
|
130
|
+
logger.info(` ${icon} ${result.phase.padEnd(10)} ${result.status.padEnd(10)} ${result.duration}`);
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
logger.info(` Total: ${totalDuration}s`);
|
|
134
|
+
logger.info(` Output: ${workspacePath}`);
|
|
135
|
+
|
|
136
|
+
const summaryPath = path.join(workspacePath, 'pipeline_summary.json');
|
|
137
|
+
await writeJson(summaryPath, {
|
|
138
|
+
startedAt,
|
|
139
|
+
endedAt: new Date().toISOString(),
|
|
140
|
+
status: failedPhase ? 'failed' : 'success',
|
|
141
|
+
startPhase,
|
|
142
|
+
endPhase,
|
|
143
|
+
totalDurationSeconds: Number(totalDuration),
|
|
144
|
+
failedPhase,
|
|
145
|
+
failedReason: failedPhase ? failedReason : undefined,
|
|
146
|
+
phases: results,
|
|
147
|
+
});
|
|
148
|
+
logger.info(` Summary: ${summaryPath}`);
|
|
149
|
+
|
|
150
|
+
const runReportPath = path.join(workspacePath, 'run_report.json');
|
|
151
|
+
const runReport = await buildRunReport(workspacePath, {
|
|
152
|
+
startedAt,
|
|
153
|
+
totalDurationSeconds: Number(totalDuration),
|
|
154
|
+
status: failedPhase ? 'failed' : 'success',
|
|
155
|
+
});
|
|
156
|
+
await writeJson(runReportPath, runReport);
|
|
157
|
+
logger.info(` Run report: ${runReportPath}`);
|
|
158
|
+
|
|
159
|
+
if (failedPhase) {
|
|
160
|
+
process.exit(2);
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
} catch (error) {
|
|
164
|
+
logger.error(`Pipeline failed: ${error instanceof Error ? error.message : String(error)}`);
|
|
165
|
+
process.exit(1);
|
|
166
|
+
}
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
/**
 * Run a single pipeline phase by delegating to the corresponding command
 * function, with artifact paths rooted in the workspace:
 * manifest.json → summaries/ → spec_summary.json → task_plan.json →
 * dispatch_plan.json.
 *
 * Each sub-command receives `{} as Command` for its Commander context.
 * NOTE(review): that cast assumes none of the sub-commands actually read the
 * Command object — confirm before adding such a dependency.
 *
 * @throws Error when the scan phase is requested without `options.input`, or
 *         whatever the delegated command throws.
 */
async function executePhase(
  phase: Phase,
  options: PipelineOptions,
  workspacePath: string,
  logger: Logger
): Promise<void> {
  // Well-known artifact locations; each phase consumes its predecessor's output.
  const manifestPath = path.join(workspacePath, 'manifest.json');
  const summariesDir = path.join(workspacePath, 'summaries');
  const specPath = path.join(workspacePath, 'spec_summary.json');
  const planPath = path.join(workspacePath, 'task_plan.json');
  const dispatchPath = path.join(workspacePath, 'dispatch_plan.json');

  switch (phase) {
    case 'scan':
      // scan is the only phase that needs the raw input document.
      if (!options.input) {
        throw new Error('Input required for scan phase');
      }
      await scanCommand({
        input: options.input,
        output: manifestPath,
        chunkSize: options.chunkSize,
        minChunkSize: options.minChunkSize,
        format: 'auto',
        strictLlm: options.strictLlm,
        dryRun: false,
        yes: options.yes,
      }, {} as Command);
      break;

    case 'analyze':
      await analyzeCommand({
        manifest: manifestPath,
        output: summariesDir,
        agents: options.agents,
        focus: 'full',
        // Fall back to defaults when tuning options are unset/empty.
        retries: options.analyzeRetries || '1',
        budgetTokens: options.analyzeBudgetTokens || '0',
        dryRun: false,
        yes: options.yes,
      }, {} as Command);
      break;

    case 'merge':
      // Pipeline always merges conservatively (exact-name dedupe).
      await mergeCommand({
        summaries: summariesDir,
        output: specPath,
        strategy: 'conservative',
        dryRun: false,
        yes: options.yes,
      }, {} as Command);
      break;

    case 'plan':
      await planCommand({
        spec: specPath,
        output: planPath,
        type: 'prototype',
        framework: options.framework,
        parallel: '3',
        dryRun: false,
        yes: options.yes,
      }, {} as Command);
      break;

    case 'dispatch':
      await dispatchCommand({
        plan: planPath,
        output: dispatchPath,
        strategy: 'balanced',
        dryRun: false,
        yes: options.yes,
      }, {} as Command);
      break;
  }
}
|
|
244
|
+
|
|
245
|
+
async function checkPhaseDone(workspacePath: string, phase: string): Promise<boolean> {
|
|
246
|
+
const doneFile = path.join(workspacePath, `.${phase}_done`);
|
|
247
|
+
return await fileExists(doneFile);
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
async function markPhaseDone(workspacePath: string, phase: string): Promise<void> {
|
|
251
|
+
const doneFile = path.join(workspacePath, `.${phase}_done`);
|
|
252
|
+
await writeJson(doneFile, { completedAt: new Date().toISOString() });
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
async function buildRunReport(
|
|
256
|
+
workspacePath: string,
|
|
257
|
+
context: { startedAt: string; totalDurationSeconds: number; status: 'success' | 'failed' }
|
|
258
|
+
): Promise<{
|
|
259
|
+
startedAt: string;
|
|
260
|
+
endedAt: string;
|
|
261
|
+
status: 'success' | 'failed';
|
|
262
|
+
totalDurationSeconds: number;
|
|
263
|
+
metrics: Record<string, number>;
|
|
264
|
+
}> {
|
|
265
|
+
const metrics: Record<string, number> = {};
|
|
266
|
+
const manifestPath = path.join(workspacePath, 'manifest.json');
|
|
267
|
+
const specPath = path.join(workspacePath, 'spec_summary.json');
|
|
268
|
+
const taskPlanPath = path.join(workspacePath, 'task_plan.json');
|
|
269
|
+
const summariesDir = path.join(workspacePath, 'summaries');
|
|
270
|
+
|
|
271
|
+
if (await fileExists(manifestPath)) {
|
|
272
|
+
const manifest = await readJson<{ chunks?: unknown[] }>(manifestPath);
|
|
273
|
+
metrics.chunks = manifest.chunks?.length || 0;
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
if (await fileExists(specPath)) {
|
|
277
|
+
const spec = await readJson<{ features?: unknown[]; apis?: unknown[]; pages?: unknown[] }>(specPath);
|
|
278
|
+
metrics.features = spec.features?.length || 0;
|
|
279
|
+
metrics.apis = spec.apis?.length || 0;
|
|
280
|
+
metrics.pages = spec.pages?.length || 0;
|
|
281
|
+
}
|
|
282
|
+
|
|
283
|
+
if (await fileExists(taskPlanPath)) {
|
|
284
|
+
const plan = await readJson<{ totalTasks?: number }>(taskPlanPath);
|
|
285
|
+
metrics.totalTasks = plan.totalTasks || 0;
|
|
286
|
+
}
|
|
287
|
+
|
|
288
|
+
if (await fileExists(summariesDir)) {
|
|
289
|
+
const files = await import('../utils/file').then(m => m.findFiles('chunk_*_summary.json', summariesDir));
|
|
290
|
+
metrics.chunkSummaries = files.length;
|
|
291
|
+
let totalTokens = 0;
|
|
292
|
+
for (const file of files) {
|
|
293
|
+
const summary = await readJson<{ llmUsage?: { totalTokens?: number } }>(file);
|
|
294
|
+
totalTokens += summary.llmUsage?.totalTokens || 0;
|
|
295
|
+
}
|
|
296
|
+
metrics.totalTokens = totalTokens;
|
|
297
|
+
}
|
|
298
|
+
|
|
299
|
+
return {
|
|
300
|
+
startedAt: context.startedAt,
|
|
301
|
+
endedAt: new Date().toISOString(),
|
|
302
|
+
status: context.status,
|
|
303
|
+
totalDurationSeconds: context.totalDurationSeconds,
|
|
304
|
+
metrics,
|
|
305
|
+
};
|
|
306
|
+
}
|