@zebralabs/context-cli 0.1.3 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/application/compile/compile-context.js +119 -0
- package/src/application/compile/stage1-discovery.js +125 -0
- package/src/application/compile/stage2-extraction.js +97 -0
- package/src/application/compile/stage4-consolidation.js +235 -0
- package/src/application/compile/stage5-assets.js +61 -0
- package/src/application/compile/stage6-validation.js +133 -0
- package/src/application/ports/asset-generator.js +33 -0
- package/src/context.js +274 -10
- package/src/domain/compilation.js +77 -0
- package/src/domain/preference.js +23 -0
- package/src/domain/rule.js +71 -0
- package/src/domain/scope.js +30 -0
- package/src/infrastructure/assets/claude/claude-generator.js +95 -0
- package/src/infrastructure/assets/cursor/cursor-rules-generator.js +119 -0
- package/src/infrastructure/assets/cursor/cursor-skills-generator.js +115 -0
- package/src/infrastructure/file-system/file-reader.js +67 -0
- package/src/infrastructure/file-system/file-writer.js +40 -0
- package/src/infrastructure/parsing/markdown-parser.js +95 -0
- package/src/infrastructure/parsing/rule-extractor.js +219 -0
- package/src/infrastructure/parsing/skill-extractor.js +74 -0
package/package.json
CHANGED
package/src/application/compile/compile-context.js
ADDED
@@ -0,0 +1,119 @@
+import { Stage1Discovery } from "./stage1-discovery.js";
+import { Stage2Extraction } from "./stage2-extraction.js";
+import { Stage4Consolidation } from "./stage4-consolidation.js";
+import { Stage5Assets } from "./stage5-assets.js";
+import { Stage6Validation } from "./stage6-validation.js";
+import YAML from "yaml";
+import fs from "node:fs";
+import path from "node:path";
+
+/**
+ * Main compilation orchestrator
+ * Coordinates all compilation stages
+ */
+export class CompileContext {
+  constructor() {
+    this.stage1 = new Stage1Discovery();
+    this.stage2 = new Stage2Extraction();
+    this.stage4 = new Stage4Consolidation();
+    this.stage5 = new Stage5Assets();
+    this.stage6 = new Stage6Validation();
+  }
+
+  /**
+   * Compile context for a specific pack
+   * @param {string} repoRoot - Repository root
+   * @param {string} packId - Pack ID to compile
+   * @param {string} outputPath - Output path (defaults to pack directory)
+   * @returns {Object} Compilation result
+   */
+  async compilePack(repoRoot, packId, outputPath = null) {
+    // Try both locations for context.yaml
+    const installedPath = path.join(repoRoot, "docs", "practices-and-standards", "context.yaml");
+    const sourcePath = path.join(repoRoot, "practices-and-standards", "context.yaml");
+
+    let contextPath;
+    if (fs.existsSync(installedPath)) {
+      contextPath = installedPath;
+    } else if (fs.existsSync(sourcePath)) {
+      contextPath = sourcePath;
+    } else {
+      throw new Error(`context.yaml not found. Checked:\n - ${installedPath}\n - ${sourcePath}`);
+    }
+
+    const contextYaml = YAML.parse(fs.readFileSync(contextPath, "utf8"));
+
+    // Find the pack in installed packs
+    const pack = (contextYaml.installed_packs || []).find(p => p.id === packId);
+    if (!pack) {
+      throw new Error(`Pack ${packId} not found in context.yaml`);
+    }
+
+    // Determine output path
+    if (!outputPath) {
+      // Output to pack directory
+      const packDir = path.dirname(path.join(repoRoot, pack.manifest));
+      outputPath = packDir;
+    }
+
+    const stageReports = {};
+
+    // Stage 1: Discovery
+    console.log("Stage 1: Discovery & Collection...");
+    const discovery = this.stage1.execute(repoRoot, contextYaml);
+
+    // Filter to only files from the specified pack
+    const packSourceFiles = discovery.sourceFiles.filter(f => f.packId === packId);
+
+    console.log(` Found ${packSourceFiles.length} source files`);
+    stageReports.stage1 = discovery;
+
+    // Stage 2: Extraction
+    console.log("Stage 2: Rule Extraction & Parsing...");
+    const extraction = this.stage2.execute(packSourceFiles);
+
+    console.log(` Extracted ${extraction.report.rulesExtracted} rules`);
+    console.log(` Extracted ${extraction.report.preferencesExtracted} preferences`);
+    console.log(` Extracted ${extraction.report.scopesExtracted} scopes`);
+
+    if (extraction.report.errors.length > 0) {
+      console.warn(` Errors: ${extraction.report.errors.length}`);
+      extraction.report.errors.forEach(e => {
+        console.warn(` - ${e.file}: ${e.error}`);
+      });
+    }
+    stageReports.stage2 = extraction.report;
+
+    // Stage 4: Consolidation
+    console.log("Stage 4: Consolidation & Standard Generation...");
+    const consolidation = this.stage4.execute(extraction.compilation, outputPath);
+    console.log(` Generated ${consolidation.filesGenerated.length} consolidated files`);
+
+    // Stage 5: Asset Generation
+    console.log("Stage 5: Integration Asset Generation...");
+    const assets = await this.stage5.execute(extraction.compilation, packSourceFiles, outputPath);
+    console.log(` Generated Cursor rules: ${assets.results.cursorRules.filesGenerated.length} files`);
+    if (assets.skillsExtracted > 0) {
+      console.log(` Generated Cursor skills: ${assets.results.cursorSkills.filesGenerated.length} files`);
+    }
+    console.log(` Generated Claude.md`);
+
+    // Stage 6: Validation & Reporting
+    console.log("Stage 6: Validation & Reporting...");
+    const validation = this.stage6.execute(extraction.compilation, stageReports, outputPath);
+    console.log(` Validation: ${validation.validation.rulesValid ? "✅ Passed" : "❌ Failed"}`);
+    console.log(` Generated ${validation.reportsGenerated.length} reports`);
+
+    return {
+      compilation: extraction.compilation,
+      outputPath,
+      stageReports: {
+        ...stageReports,
+        stage4: consolidation,
+        stage5: assets,
+        stage6: validation
+      }
+    };
+  }
+}
+
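A minimal usage sketch for the orchestrator above. Only the `CompileContext` export and the `compilePack(repoRoot, packId, outputPath)` signature come from the diff; the import path, repo root, and pack id below are hypothetical.

```js
// Hypothetical invocation of CompileContext; the pack id and paths are illustrative.
// Import path is relative to the package root shown in this diff; adjust for your setup.
import { CompileContext } from "./src/application/compile/compile-context.js";

const compiler = new CompileContext();

// Compile one installed pack; a null outputPath falls back to the pack's own directory.
const result = await compiler.compilePack(process.cwd(), "acme.web-standards", null);

console.log(result.outputPath);
console.log(result.stageReports.stage6.validation.rulesValid);
```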
package/src/application/compile/stage1-discovery.js
ADDED
@@ -0,0 +1,125 @@
+import path from "node:path";
+import fs from "node:fs";
+import YAML from "yaml";
+import { FileReader } from "../../infrastructure/file-system/file-reader.js";
+
+/**
+ * Stage 1: Discovery & Collection
+ * Gathers all sources of knowledge (packs + custom docs)
+ */
+export class Stage1Discovery {
+  constructor() {
+    this.fileReader = new FileReader();
+  }
+
+  /**
+   * Discover and collect source files
+   * @param {string} repoRoot - Repository root directory
+   * @param {Object} contextYaml - Parsed context.yaml
+   * @returns {Object} Discovery result with source files and metadata
+   */
+  execute(repoRoot, contextYaml) {
+    const sourceFiles = [];
+    const packs = [];
+    const precedence = contextYaml.precedence || [];
+
+    // Order packs by precedence
+    const installedPacks = this.orderPacksByPrecedence(
+      contextYaml.installed_packs || [],
+      precedence
+    );
+
+    // Process each pack
+    for (let i = 0; i < installedPacks.length; i++) {
+      const pack = installedPacks[i];
+      const precedenceIndex = precedence.indexOf(pack.id);
+      const finalPrecedence = precedenceIndex !== -1 ? precedenceIndex : 1000 + i;
+
+      // Load pack manifest
+      const manifestPath = path.join(repoRoot, pack.manifest);
+      if (!this.fileReader.exists(manifestPath)) {
+        console.warn(`Pack manifest not found: ${manifestPath}`);
+        continue;
+      }
+
+      // Parse pack.yaml to get contributes.roots
+      const packManifest = YAML.parse(fs.readFileSync(manifestPath, "utf8"));
+      const packId = pack.id;
+      const packVersion = pack.version;
+      const roots = packManifest?.contributes?.roots || [];
+
+      packs.push({
+        id: packId,
+        version: packVersion,
+        precedence: finalPrecedence
+      });
+
+      // Collect markdown files from each root
+      for (const root of roots) {
+        // Try installed location first (docs/practices-and-standards/...)
+        let rootPath = path.join(repoRoot, root);
+
+        // If not found and root starts with "docs/", try source location
+        if (!this.fileReader.exists(rootPath) && root.startsWith("docs/")) {
+          const sourceRoot = root.replace(/^docs\//, "");
+          rootPath = path.join(repoRoot, sourceRoot);
+        }
+
+        if (!this.fileReader.exists(rootPath)) {
+          console.warn(`Pack ${packId} root not found: ${root} (tried: ${path.join(repoRoot, root)} and ${rootPath})`);
+          continue;
+        }
+
+        const markdownFiles = this.fileReader.listMarkdownFiles(rootPath);
+
+        for (const filePath of markdownFiles) {
+          const relativePath = path.relative(repoRoot, filePath).replace(/\\/g, "/");
+
+          sourceFiles.push({
+            path: relativePath,
+            absolutePath: filePath,
+            packId: packId,
+            packVersion: packVersion,
+            precedence: finalPrecedence,
+            isCustom: false
+          });
+        }
+      }
+    }
+
+    return {
+      sourceFiles,
+      packs,
+      precedenceOrder: precedence
+    };
+  }
+
+  /**
+   * Order packs by precedence
+   * @param {Object[]} installedPacks - Installed packs
+   * @param {string[]} precedence - Precedence order
+   * @returns {Object[]} Ordered packs
+   */
+  orderPacksByPrecedence(installedPacks, precedence) {
+    const ordered = [];
+    const unordered = [];
+
+    // Add packs in precedence order
+    for (const packId of precedence) {
+      const pack = installedPacks.find(p => p.id === packId);
+      if (pack) {
+        ordered.push(pack);
+      }
+    }
+
+    // Add remaining packs
+    for (const pack of installedPacks) {
+      if (!precedence.includes(pack.id)) {
+        unordered.push(pack);
+      }
+    }
+
+    return [...ordered, ...unordered];
+  }
+}
+
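To illustrate the precedence handling above: packs listed in `precedence` come first, in that order, and any remaining installed packs follow in their original order. A small sketch with invented pack ids:

```js
// Illustrative only; pack ids are invented.
// Import path is relative to the package root shown in this diff.
import { Stage1Discovery } from "./src/application/compile/stage1-discovery.js";

const stage1 = new Stage1Discovery();
const ordered = stage1.orderPacksByPrecedence(
  [{ id: "pack-a" }, { id: "pack-b" }, { id: "pack-c" }],
  ["pack-c", "pack-a"]
);
// ordered.map(p => p.id) -> ["pack-c", "pack-a", "pack-b"]
// In execute(), pack-c and pack-a then get precedence 0 and 1 from their index in the
// precedence list, while pack-b falls back to 1000 + its position in the ordered list (1002 here).
```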
package/src/application/compile/stage2-extraction.js
ADDED
@@ -0,0 +1,97 @@
+import { FileReader } from "../../infrastructure/file-system/file-reader.js";
+import { RuleExtractor } from "../../infrastructure/parsing/rule-extractor.js";
+import { Compilation } from "../../domain/compilation.js";
+
+/**
+ * Stage 2: Rule Extraction & Parsing
+ * Extracts structured rules from markdown documents
+ */
+export class Stage2Extraction {
+  constructor() {
+    this.fileReader = new FileReader();
+    this.ruleExtractor = new RuleExtractor();
+  }
+
+  /**
+   * Extract rules from source files
+   * @param {Object[]} sourceFiles - Array of source file metadata
+   * @returns {Object} Extraction result with compilation and report
+   */
+  execute(sourceFiles) {
+    const compilation = new Compilation();
+    const report = {
+      filesProcessed: 0,
+      rulesExtracted: 0,
+      preferencesExtracted: 0,
+      scopesExtracted: 0,
+      errors: [],
+      warnings: []
+    };
+
+    for (const sourceFile of sourceFiles) {
+      try {
+        report.filesProcessed++;
+
+        const content = this.fileReader.readFile(sourceFile.absolutePath);
+
+        // Extract rules
+        const rules = this.ruleExtractor.extractRules(content, {
+          pack: sourceFile.packId,
+          packVersion: sourceFile.packVersion,
+          file: sourceFile.path,
+          precedence: sourceFile.precedence
+        });
+
+        for (const rule of rules) {
+          try {
+            compilation.addRule(rule);
+            report.rulesExtracted++;
+          } catch (error) {
+            report.errors.push({
+              file: sourceFile.path,
+              error: error.message
+            });
+          }
+        }
+
+        // Extract preferences
+        const preferences = this.ruleExtractor.extractPreferences(content, {
+          pack: sourceFile.packId,
+          packVersion: sourceFile.packVersion,
+          file: sourceFile.path,
+          precedence: sourceFile.precedence
+        });
+
+        for (const preference of preferences) {
+          compilation.addPreference(preference);
+          report.preferencesExtracted++;
+        }
+
+        // Extract scope
+        const scope = this.ruleExtractor.extractScope(content, {
+          pack: sourceFile.packId,
+          packVersion: sourceFile.packVersion,
+          file: sourceFile.path,
+          precedence: sourceFile.precedence
+        });
+
+        if (scope) {
+          compilation.addScope(scope);
+          report.scopesExtracted++;
+        }
+
+      } catch (error) {
+        report.errors.push({
+          file: sourceFile.path,
+          error: error.message
+        });
+      }
+    }
+
+    return {
+      compilation,
+      report
+    };
+  }
+}
+
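For reference, the extraction stage depends only on the source-file metadata produced by Stage 1; a hypothetical entry with the fields read above (all values invented):

```js
// Hypothetical Stage 1 source-file entry consumed by Stage2Extraction.execute();
// every value below is illustrative.
const sourceFile = {
  path: "docs/practices-and-standards/acme/testing.md", // repo-relative; used in error entries and rule metadata
  absolutePath: "/work/repo/docs/practices-and-standards/acme/testing.md", // read from disk
  packId: "acme.web-standards",
  packVersion: "1.2.0",
  precedence: 0,
  isCustom: false
};
// new Stage2Extraction().execute([sourceFile]) returns { compilation, report };
// per-file read or extraction failures are collected in report.errors rather than thrown.
```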
package/src/application/compile/stage4-consolidation.js
ADDED
@@ -0,0 +1,235 @@
+import { FileWriter } from "../../infrastructure/file-system/file-writer.js";
+import path from "node:path";
+
+/**
+ * Stage 4: Consolidation & Standard Generation
+ * Creates a single, coherent, human-readable standard document
+ */
+export class Stage4Consolidation {
+  constructor() {
+    this.fileWriter = new FileWriter();
+  }
+
+  /**
+   * Generate consolidated outputs
+   * @param {Compilation} compilation - The compilation with extracted rules
+   * @param {string} outputDir - Output directory
+   * @returns {Object} Consolidation result
+   */
+  execute(compilation, outputDir) {
+    const consolidatedDir = outputDir;
+
+    // Generate consolidated standard
+    const consolidatedStandard = this.generateConsolidatedStandard(compilation);
+    this.fileWriter.writeFile(
+      path.join(consolidatedDir, "CONSOLIDATED-STANDARDS.md"),
+      consolidatedStandard
+    );
+
+    // Generate rule index
+    const rulesIndex = this.generateRulesIndex(compilation);
+    this.fileWriter.writeFile(
+      path.join(consolidatedDir, "RULES-INDEX.md"),
+      rulesIndex
+    );
+
+    // Generate rules by scope
+    const rulesByScope = this.generateRulesByScope(compilation);
+    this.fileWriter.writeFile(
+      path.join(consolidatedDir, "RULES-BY-SCOPE.md"),
+      rulesByScope
+    );
+
+    // Generate JSON outputs
+    this.fileWriter.writeJson(
+      path.join(consolidatedDir, "rules.json"),
+      compilation.rules.map(r => ({
+        id: r.id,
+        level: r.level,
+        category: r.category,
+        appliesTo: r.appliesTo,
+        rule: r.rule,
+        rationale: r.rationale,
+        source: r.source
+      }))
+    );
+
+    this.fileWriter.writeJson(
+      path.join(consolidatedDir, "preferences.json"),
+      compilation.preferences.map(p => ({
+        name: p.name,
+        description: p.description,
+        source: p.source
+      }))
+    );
+
+    this.fileWriter.writeJson(
+      path.join(consolidatedDir, "metadata.json"),
+      {
+        ...compilation.metadata,
+        rulesCount: compilation.rules.length,
+        preferencesCount: compilation.preferences.length,
+        scopesCount: compilation.scopes.length,
+        categories: compilation.getCategories()
+      }
+    );
+
+    return {
+      outputDir: consolidatedDir,
+      filesGenerated: [
+        "CONSOLIDATED-STANDARDS.md",
+        "RULES-INDEX.md",
+        "RULES-BY-SCOPE.md",
+        "rules.json",
+        "preferences.json",
+        "metadata.json"
+      ]
+    };
+  }
+
+  /**
+   * Generate consolidated standard markdown
+   * @param {Compilation} compilation - Compilation object
+   * @returns {string} Markdown content
+   */
+  generateConsolidatedStandard(compilation) {
+    const lines = [];
+    lines.push("# Consolidated Standards");
+    lines.push("");
+    lines.push(`Generated: ${compilation.metadata.timestamp}`);
+    lines.push(`Total Rules: ${compilation.rules.length}`);
+    lines.push(`Total Preferences: ${compilation.preferences.length}`);
+    lines.push("");
+    lines.push("---");
+    lines.push("");
+
+    // Group by category
+    const categories = compilation.getCategories();
+    for (const category of categories) {
+      lines.push(`## ${category}`);
+      lines.push("");
+
+      const rules = compilation.getRulesByCategory(category);
+      // Sort by level, then by ID
+      const levelOrder = { must: 0, should: 1, prefer: 2, avoid: 3 };
+      rules.sort((a, b) => {
+        const levelDiff = levelOrder[a.level] - levelOrder[b.level];
+        if (levelDiff !== 0) return levelDiff;
+        return a.id.localeCompare(b.id);
+      });
+
+      for (const rule of rules) {
+        lines.push(`### ${rule.id}: ${this.extractTitle(rule.rule)}`);
+        lines.push("");
+        lines.push(`- **Level:** ${rule.level}`);
+        lines.push(`- **Applies to:** ${rule.appliesTo.join(", ")}`);
+        lines.push(`- **Rule:** ${rule.rule}`);
+        if (rule.rationale) {
+          lines.push(`- **Rationale:** ${rule.rationale}`);
+        }
+        lines.push(`- **Source:** [${rule.source.pack}@${rule.source.packVersion}](${rule.source.file})`);
+        lines.push("");
+      }
+    }
+
+    // Add preferences section
+    if (compilation.preferences.length > 0) {
+      lines.push("---");
+      lines.push("");
+      lines.push("## Preferences");
+      lines.push("");
+
+      for (const pref of compilation.preferences) {
+        lines.push(`- **${pref.name}**`);
+        if (pref.description) {
+          lines.push(` - ${pref.description}`);
+        }
+        lines.push(` - Source: [${pref.source.pack}@${pref.source.packVersion}](${pref.source.file})`);
+        lines.push("");
+      }
+    }
+
+    return lines.join("\n");
+  }
+
+  /**
+   * Generate rules index
+   * @param {Compilation} compilation - Compilation object
+   * @returns {string} Markdown content
+   */
+  generateRulesIndex(compilation) {
+    const lines = [];
+    lines.push("# Rules Index");
+    lines.push("");
+    lines.push("Quick reference of all rules.");
+    lines.push("");
+    lines.push("| ID | Level | Category | Applies To |");
+    lines.push("|----|-------|----------|------------|");
+
+    const allRules = [...compilation.rules].sort((a, b) => a.id.localeCompare(b.id));
+    for (const rule of allRules) {
+      lines.push(`| ${rule.id} | ${rule.level} | ${rule.category} | ${rule.appliesTo.join(", ")} |`);
+    }
+
+    return lines.join("\n");
+  }
+
+  /**
+   * Generate rules by scope
+   * @param {Compilation} compilation - Compilation object
+   * @returns {string} Markdown content
+   */
+  generateRulesByScope(compilation) {
+    const lines = [];
+    lines.push("# Rules by Scope");
+    lines.push("");
+    lines.push("Rules organized by what they apply to.");
+    lines.push("");
+
+    // Collect all unique scopes
+    const scopes = new Set();
+    for (const rule of compilation.rules) {
+      if (rule.appliesTo.includes("all")) {
+        scopes.add("all");
+      } else {
+        rule.appliesTo.forEach(s => scopes.add(s));
+      }
+    }
+
+    const sortedScopes = Array.from(scopes).sort();
+
+    for (const scope of sortedScopes) {
+      lines.push(`## ${scope}`);
+      lines.push("");
+
+      const applicableRules = compilation.rules.filter(r => r.appliesToScope(scope));
+      const levelOrder = { must: 0, should: 1, prefer: 2, avoid: 3 };
+      applicableRules.sort((a, b) => {
+        const levelDiff = levelOrder[a.level] - levelOrder[b.level];
+        if (levelDiff !== 0) return levelDiff;
+        return a.id.localeCompare(b.id);
+      });
+
+      for (const rule of applicableRules) {
+        lines.push(`- **${rule.id}** (${rule.level}): ${this.extractTitle(rule.rule)}`);
+      }
+
+      lines.push("");
+    }
+
+    return lines.join("\n");
+  }
+
+  /**
+   * Extract a short title from a rule statement
+   * @param {string} rule - Rule statement
+   * @returns {string} Short title
+   */
+  extractTitle(rule) {
+    // Take first sentence or first 80 characters
+    const firstSentence = rule.split(/[.!?]/)[0];
+    if (firstSentence.length <= 80) return firstSentence;
+    return rule.substring(0, 77) + "...";
+  }
+}
+
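The consolidation output is driven entirely by the rule fields referenced above. A hypothetical rule shaped the way the generators expect, with the level ordering they sort by (all values invented):

```js
// Hypothetical rule object matching the fields read by Stage4Consolidation;
// id, text, and source values are invented.
const rule = {
  id: "TEST-001",
  level: "must", // sorted must < should < prefer < avoid
  category: "Testing",
  appliesTo: ["all"], // real Rule instances also expose appliesToScope(), used for RULES-BY-SCOPE.md
  rule: "Write a failing test before fixing a bug.",
  rationale: "Prevents regressions from reappearing.",
  source: {
    pack: "acme.web-standards",
    packVersion: "1.2.0",
    file: "docs/practices-and-standards/acme/testing.md"
  }
};
// Such a rule would be listed under "## Testing" in CONSOLIDATED-STANDARDS.md,
// appear as a row in RULES-INDEX.md, and be serialized into rules.json.
```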
package/src/application/compile/stage5-assets.js
ADDED
@@ -0,0 +1,61 @@
+import { CursorRulesGenerator } from "../../infrastructure/assets/cursor/cursor-rules-generator.js";
+import { CursorSkillsGenerator } from "../../infrastructure/assets/cursor/cursor-skills-generator.js";
+import { ClaudeGenerator } from "../../infrastructure/assets/claude/claude-generator.js";
+import { SkillExtractor } from "../../infrastructure/parsing/skill-extractor.js";
+import { FileReader } from "../../infrastructure/file-system/file-reader.js";
+
+/**
+ * Stage 5: Integration Asset Generation
+ * Generates tool-specific assets from consolidated standard
+ */
+export class Stage5Assets {
+  constructor() {
+    this.cursorRulesGenerator = new CursorRulesGenerator();
+    this.cursorSkillsGenerator = new CursorSkillsGenerator();
+    this.claudeGenerator = new ClaudeGenerator();
+    this.skillExtractor = new SkillExtractor();
+    this.fileReader = new FileReader();
+  }
+
+  /**
+   * Generate integration assets
+   * @param {Compilation} compilation - The compilation with extracted rules
+   * @param {Object[]} sourceFiles - Source files (for skill extraction)
+   * @param {string} outputPath - Output directory
+   * @returns {Object} Asset generation result
+   */
+  async execute(compilation, sourceFiles, outputPath) {
+    const results = {};
+
+    // Extract skills from source files
+    const skills = [];
+    for (const sourceFile of sourceFiles) {
+      const content = this.fileReader.readFile(sourceFile.absolutePath);
+      const extractedSkills = this.skillExtractor.extractSkills(content, {
+        pack: sourceFile.packId,
+        packVersion: sourceFile.packVersion,
+        file: sourceFile.path
+      });
+      skills.push(...extractedSkills);
+    }
+
+    // Generate Cursor rules
+    results.cursorRules = await this.cursorRulesGenerator.generate(compilation, outputPath);
+
+    // Generate Cursor skills
+    if (skills.length > 0) {
+      results.cursorSkills = await this.cursorSkillsGenerator.generate(compilation, outputPath, skills);
+    } else {
+      results.cursorSkills = { filesGenerated: [], outputDir: null };
+    }
+
+    // Generate Claude.md
+    results.claude = await this.claudeGenerator.generate(compilation, outputPath);
+
+    return {
+      results,
+      skillsExtracted: skills.length
+    };
+  }
+}
+
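Tying back to the orchestrator: compile-context.js only reads a few fields from the value Stage5Assets.execute() resolves to. A sketch of that shape (file names are invented; the claude entry's internals are not inspected by the orchestrator):

```js
// Sketch of the resolved value of Stage5Assets.execute(); file names are illustrative.
const assets = {
  results: {
    cursorRules: { filesGenerated: ["testing.mdc", "naming.mdc"] }, // counted in the Stage 5 log line
    cursorSkills: { filesGenerated: [], outputDir: null }, // fallback used when no skills are found
    claude: {} // returned by ClaudeGenerator.generate(); not inspected further by the orchestrator
  },
  skillsExtracted: 0 // skills gathered by SkillExtractor across all pack source files
};
```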