fraim-framework 2.0.86 → 2.0.88
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +30 -0
- package/bin/fraim.js +1 -1
- package/dist/src/cli/commands/add-provider.js +16 -6
- package/dist/src/cli/commands/init-project.js +103 -1
- package/dist/src/cli/commands/login.js +84 -0
- package/dist/src/cli/commands/setup.js +135 -13
- package/dist/src/cli/fraim.js +2 -0
- package/dist/src/cli/internal/device-flow-service.js +83 -0
- package/dist/src/cli/mcp/mcp-server-registry.js +11 -10
- package/dist/src/cli/providers/local-provider-registry.js +22 -1
- package/dist/src/cli/services/device-flow-service.js +83 -0
- package/dist/src/cli/setup/provider-prompts.js +39 -0
- package/dist/src/cli/utils/remote-sync.js +159 -28
- package/dist/src/core/ai-mentor.js +248 -0
- package/dist/src/core/utils/git-utils.js +6 -6
- package/dist/src/core/utils/include-resolver.js +45 -0
- package/dist/src/core/utils/inheritance-parser.js +154 -16
- package/dist/src/core/utils/local-registry-resolver.js +326 -22
- package/dist/src/core/utils/server-startup.js +34 -0
- package/dist/src/core/utils/stub-generator.js +34 -27
- package/dist/src/core/utils/workflow-parser.js +32 -2
- package/dist/src/local-mcp-server/stdio-server.js +240 -284
- package/index.js +26 -5
- package/package.json +15 -5
|
@@ -0,0 +1,248 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.AIMentor = void 0;
|
|
4
|
+
const include_resolver_1 = require("./utils/include-resolver");
|
|
5
|
+
/**
 * AIMentor drives phase-based "mentoring" for agent workflows and jobs.
 *
 * It loads workflow definitions through the injected registry resolver,
 * caches them per workflow type, and generates the starting / completion /
 * help messages an agent sees as it moves through a workflow's phases.
 */
class AIMentor {
    /**
     * @param resolver - Registry resolver used to load workflows and files.
     *   This class calls resolver.getWorkflow(name, type), resolver.getFile(path)
     *   and resolver.listItems(type); the concrete type is declared elsewhere.
     */
    constructor(resolver) {
        // Lazily-filled cache: workflowType -> parsed workflow object.
        this.workflowCache = new Map();
        this.resolver = resolver;
    }
    /**
     * Handle mentoring/coaching request from agent
     *
     * Validates the workflow, status and (for phased workflows) the phase,
     * then dispatches to the matching message generator.
     *
     * @param args - { workflowType, currentPhase, status, findings?, evidence?, skipIncludes? }
     * @returns { message, nextPhase, status } from the chosen generator
     * @throws {Error} if the workflow is unknown, the status is invalid, or the
     *   phase is not present in a phased (non-simple) workflow
     */
    async handleMentoringRequest(args) {
        const workflow = await this.getOrLoadWorkflow(args.workflowType);
        if (!workflow) {
            throw new Error(`Workflow "${args.workflowType}" not found or invalid.`);
        }
        const validStatus = ['starting', 'complete', 'incomplete', 'failure'].includes(args.status);
        if (!validStatus) {
            throw new Error(`Invalid status: ${args.status}. Must be one of: starting, complete, incomplete, failure.`);
        }
        // For simple workflows, skip phase validation
        if (!workflow.isSimple) {
            const phases = workflow.metadata.phases || {};
            const hasMetadata = !!phases[args.currentPhase];
            const hasMarkdown = workflow.phases.has(args.currentPhase);
            // 'starting' is always accepted as the bootstrap pseudo-phase.
            if (!hasMetadata && !hasMarkdown && args.currentPhase !== 'starting') {
                throw new Error(`Phase "${args.currentPhase}" not found in workflow "${args.workflowType}".`);
            }
        }
        // Handle different statuses
        if (args.status === 'starting') {
            return await this.generateStartingMessage(workflow, args.currentPhase, args.skipIncludes);
        }
        else if (args.status === 'complete') {
            return await this.generateCompletionMessage(workflow, args.currentPhase, args.findings, args.evidence, args.skipIncludes);
        }
        else {
            // 'incomplete' and 'failure' both route to the help generator.
            return await this.generateHelpMessage(workflow, args.currentPhase, args.status, args.skipIncludes);
        }
    }
    /**
     * Fetch a workflow from the cache, loading it through the resolver on a miss.
     *
     * NOTE(review): the cache key is workflowType only, so a hit ignores
     * preferredType — the first type loaded for a name wins. Confirm intended.
     *
     * @returns the workflow object, or null if the resolver cannot provide one
     */
    async getOrLoadWorkflow(workflowType, preferredType) {
        if (this.workflowCache.has(workflowType)) {
            return this.workflowCache.get(workflowType);
        }
        const workflow = await this.resolver.getWorkflow(workflowType, preferredType);
        if (workflow) {
            this.workflowCache.set(workflowType, workflow);
            return workflow;
        }
        return null;
    }
    /** Inline {{include:path}} directives in content via the shared include resolver. */
    async resolveIncludes(content, basePath) {
        return (0, include_resolver_1.resolveIncludes)(content, this.resolver, basePath);
    }
    /**
     * Throw if content still contains any {{include:...}} directive.
     *
     * @param context - human-readable label ("workflow:phase (status)") for the error
     * @throws {Error} listing each distinct unresolved directive
     */
    assertNoUnresolvedIncludes(content, context) {
        const matches = content.match(/\{\{include:[^}]+\}\}/g);
        if (!matches || matches.length === 0)
            return;
        const unique = Array.from(new Set(matches));
        throw new Error(`Unresolved include directives in ${context}: ${unique.join(', ')}`);
    }
    /**
     * Build the "report back" footer appended to job phase instructions: a
     * seekMentoring(...) call template whose shape depends on the phase's
     * onSuccess routing (none / single next phase / outcome map).
     *
     * @returns '' when the phase has no flow metadata, else a markdown blockquote
     */
    buildReportBackFooter(workflowType, phaseId, phaseFlow) {
        if (!phaseFlow)
            return '';
        const base = `seekMentoring({
  workflowType: "${workflowType}",
  issueNumber: "<issue_number>",
  currentPhase: "${phaseId}",
  status: "complete",`;
        const onSuccess = phaseFlow.onSuccess;
        let successBlock;
        if (!onSuccess) {
            // Terminal phase: no routing information.
            successBlock = `\n\n---\n\n> **⚑ Phase Complete — Report Back**\n> When you have finished all steps above, call:\n> \`\`\`javascript\n> ${base}\n> // This is the final phase.\n> })\n> \`\`\``;
        }
        else if (typeof onSuccess === 'string') {
            // Single unconditional next phase.
            successBlock = `\n\n---\n\n> **⚑ Phase Complete — Report Back**\n> When you have finished all steps above, call:\n> \`\`\`javascript\n> ${base}\n> })\n> \`\`\``;
        }
        else {
            // Outcome map: list the valid outcome keys (minus 'default').
            const validOutcomes = Object.keys(onSuccess)
                .filter(k => k !== 'default')
                .map(k => `"${k}"`)
                .join(' | ');
            successBlock = `\n\n---\n\n> **⚑ Phase Complete — Report Back**\n> The next phase depends on your outcome. Set \`findings.issueType\` (or \`findings.phaseOutcome\`) to one of: ${validOutcomes}\n>\n> Then call:\n> \`\`\`javascript\n> ${base}\n> findings: { issueType: "<Outcome>" }\n> })\n> \`\`\``;
        }
        return successBlock;
    }
    /** Phase-authority content injected for all phased workflows. Loaded from orchestration/phase-authority.md. */
    async getPhaseAuthorityContent() {
        try {
            const content = await this.resolver.getFile('orchestration/phase-authority.md');
            return content?.trim() || '';
        }
        catch {
            // Best-effort: a missing/failed file simply yields no authority block.
            return '';
        }
    }
    /** Prefix message with the phase-authority block for phased workflows; no-op otherwise. */
    async prependPhaseAuthority(message, isPhased) {
        if (!isPhased)
            return message;
        const block = await this.getPhaseAuthorityContent();
        if (!block)
            return message;
        return `${block}\n\n---\n\n${message}`;
    }
    /**
     * Build the message shown when a workflow/phase is starting.
     *
     * Simple workflows get only the overview. Phased workflows get the phase
     * instructions (with includes resolved unless skipIncludes), plus the
     * report-back footer for jobs and the phase-authority preamble.
     *
     * @returns { message, nextPhase, status: 'starting' }
     */
    async generateStartingMessage(workflow, phaseId, skipIncludes) {
        const isJob = workflow.metadata.type === 'job';
        const entityType = isJob ? 'Job' : 'Workflow';
        if (workflow.isSimple) {
            const message = `🚀 **Starting ${entityType}: ${workflow.metadata.name}**\n\n${workflow.overview}`;
            this.assertNoUnresolvedIncludes(message, `${workflow.metadata.name} (starting)`);
            return {
                message,
                nextPhase: null,
                status: 'starting'
            };
        }
        // phaseId === 'starting' means the bootstrap call; route to initialPhase.
        const isVeryFirstCall = phaseId === 'starting';
        const targetPhase = isVeryFirstCall ? (workflow.metadata.initialPhase || 'starting') : phaseId;
        let message = '';
        if (!isJob) {
            message += `🚀 **Starting Workflow: ${workflow.metadata.name}**\n\n`;
            if (isVeryFirstCall) {
                message += `${workflow.overview}\n\n---\n\n`;
            }
        }
        let instructions = workflow.phases.get(targetPhase);
        if (instructions) {
            instructions = skipIncludes ? instructions : await this.resolveIncludes(instructions, workflow.path);
            message += `${instructions}`;
        }
        else {
            message += `⚠️ No specific instructions found for phase: ${targetPhase}`;
        }
        if (isJob) {
            const phaseFlow = workflow.metadata.phases?.[targetPhase];
            message += this.buildReportBackFooter(workflow.metadata.name, targetPhase, phaseFlow);
        }
        if (!skipIncludes) {
            this.assertNoUnresolvedIncludes(message, `${workflow.metadata.name}:${targetPhase} (starting)`);
        }
        return {
            message: await this.prependPhaseAuthority(message, true),
            nextPhase: targetPhase,
            status: 'starting'
        };
    }
    /**
     * Build the message for a completed phase: route to the next phase using
     * the phase's onSuccess metadata (string or outcome map keyed by
     * findings.phaseOutcome / findings.issueType / evidence fields), or emit a
     * final "accomplished" message when there is no next phase.
     *
     * @returns { message, nextPhase, status: 'complete' }
     */
    async generateCompletionMessage(workflow, phaseId, findings, evidence, skipIncludes) {
        const isJob = workflow.metadata.type === 'job';
        const entityType = isJob ? 'Job' : 'Workflow';
        if (workflow.isSimple) {
            const message = `✅ **${entityType} Complete: ${workflow.metadata.name}**\n\n🎉 Great work! You have completed the ${workflow.metadata.name} ${entityType.toLowerCase()}.`;
            this.assertNoUnresolvedIncludes(message, `${workflow.metadata.name} (complete)`);
            return {
                message,
                nextPhase: null,
                status: 'complete'
            };
        }
        const phaseFlow = workflow.metadata.phases?.[phaseId];
        let nextPhaseId = null;
        if (phaseFlow && phaseFlow.onSuccess) {
            if (typeof phaseFlow.onSuccess === 'string') {
                nextPhaseId = phaseFlow.onSuccess;
            }
            else {
                // Outcome map: first matching reported outcome wins, else 'default'.
                const outcome = findings?.phaseOutcome ?? findings?.issueType ?? evidence?.issueType ?? evidence?.phaseOutcome ?? 'default';
                nextPhaseId = phaseFlow.onSuccess[outcome] ?? phaseFlow.onSuccess['default'] ?? null;
            }
        }
        let message = '';
        if (nextPhaseId) {
            if (isJob) {
                message += `Great work. Moving to the next phase: **${nextPhaseId}**.\n\n`;
            }
            else {
                message += `✅ **Phase Complete: ${phaseId}**\n\nMoving to the next phase: **${nextPhaseId}**.\n\n`;
            }
            let nextInstructions = workflow.phases.get(nextPhaseId);
            if (nextInstructions) {
                nextInstructions = skipIncludes ? nextInstructions : await this.resolveIncludes(nextInstructions, workflow.path);
                message += nextInstructions;
            }
            if (isJob) {
                const nextPhaseFlow = workflow.metadata.phases?.[nextPhaseId];
                message += this.buildReportBackFooter(workflow.metadata.name, nextPhaseId, nextPhaseFlow);
            }
        }
        else {
            message += `🎉 **${entityType} Accomplished!** You have completed all phases of the ${workflow.metadata.name} ${entityType.toLowerCase()}.`;
        }
        if (!skipIncludes) {
            this.assertNoUnresolvedIncludes(message, `${workflow.metadata.name}:${phaseId} (complete)`);
        }
        return {
            message: await this.prependPhaseAuthority(message, true),
            nextPhase: nextPhaseId,
            status: 'complete'
        };
    }
    /**
     * Build the help message for 'incomplete'/'failure' statuses: re-present
     * the current phase's instructions (or, on failure, the phase named by
     * onFailure when present).
     *
     * @returns { message, nextPhase, status } echoing the incoming status
     */
    async generateHelpMessage(workflow, phaseId, status, skipIncludes) {
        const entityType = workflow.metadata.type === 'job' ? 'Job' : 'Workflow';
        if (workflow.isSimple) {
            const message = `**${entityType}: ${workflow.metadata.name}**\n\n${workflow.overview}`;
            this.assertNoUnresolvedIncludes(message, `${workflow.metadata.name} (${status})`);
            return {
                message,
                nextPhase: null,
                status
            };
        }
        const phaseMeta = workflow.metadata.phases?.[phaseId];
        // On failure, redirect to the phase's onFailure target when configured.
        const targetPhaseId = status === 'failure' ? (phaseMeta?.onFailure || phaseId) : phaseId;
        let message = `### Current Phase: ${targetPhaseId}\n\n`;
        let instructions = workflow.phases.get(targetPhaseId);
        if (instructions) {
            instructions = skipIncludes ? instructions : await this.resolveIncludes(instructions, workflow.path);
            message += instructions;
        }
        if (!skipIncludes) {
            this.assertNoUnresolvedIncludes(message, `${workflow.metadata.name}:${targetPhaseId} (${status})`);
        }
        return {
            message: await this.prependPhaseAuthority(message, true),
            nextPhase: targetPhaseId,
            status
        };
    }
    /** Overview + isSimple flag for a workflow, or null when it cannot be loaded. */
    async getWorkflowOverview(workflowType) {
        const workflow = await this.getOrLoadWorkflow(workflowType, 'workflow');
        return workflow ? { overview: workflow.overview, isSimple: workflow.isSimple } : null;
    }
    /** Overview + isSimple flag for a job, or null when it cannot be loaded. */
    async getJobOverview(jobName) {
        const job = await this.getOrLoadWorkflow(jobName, 'job');
        return job ? { overview: job.overview, isSimple: job.isSimple } : null;
    }
    /**
     * Load every workflow the resolver lists (type 'workflow').
     * Unloadable entries are silently skipped.
     */
    async getAllWorkflowMetadata() {
        const items = await this.resolver.listItems('workflow');
        const workflows = [];
        for (const item of items) {
            const wf = await this.resolver.getWorkflow(item.name, 'workflow');
            if (wf)
                workflows.push(wf);
        }
        return workflows;
    }
}
exports.AIMentor = AIMentor;
|
|
@@ -12,9 +12,9 @@ const child_process_1 = require("child_process");
|
|
|
12
12
|
*/
|
|
13
13
|
function getPort() {
|
|
14
14
|
try {
|
|
15
|
-
const branchName = (0, child_process_1.execSync)('git rev-parse --abbrev-ref HEAD').toString().trim();
|
|
16
|
-
// Match issue-123 or 123-feature-name
|
|
17
|
-
const issueMatch = branchName.match(/issue-(\d+)/i) || branchName.match(
|
|
15
|
+
const branchName = process.env.FRAIM_BRANCH || (0, child_process_1.execSync)('git rev-parse --abbrev-ref HEAD').toString().trim();
|
|
16
|
+
// Match issue-123 or 123-feature-name or feature/123-name
|
|
17
|
+
const issueMatch = branchName.match(/issue-(\d+)/i) || branchName.match(/(\d+)-/);
|
|
18
18
|
if (issueMatch) {
|
|
19
19
|
const issueNum = parseInt(issueMatch[1], 10);
|
|
20
20
|
// Ensure port is in a safe range (10000-65535)
|
|
@@ -31,8 +31,8 @@ function getPort() {
|
|
|
31
31
|
*/
|
|
32
32
|
function determineDatabaseName() {
|
|
33
33
|
try {
|
|
34
|
-
const branchName = (0, child_process_1.execSync)('git rev-parse --abbrev-ref HEAD').toString().trim();
|
|
35
|
-
const issueMatch = branchName.match(/issue-(\d+)/i) || branchName.match(
|
|
34
|
+
const branchName = process.env.FRAIM_BRANCH || (0, child_process_1.execSync)('git rev-parse --abbrev-ref HEAD').toString().trim();
|
|
35
|
+
const issueMatch = branchName.match(/issue-(\d+)/i) || branchName.match(/(\d+)-/);
|
|
36
36
|
if (issueMatch) {
|
|
37
37
|
return `fraim_issue_${issueMatch[1]}`;
|
|
38
38
|
}
|
|
@@ -60,7 +60,7 @@ function getCurrentGitBranch() {
|
|
|
60
60
|
* Determines the database schema prefix based on the branch
|
|
61
61
|
*/
|
|
62
62
|
function determineSchema(branchName) {
|
|
63
|
-
const issueMatch = branchName.match(/issue-(\d+)/i) || branchName.match(
|
|
63
|
+
const issueMatch = branchName.match(/issue-(\d+)/i) || branchName.match(/(\d+)-/);
|
|
64
64
|
if (issueMatch) {
|
|
65
65
|
return `issue_${issueMatch[1]}`;
|
|
66
66
|
}
|
|
@@ -11,6 +11,7 @@
|
|
|
11
11
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
12
|
exports.MAX_INCLUDE_PASSES = void 0;
|
|
13
13
|
exports.resolveIncludesWithIndex = resolveIncludesWithIndex;
|
|
14
|
+
exports.resolveIncludes = resolveIncludes;
|
|
14
15
|
const fs_1 = require("fs");
|
|
15
16
|
/** Maximum resolution passes to prevent infinite loops from circular includes */
|
|
16
17
|
exports.MAX_INCLUDE_PASSES = 10;
|
|
@@ -45,3 +46,47 @@ function resolveIncludesWithIndex(content, fileIndex) {
|
|
|
45
46
|
}
|
|
46
47
|
return result;
|
|
47
48
|
}
|
|
49
|
+
/**
 * Asynchronously resolve {{include:path}} directives in content using a RegistryResolver.
 *
 * Paths starting with "./" are resolved relative to the directory of `basePath`
 * (when given); all other paths are passed to the resolver unchanged. Content
 * fetched for an include is itself resolved recursively. Unresolvable
 * directives are left in place (with a single warning each) rather than throwing.
 *
 * @param content - Raw content that may contain {{include:path}} directives
 * @param resolver - RegistryResolver instance (must expose async getFile(path))
 * @param basePath - Optional path of the file owning `content`, for "./" resolution
 * @returns Content with all resolvable includes inlined
 */
async function resolveIncludes(content, resolver, basePath) {
    let result = content;
    let pass = 0;
    while (result.includes('{{include:') && pass < exports.MAX_INCLUDE_PASSES) {
        // Collect all unique includes in this pass
        const matches = result.match(/\{\{include:([^}]+)\}\}/g);
        if (!matches)
            break;
        const uniqueMatches = Array.from(new Set(matches));
        // Track progress: if nothing in this pass could be resolved, further
        // passes would only repeat the same failed lookups (and duplicate the
        // warnings) up to MAX_INCLUDE_PASSES times — stop instead.
        let replacedAny = false;
        for (const match of uniqueMatches) {
            const filePath = match.match(/\{\{include:([^}]+)\}\}/)[1].trim();
            let targetPath = filePath;
            if (filePath.startsWith('./') && basePath) {
                // Resolve relative to the directory of the current file
                const dir = basePath.includes('/') ? basePath.substring(0, basePath.lastIndexOf('/')) : '';
                targetPath = dir ? `${dir}/${filePath.substring(2)}` : filePath.substring(2);
            }
            try {
                const fileContent = await resolver.getFile(targetPath);
                if (fileContent !== null) {
                    // Recursively resolve includes in the newly fetched content
                    const resolvedContent = await resolveIncludes(fileContent, resolver, targetPath);
                    // Replace all occurrences of this specific include
                    result = result.split(match).join(resolvedContent);
                    replacedAny = true;
                }
                else {
                    console.warn(`⚠️ Include file not found via resolver: ${targetPath} (original: ${filePath}, base: ${basePath})`);
                }
            }
            catch (error) {
                console.error(`❌ Failed to resolve include via resolver: ${targetPath}`, error);
            }
        }
        if (!replacedAny) {
            break;
        }
        pass++;
    }
    return result;
}
|
|
@@ -81,14 +81,14 @@ class InheritanceParser {
|
|
|
81
81
|
}
|
|
82
82
|
}
|
|
83
83
|
/**
|
|
84
|
-
* Resolve all import directives in content recursively
|
|
84
|
+
* Resolve all import and extends directives in content recursively
|
|
85
85
|
*
|
|
86
|
-
* @param content - Content with {{ import }}
|
|
86
|
+
* @param content - Content with {{ import }} or extends frontmatter
|
|
87
87
|
* @param currentPath - Path of current file (for circular detection)
|
|
88
88
|
* @param options - Resolution options
|
|
89
|
-
* @returns Resolved content with all
|
|
89
|
+
* @returns Resolved content with all inheritance applied
|
|
90
90
|
*
|
|
91
|
-
* @throws {InheritanceError} If circular
|
|
91
|
+
* @throws {InheritanceError} If circular inheritance, path traversal, or max depth exceeded
|
|
92
92
|
*/
|
|
93
93
|
async resolve(content, currentPath, options) {
|
|
94
94
|
const depth = options.currentDepth || 0;
|
|
@@ -96,18 +96,53 @@ class InheritanceParser {
|
|
|
96
96
|
const maxDepth = options.maxDepth || this.maxDepth;
|
|
97
97
|
// Check depth limit
|
|
98
98
|
if (depth > maxDepth) {
|
|
99
|
-
throw new InheritanceError(`Max
|
|
99
|
+
throw new InheritanceError(`Max inheritance depth exceeded (${maxDepth})`, currentPath);
|
|
100
100
|
}
|
|
101
|
-
// Check circular
|
|
101
|
+
// Check circular inheritance (but allow importing/extending the same path as parent)
|
|
102
102
|
this.detectCircularImport(currentPath, visited, false);
|
|
103
103
|
visited.add(currentPath);
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
104
|
+
let resolvedContent = content;
|
|
105
|
+
// 1. Handle JSON frontmatter 'extends'
|
|
106
|
+
const metadataMatch = resolvedContent.match(/^---\r?\n([\s\S]+?)\r?\n---/);
|
|
107
|
+
if (metadataMatch) {
|
|
108
|
+
try {
|
|
109
|
+
const metadata = JSON.parse(metadataMatch[1]);
|
|
110
|
+
const extendsPath = metadata.extends;
|
|
111
|
+
if (extendsPath && typeof extendsPath === 'string') {
|
|
112
|
+
// Sanitize path
|
|
113
|
+
const sanitizedExtends = this.sanitizePath(extendsPath);
|
|
114
|
+
const isParentExtends = sanitizedExtends === currentPath;
|
|
115
|
+
// Fetch parent content
|
|
116
|
+
let parentContent;
|
|
117
|
+
try {
|
|
118
|
+
parentContent = await options.fetchParent(sanitizedExtends);
|
|
119
|
+
}
|
|
120
|
+
catch (error) {
|
|
121
|
+
throw new InheritanceError(`Failed to fetch extended parent content: ${sanitizedExtends}. ${error.message}`, sanitizedExtends);
|
|
122
|
+
}
|
|
123
|
+
// Recursively resolve parent
|
|
124
|
+
const parentVisited = isParentExtends ? new Set() : new Set(visited);
|
|
125
|
+
const resolvedParent = await this.resolve(parentContent, sanitizedExtends, {
|
|
126
|
+
...options,
|
|
127
|
+
currentDepth: depth + 1,
|
|
128
|
+
visited: parentVisited
|
|
129
|
+
});
|
|
130
|
+
// Merge current content with resolved parent
|
|
131
|
+
resolvedContent = this.mergeContent(resolvedContent, resolvedParent);
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
catch (error) {
|
|
135
|
+
if (error instanceof SyntaxError) {
|
|
136
|
+
// Not JSON or invalid JSON, ignore extends logic but log it
|
|
137
|
+
console.warn(`[InheritanceParser] Failed to parse frontmatter for ${currentPath}: ${error.message}`);
|
|
138
|
+
}
|
|
139
|
+
else {
|
|
140
|
+
throw error;
|
|
141
|
+
}
|
|
142
|
+
}
|
|
108
143
|
}
|
|
109
|
-
//
|
|
110
|
-
|
|
144
|
+
// 2. Handle {{ import: path }}
|
|
145
|
+
const imports = this.extractImports(resolvedContent);
|
|
111
146
|
for (const importPath of imports) {
|
|
112
147
|
// Sanitize path
|
|
113
148
|
const sanitized = this.sanitizePath(importPath);
|
|
@@ -122,7 +157,6 @@ class InheritanceParser {
|
|
|
122
157
|
throw new InheritanceError(`Failed to fetch parent content: ${sanitized}. ${error.message}`, sanitized);
|
|
123
158
|
}
|
|
124
159
|
// Recursively resolve parent imports
|
|
125
|
-
// For parent imports, use a fresh visited set to allow the same path
|
|
126
160
|
const parentVisited = isParentImport ? new Set() : new Set(visited);
|
|
127
161
|
const resolvedParent = await this.resolve(parentContent, sanitized, {
|
|
128
162
|
...options,
|
|
@@ -131,20 +165,124 @@ class InheritanceParser {
|
|
|
131
165
|
});
|
|
132
166
|
// Replace import directive with resolved parent content
|
|
133
167
|
const importDirective = `{{ import: ${importPath} }}`;
|
|
134
|
-
|
|
168
|
+
resolvedContent = resolvedContent.replace(importDirective, resolvedParent);
|
|
135
169
|
}
|
|
136
|
-
return
|
|
170
|
+
return resolvedContent;
|
|
171
|
+
}
|
|
172
|
+
    /**
     * Merge two registry files (child override + parent base)
     *
     * Merging rules:
     * 1. Metadata: JSON merge (child overrides parent)
     * 2. Overview: Parent overview + child overview (if multi-para)
     * 3. Phases: Phase override (child phase with same ID replaces parent phase)
     *
     * Both files are expected to have JSON frontmatter delimited by `---` lines;
     * if either is missing, the child is returned unchanged.
     * NOTE(review): JSON.parse on either frontmatter block will throw here if
     * invalid — callers appear to handle SyntaxError; confirm at call sites.
     */
    mergeContent(child, parent) {
        const childMatch = child.match(/^---\r?\n([\s\S]+?)\r?\n---/);
        const parentMatch = parent.match(/^---\r?\n([\s\S]+?)\r?\n---/);
        if (!childMatch || !parentMatch)
            return child;
        // 1. Merge Metadata
        const childMeta = JSON.parse(childMatch[1]);
        const parentMeta = JSON.parse(parentMatch[1]);
        const mergedMeta = { ...parentMeta, ...childMeta };
        delete mergedMeta.extends; // Remove extends from final merged content
        // 2. Extract Body (everything after frontmatter)
        // Child body also has any `{{ import: … }}` pointing at the extended
        // parent stripped, to avoid inlining the parent twice.
        const childBody = this.stripRedundantParentImports(child.substring(childMatch[0].length).trim(), typeof childMeta.extends === 'string' ? childMeta.extends : undefined);
        const parentBody = parent.substring(parentMatch[0].length).trim();
        // 3. Parse Phases and Overview
        // Splits a body on `## Phase: <id>` headings; text before the first
        // heading is the overview, each section keyed by its lowercased ID.
        const parsePhases = (body) => {
            const phases = new Map();
            const sections = body.split(/^##\s+Phase:\s+/m);
            const overview = sections[0]?.trim() || '';
            for (let i = 1; i < sections.length; i++) {
                const section = sections[i];
                if (!section.trim())
                    continue;
                // Extract ID from first line: e.g. "implement-scoping (Primary)" -> "implement-scoping"
                const firstLine = section.split(/\r?\n/)[0].trim();
                const id = firstLine.split(/[ (]/)[0].trim().toLowerCase();
                phases.set(id, `## Phase: ${section.trim()}`);
            }
            return { overview, phases };
        };
        const childParts = parsePhases(childBody);
        const parentParts = parsePhases(parentBody);
        // 4. Merge Overview: retain the parent framing, then append local overview additions.
        const mergedOverview = childParts.overview
            ? `${parentParts.overview}\n\n${childParts.overview}`.trim()
            : parentParts.overview;
        // 5. Merge Phases
        const mergedPhases = new Map(parentParts.phases);
        for (const [id, content] of childParts.phases.entries()) {
            mergedPhases.set(id, content);
        }
        // 6. Reassemble
        let finalContent = `---\n${JSON.stringify(mergedMeta, null, 2)}\n---\n\n`;
        if (mergedOverview) {
            finalContent += `${mergedOverview}\n\n`;
        }
        const addedPhases = new Set();
        // First, add parent phases in order, using child overrides when present.
        for (const id of parentParts.phases.keys()) {
            if (mergedPhases.has(id)) {
                finalContent += `${mergedPhases.get(id)}\n\n`;
                addedPhases.add(id);
            }
        }
        // Then append child-only phases in the order the child declared them.
        for (const [id, content] of childParts.phases.entries()) {
            if (!addedPhases.has(id)) {
                finalContent += `${content}\n\n`;
                addedPhases.add(id);
            }
        }
        return finalContent.trim();
    }
|
|
138
242
|
/**
|
|
139
243
|
* Parse content and return detailed information about imports
|
|
140
244
|
*/
|
|
141
245
|
parse(content) {
|
|
142
246
|
const imports = this.extractImports(content);
|
|
247
|
+
const hasExtends = /^---\r?\n[\s\S]*?"extends":\s*"[^"]+"[\s\S]*?\r?\n---/m.test(content);
|
|
143
248
|
return {
|
|
144
249
|
content,
|
|
145
250
|
imports,
|
|
146
|
-
hasImports: imports.length > 0
|
|
251
|
+
hasImports: imports.length > 0 || hasExtends
|
|
147
252
|
};
|
|
148
253
|
}
|
|
254
|
+
normalizeImportRef(path) {
|
|
255
|
+
let normalized = path.trim().replace(/\\/g, '/').replace(/^\/+/, '');
|
|
256
|
+
if (normalized.endsWith('.md')) {
|
|
257
|
+
normalized = normalized.slice(0, -3);
|
|
258
|
+
}
|
|
259
|
+
return normalized;
|
|
260
|
+
}
|
|
261
|
+
stripTypePrefix(path) {
|
|
262
|
+
return path.replace(/^(jobs|workflows|skills|rules|templates)\//, '');
|
|
263
|
+
}
|
|
264
|
+
isEquivalentImportRef(left, right) {
|
|
265
|
+
const normalizedLeft = this.normalizeImportRef(left);
|
|
266
|
+
const normalizedRight = this.normalizeImportRef(right);
|
|
267
|
+
const strippedLeft = this.stripTypePrefix(normalizedLeft);
|
|
268
|
+
const strippedRight = this.stripTypePrefix(normalizedRight);
|
|
269
|
+
return normalizedLeft === normalizedRight ||
|
|
270
|
+
strippedLeft === strippedRight ||
|
|
271
|
+
normalizedLeft.endsWith(`/${strippedRight}`) ||
|
|
272
|
+
normalizedRight.endsWith(`/${strippedLeft}`) ||
|
|
273
|
+
strippedLeft.endsWith(`/${strippedRight}`) ||
|
|
274
|
+
strippedRight.endsWith(`/${strippedLeft}`);
|
|
275
|
+
}
|
|
276
|
+
stripRedundantParentImports(body, extendsPath) {
|
|
277
|
+
if (!extendsPath) {
|
|
278
|
+
return body;
|
|
279
|
+
}
|
|
280
|
+
return body
|
|
281
|
+
.replace(/\{\{\s*import:\s*([^\}]+)\s*\}\}\s*\r?\n?/g, (match, importPath) => {
|
|
282
|
+
return this.isEquivalentImportRef(importPath, extendsPath) ? '' : match;
|
|
283
|
+
})
|
|
284
|
+
.replace(/\n{3,}/g, '\n\n')
|
|
285
|
+
.trim();
|
|
286
|
+
}
|
|
149
287
|
}
|
|
150
288
|
exports.InheritanceParser = InheritanceParser;
|