fraim 2.0.100
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +445 -0
- package/bin/fraim.js +23 -0
- package/dist/src/cli/api/get-provider-client.js +41 -0
- package/dist/src/cli/api/provider-client.js +107 -0
- package/dist/src/cli/commands/add-ide.js +430 -0
- package/dist/src/cli/commands/add-provider.js +233 -0
- package/dist/src/cli/commands/doctor.js +149 -0
- package/dist/src/cli/commands/init-project.js +301 -0
- package/dist/src/cli/commands/list-overridable.js +184 -0
- package/dist/src/cli/commands/list.js +57 -0
- package/dist/src/cli/commands/login.js +84 -0
- package/dist/src/cli/commands/mcp.js +15 -0
- package/dist/src/cli/commands/migrate-project-fraim.js +42 -0
- package/dist/src/cli/commands/override.js +177 -0
- package/dist/src/cli/commands/setup.js +651 -0
- package/dist/src/cli/commands/sync.js +162 -0
- package/dist/src/cli/commands/test-mcp.js +171 -0
- package/dist/src/cli/doctor/check-runner.js +199 -0
- package/dist/src/cli/doctor/checks/global-setup-checks.js +220 -0
- package/dist/src/cli/doctor/checks/ide-config-checks.js +250 -0
- package/dist/src/cli/doctor/checks/mcp-connectivity-checks.js +381 -0
- package/dist/src/cli/doctor/checks/project-setup-checks.js +282 -0
- package/dist/src/cli/doctor/checks/scripts-checks.js +157 -0
- package/dist/src/cli/doctor/checks/workflow-checks.js +251 -0
- package/dist/src/cli/doctor/reporters/console-reporter.js +96 -0
- package/dist/src/cli/doctor/reporters/json-reporter.js +11 -0
- package/dist/src/cli/doctor/types.js +6 -0
- package/dist/src/cli/fraim.js +100 -0
- package/dist/src/cli/internal/device-flow-service.js +83 -0
- package/dist/src/cli/mcp/ide-formats.js +243 -0
- package/dist/src/cli/mcp/mcp-server-builder.js +48 -0
- package/dist/src/cli/mcp/mcp-server-registry.js +160 -0
- package/dist/src/cli/mcp/types.js +3 -0
- package/dist/src/cli/providers/local-provider-registry.js +166 -0
- package/dist/src/cli/providers/provider-registry.js +230 -0
- package/dist/src/cli/setup/auto-mcp-setup.js +331 -0
- package/dist/src/cli/setup/codex-local-config.js +37 -0
- package/dist/src/cli/setup/first-run.js +242 -0
- package/dist/src/cli/setup/ide-detector.js +179 -0
- package/dist/src/cli/setup/mcp-config-generator.js +192 -0
- package/dist/src/cli/setup/provider-prompts.js +339 -0
- package/dist/src/cli/utils/agent-adapters.js +126 -0
- package/dist/src/cli/utils/digest-utils.js +47 -0
- package/dist/src/cli/utils/fraim-gitignore.js +40 -0
- package/dist/src/cli/utils/platform-detection.js +258 -0
- package/dist/src/cli/utils/project-bootstrap.js +93 -0
- package/dist/src/cli/utils/remote-sync.js +315 -0
- package/dist/src/cli/utils/script-sync-utils.js +221 -0
- package/dist/src/cli/utils/version-utils.js +32 -0
- package/dist/src/core/ai-mentor.js +230 -0
- package/dist/src/core/config-loader.js +114 -0
- package/dist/src/core/config-writer.js +75 -0
- package/dist/src/core/types.js +23 -0
- package/dist/src/core/utils/git-utils.js +95 -0
- package/dist/src/core/utils/include-resolver.js +92 -0
- package/dist/src/core/utils/inheritance-parser.js +288 -0
- package/dist/src/core/utils/job-parser.js +176 -0
- package/dist/src/core/utils/local-registry-resolver.js +616 -0
- package/dist/src/core/utils/object-utils.js +11 -0
- package/dist/src/core/utils/project-fraim-migration.js +103 -0
- package/dist/src/core/utils/project-fraim-paths.js +38 -0
- package/dist/src/core/utils/provider-utils.js +18 -0
- package/dist/src/core/utils/server-startup.js +34 -0
- package/dist/src/core/utils/stub-generator.js +147 -0
- package/dist/src/core/utils/workflow-parser.js +174 -0
- package/dist/src/local-mcp-server/learning-context-builder.js +229 -0
- package/dist/src/local-mcp-server/stdio-server.js +1698 -0
- package/dist/src/local-mcp-server/usage-collector.js +264 -0
- package/index.js +85 -0
- package/package.json +139 -0
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* FRAIM Configuration Types
|
|
4
|
+
* TypeScript types for the workspace FRAIM config file.
|
|
5
|
+
*/
|
|
6
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
7
|
+
exports.DEFAULT_FRAIM_CONFIG = void 0;
|
|
8
|
+
/**
|
|
9
|
+
* Default configuration values
|
|
10
|
+
*/
|
|
11
|
+
// Baseline configuration applied when a workspace has no FRAIM config of its own.
// Shape mirrors the workspace FRAIM config file: version + project + repository + customizations.
const defaultConfig = {
    version: '2.0.47', // config schema version these defaults were authored against
    project: {
        name: 'Untitled Project'
    },
    repository: {
        provider: 'github', // default hosting provider
        owner: '',
        name: '',
        defaultBranch: 'main'
    },
    customizations: {} // no overrides out of the box
};
exports.DEFAULT_FRAIM_CONFIG = defaultConfig;
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.getPort = getPort;
|
|
4
|
+
exports.determineDatabaseName = determineDatabaseName;
|
|
5
|
+
exports.getCurrentGitBranch = getCurrentGitBranch;
|
|
6
|
+
exports.determineSchema = determineSchema;
|
|
7
|
+
exports.getDefaultBranch = getDefaultBranch;
|
|
8
|
+
const child_process_1 = require("child_process");
|
|
9
|
+
/**
 * Gets a unique port based on the current git branch name (if it's an issue branch).
 * The branch is taken from FRAIM_BRANCH when set, otherwise from `git rev-parse`.
 * Defaults to PORT / WEBSITES_PORT / FRAIM_MCP_PORT, then 15302, if not on an
 * issue branch or if git is unavailable.
 *
 * @returns {number} Port in the range 10000-65534 for issue branches, or a fallback port.
 */
function getPort() {
    try {
        // FIX: pass the same timeout/stdio options as getCurrentGitBranch so a
        // wedged git process cannot hang the CLI and git's stderr stays quiet.
        const branchName = process.env.FRAIM_BRANCH || (0, child_process_1.execSync)('git rev-parse --abbrev-ref HEAD', {
            timeout: 2000, // 2 second timeout
            stdio: 'pipe'
        }).toString().trim();
        // Match issue-123 or 123-feature-name or feature/123-name
        const issueMatch = branchName.match(/issue-(\d+)/i) || branchName.match(/(\d+)-/);
        if (issueMatch) {
            const issueNum = parseInt(issueMatch[1], 10);
            // Ensure port is in a safe range (10000-65535)
            return 10000 + (issueNum % 55535);
        }
    }
    catch (e) {
        // Silently fail and use default
    }
    return Number(process.env.PORT) || Number(process.env.WEBSITES_PORT) || Number(process.env.FRAIM_MCP_PORT) || 15302;
}
|
|
29
|
+
/**
 * Determines the database name based on the git branch.
 * Issue branches get an isolated `fraim_issue_<n>` database; otherwise falls
 * back to MONGODB_DB_NAME, then an environment-based default.
 *
 * @returns {string} Database name.
 */
function determineDatabaseName() {
    try {
        // FIX: pass the same timeout/stdio options as getCurrentGitBranch so a
        // wedged git process cannot hang the CLI and git's stderr stays quiet.
        const branchName = process.env.FRAIM_BRANCH || process.env.FRAIM_BRANCH_NAME || (0, child_process_1.execSync)('git rev-parse --abbrev-ref HEAD', {
            timeout: 2000, // 2 second timeout
            stdio: 'pipe'
        }).toString().trim();
        // Same branch conventions as getPort: issue-123 or 123-feature-name
        const issueMatch = branchName.match(/issue-(\d+)/i) || branchName.match(/(\d+)-/);
        if (issueMatch) {
            return `fraim_issue_${issueMatch[1]}`;
        }
    }
    catch (e) {
        // Silently fail
    }
    return process.env.MONGODB_DB_NAME || (process.env.NODE_ENV === 'production' ? 'fraim_prod' : 'fraim_dev');
}
|
|
45
|
+
/**
 * Gets the current git branch name.
 * @returns {string} The branch reported by `git rev-parse --abbrev-ref HEAD`,
 *   or 'master' when git fails, is missing, or times out.
 */
function getCurrentGitBranch() {
    // Bounded wait so the CLI never hangs; piped stdio keeps git noise off the console.
    const execOptions = { timeout: 2000, stdio: 'pipe' };
    try {
        const output = (0, child_process_1.execSync)('git rev-parse --abbrev-ref HEAD', execOptions);
        return output.toString().trim();
    }
    catch (err) {
        // Not a git repo / git unavailable — fall back to a common default.
        return 'master';
    }
}
|
|
59
|
+
/**
 * Determines the database schema prefix based on the branch.
 *
 * @param {string} branchName - Git branch name to inspect.
 * @returns {string} `issue_<n>` for issue branches, otherwise 'prod'.
 */
function determineSchema(branchName) {
    // Same branch conventions as getPort: "issue-123" or a "123-" numeric segment.
    const numbered = branchName.match(/issue-(\d+)/i) ?? branchName.match(/(\d+)-/);
    return numbered ? `issue_${numbered[1]}` : 'prod';
}
|
|
69
|
+
/**
 * Gets the default branch name from git remote.
 * Resolution order: origin's HEAD ref, then the current local branch, then 'main'.
 * Note: if the remote HEAD is readable but doesn't match the expected ref shape,
 * this falls straight through to 'main' without consulting the current branch.
 *
 * @returns {string} Best guess at the repository's default branch.
 */
function getDefaultBranch() {
    try {
        // Ask git which branch origin/HEAD points at (e.g. refs/remotes/origin/main).
        const remoteHead = (0, child_process_1.execSync)('git symbolic-ref refs/remotes/origin/HEAD', {
            timeout: 2000, // bounded wait so the CLI never hangs on git
            stdio: 'pipe'  // keep git's stderr off the console
        }).toString().trim();
        const parsed = /refs\/remotes\/origin\/(.+)$/.exec(remoteHead);
        if (parsed) {
            return parsed[1];
        }
    }
    catch (e) {
        // No remote HEAD available — fall back to the branch we're on now.
        try {
            return getCurrentGitBranch();
        }
        catch (e2) {
            // Fall through to the common default below.
        }
    }
    // Default fallback
    return 'main';
}
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Resolve {{include:path}} directives in registry content.
|
|
4
|
+
*
|
|
5
|
+
* Used by: MCP service (get_fraim_file, get_fraim_workflow), AI Mentor (phase instructions),
|
|
6
|
+
* and any other server-side content that needs includes resolved.
|
|
7
|
+
*
|
|
8
|
+
* - Resolves recursively (e.g. skills can include other skills)
|
|
9
|
+
* - MAX_PASSES prevents infinite loops from circular includes
|
|
10
|
+
*/
|
|
11
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
|
+
exports.MAX_INCLUDE_PASSES = void 0;
|
|
13
|
+
exports.resolveIncludesWithIndex = resolveIncludesWithIndex;
|
|
14
|
+
exports.resolveIncludes = resolveIncludes;
|
|
15
|
+
const fs_1 = require("fs");
|
|
16
|
+
/** Maximum resolution passes to prevent infinite loops from circular includes */
|
|
17
|
+
exports.MAX_INCLUDE_PASSES = 10;
|
|
18
|
+
/**
 * Resolve {{include:path}} directives in content.
 * Each directive's path is looked up in fileIndex and the referenced file's
 * contents are inlined. Passes repeat until no directives remain, bounded by
 * MAX_INCLUDE_PASSES so circular includes cannot loop forever. Directives that
 * cannot be resolved (unknown path, unreadable file) are left in place verbatim.
 *
 * @param content - Raw content that may contain {{include:path}} directives
 * @param fileIndex - Map of path -> { fullPath, ... } for registry files
 * @returns Content with all resolvable includes inlined
 */
function resolveIncludesWithIndex(content, fileIndex) {
    let resolved = content;
    let pass = 0;
    while (resolved.includes('{{include:') && pass < exports.MAX_INCLUDE_PASSES) {
        resolved = resolved.replace(/\{\{include:([^}]+)\}\}/g, (directive, rawPath) => {
            const includePath = rawPath.trim();
            const entry = fileIndex.get(includePath);
            if (!entry?.fullPath) {
                // Unknown path: warn and keep the directive as-is.
                console.warn(`⚠️ Include file not found in fileIndex: ${includePath}`);
                return directive;
            }
            try {
                return (0, fs_1.readFileSync)(entry.fullPath, 'utf-8');
            }
            catch (error) {
                // Unreadable file: report and keep the directive as-is.
                console.error(`❌ Failed to read included file: ${includePath}`, error);
                return directive;
            }
        });
        pass++;
    }
    return resolved;
}
|
|
49
|
+
/**
 * Asynchronously resolve {{include:path}} directives in content using a RegistryResolver.
 *
 * Each unique directive is fetched via resolver.getFile, its own includes are
 * resolved recursively, and then every occurrence of that directive in the
 * content is replaced. Unresolvable directives (resolver returns null or
 * throws) are logged and left in place. The outer pass loop is bounded by
 * MAX_INCLUDE_PASSES to stop circular includes from looping forever.
 *
 * @param content - Raw content that may contain {{include:path}} directives
 * @param resolver - RegistryResolver instance (must expose async getFile(path))
 * @param basePath - Path of the file the content came from; used to resolve
 *   './'-relative include paths against that file's directory
 * @returns Content with all resolvable includes inlined
 */
async function resolveIncludes(content, resolver, basePath) {
    let result = content;
    let pass = 0;
    while (result.includes('{{include:') && pass < exports.MAX_INCLUDE_PASSES) {
        // Collect all unique includes in this pass
        const matches = result.match(/\{\{include:([^}]+)\}\}/g);
        if (!matches)
            break;
        const uniqueMatches = Array.from(new Set(matches));
        for (const match of uniqueMatches) {
            // Re-extract the path from the directive text (safe: `match` came from the same pattern)
            const filePath = match.match(/\{\{include:([^}]+)\}\}/)[1].trim();
            let targetPath = filePath;
            if (filePath.startsWith('./') && basePath) {
                // Resolve relative to the directory of the current file
                const dir = basePath.includes('/') ? basePath.substring(0, basePath.lastIndexOf('/')) : '';
                targetPath = dir ? `${dir}/${filePath.substring(2)}` : filePath.substring(2);
            }
            try {
                const fileContent = await resolver.getFile(targetPath);
                if (fileContent !== null) {
                    // Recursively resolve includes in the newly fetched content
                    const resolvedContent = await resolveIncludes(fileContent, resolver, targetPath);
                    // Replace all occurrences of this specific include
                    // (split/join avoids String.replace's special `$` replacement patterns)
                    result = result.split(match).join(resolvedContent);
                }
                else {
                    console.warn(`⚠️ Include file not found via resolver: ${targetPath} (original: ${filePath}, base: ${basePath})`);
                }
            }
            catch (error) {
                // Best-effort: log and leave this directive unresolved rather than failing the whole document.
                console.error(`❌ Failed to resolve include via resolver: ${targetPath}`, error);
            }
        }
        pass++;
    }
    return result;
}
|
|
@@ -0,0 +1,288 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* InheritanceParser
|
|
4
|
+
*
|
|
5
|
+
* Parses and resolves {{ import: path }} directives in registry files,
|
|
6
|
+
* enabling local overrides to inherit from global registry files.
|
|
7
|
+
*
|
|
8
|
+
* Security features:
|
|
9
|
+
* - Path traversal protection (rejects .. and absolute paths)
|
|
10
|
+
* - Circular import detection
|
|
11
|
+
* - Max depth limit (5 levels)
|
|
12
|
+
*/
|
|
13
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
14
|
+
exports.InheritanceParser = exports.InheritanceError = void 0;
|
|
15
|
+
/**
 * Error raised for invalid, circular, or unresolvable inheritance references.
 * Carries the offending registry path (when known) on `path`.
 */
class InheritanceError extends Error {
    /**
     * @param {string} message - Human-readable description of the failure.
     * @param {string} [path] - Registry path that triggered the error, if any.
     */
    constructor(message, path) {
        super(message);
        this.name = 'InheritanceError';
        this.path = path;
    }
}
|
|
22
|
+
exports.InheritanceError = InheritanceError;
|
|
23
|
+
/**
|
|
24
|
+
* Regular expression to match {{ import: path }} directives
|
|
25
|
+
*/
|
|
26
|
+
const IMPORT_REGEX = /\{\{\s*import:\s*([^\}]+)\s*\}\}/g;
|
|
27
|
+
/**
 * Parses and resolves inheritance in registry files: `{{ import: path }}`
 * directives and JSON-frontmatter `extends` references. See the module header
 * for the security guarantees (path sanitization, circular detection, depth cap).
 */
class InheritanceParser {
    /**
     * @param {number} [maxDepth=5] - Default maximum inheritance depth used by resolve()
     *   when options.maxDepth is not supplied.
     */
    constructor(maxDepth = 5) {
        this.maxDepth = maxDepth;
    }
    /**
     * Extract import directives from content without resolving them
     *
     * @param {string} content - Raw registry content.
     * @returns {string[]} Trimmed import paths, in document order.
     */
    extractImports(content) {
        const imports = [];
        let match;
        // Reset regex state (IMPORT_REGEX is a shared /g regex; exec() advances lastIndex)
        IMPORT_REGEX.lastIndex = 0;
        while ((match = IMPORT_REGEX.exec(content)) !== null) {
            imports.push(match[1].trim());
        }
        return imports;
    }
    /**
     * Sanitize and validate import path
     *
     * @param {string} path - Raw path from an import/extends directive.
     * @returns {string} The trimmed, validated path.
     * @throws {InheritanceError} If path is empty, absolute, or contains '..'
     *   (note: any '..' substring is rejected, including names like "foo..bar").
     */
    sanitizePath(path) {
        const trimmed = path.trim();
        // Reject empty paths
        if (!trimmed) {
            throw new InheritanceError('Import path cannot be empty');
        }
        // Reject absolute paths (Unix and Windows)
        if (trimmed.startsWith('/') || trimmed.match(/^[A-Za-z]:\\/)) {
            throw new InheritanceError(`Absolute paths not allowed: ${trimmed}`, trimmed);
        }
        // Reject path traversal attempts
        if (trimmed.includes('..')) {
            throw new InheritanceError(`Path traversal not allowed: ${trimmed}`, trimmed);
        }
        return trimmed;
    }
    /**
     * Detect circular imports
     *
     * Special case: If the import path is the same as the current path,
     * it's not a circular import - it's importing the parent/remote version.
     * This allows local overrides to inherit from their remote counterparts.
     *
     * @param {string} path - Path about to be resolved.
     * @param {Set<string>} visited - Paths already on the resolution stack.
     * @param {boolean} [isParentImport=false] - Skip the check entirely when true.
     * @throws {InheritanceError} If circular import detected
     */
    detectCircularImport(path, visited, isParentImport = false) {
        // If this is a parent import (same path as current), allow it
        if (isParentImport) {
            return;
        }
        if (visited.has(path)) {
            throw new InheritanceError(`Circular import detected: ${path}`, path);
        }
    }
    /**
     * Resolve all import and extends directives in content recursively
     *
     * @param content - Content with {{ import }} or extends frontmatter
     * @param currentPath - Path of current file (for circular detection)
     * @param options - Resolution options: fetchParent(path) async loader (required),
     *   plus optional currentDepth, visited (Set), and maxDepth overrides.
     * @returns Resolved content with all inheritance applied
     *
     * @throws {InheritanceError} If circular inheritance, path traversal, or max depth exceeded
     */
    async resolve(content, currentPath, options) {
        const depth = options.currentDepth || 0;
        const visited = options.visited || new Set();
        const maxDepth = options.maxDepth || this.maxDepth;
        // Check depth limit
        if (depth > maxDepth) {
            throw new InheritanceError(`Max inheritance depth exceeded (${maxDepth})`, currentPath);
        }
        // Check circular inheritance (but allow importing/extending the same path as parent)
        this.detectCircularImport(currentPath, visited, false);
        visited.add(currentPath);
        let resolvedContent = content;
        // 1. Handle JSON frontmatter 'extends'
        const metadataMatch = resolvedContent.match(/^---\r?\n([\s\S]+?)\r?\n---/);
        if (metadataMatch) {
            try {
                const metadata = JSON.parse(metadataMatch[1]);
                const extendsPath = metadata.extends;
                if (extendsPath && typeof extendsPath === 'string') {
                    // Sanitize path
                    const sanitizedExtends = this.sanitizePath(extendsPath);
                    const isParentExtends = sanitizedExtends === currentPath;
                    // Fetch parent content
                    let parentContent;
                    try {
                        parentContent = await options.fetchParent(sanitizedExtends);
                    }
                    catch (error) {
                        throw new InheritanceError(`Failed to fetch extended parent content: ${sanitizedExtends}. ${error.message}`, sanitizedExtends);
                    }
                    // Recursively resolve parent.
                    // Parent-extends gets a fresh visited set so the same path can appear
                    // once per "layer" (local override extending its remote counterpart).
                    const parentVisited = isParentExtends ? new Set() : new Set(visited);
                    const resolvedParent = await this.resolve(parentContent, sanitizedExtends, {
                        ...options,
                        currentDepth: depth + 1,
                        visited: parentVisited
                    });
                    // Merge current content with resolved parent
                    resolvedContent = this.mergeContent(resolvedContent, resolvedParent);
                }
            }
            catch (error) {
                if (error instanceof SyntaxError) {
                    // Not JSON or invalid JSON, ignore extends logic but log it
                    console.warn(`[InheritanceParser] Failed to parse frontmatter for ${currentPath}: ${error.message}`);
                }
                else {
                    // InheritanceError (and anything else) propagates to the caller
                    throw error;
                }
            }
        }
        // 2. Handle {{ import: path }}
        const imports = this.extractImports(resolvedContent);
        for (const importPath of imports) {
            // Sanitize path
            const sanitized = this.sanitizePath(importPath);
            // Check if this is a parent import (same path as current)
            const isParentImport = sanitized === currentPath;
            // Fetch parent content
            let parentContent;
            try {
                parentContent = await options.fetchParent(sanitized);
            }
            catch (error) {
                throw new InheritanceError(`Failed to fetch parent content: ${sanitized}. ${error.message}`, sanitized);
            }
            // Recursively resolve parent imports
            const parentVisited = isParentImport ? new Set() : new Set(visited);
            const resolvedParent = await this.resolve(parentContent, sanitized, {
                ...options,
                currentDepth: depth + 1,
                visited: parentVisited
            });
            // Replace import directive with resolved parent content.
            // NOTE(review): this rebuilds the directive with exact `{{ import: X }}`
            // spacing, but IMPORT_REGEX accepts variable whitespace, so a directive
            // written as e.g. `{{import:X}}` would not be replaced — confirm intended.
            // NOTE(review): String.replace with a string replacement interprets
            // `$`-patterns ($&, $$) in resolvedParent — consider a function replacement.
            const importDirective = `{{ import: ${importPath} }}`;
            resolvedContent = resolvedContent.replace(importDirective, resolvedParent);
        }
        return resolvedContent;
    }
    /**
     * Merge two registry files (child override + parent base)
     *
     * Merging rules:
     * 1. Metadata: JSON merge (child overrides parent)
     * 2. Overview: Parent overview + child overview (if multi-para)
     * 3. Phases: Phase override (child phase with same ID replaces parent phase)
     *
     * @param {string} child - Child (override) document with JSON frontmatter.
     * @param {string} parent - Parent (base) document with JSON frontmatter.
     * @returns {string} Merged document; returns child unchanged if either side
     *   lacks frontmatter.
     */
    mergeContent(child, parent) {
        const childMatch = child.match(/^---\r?\n([\s\S]+?)\r?\n---/);
        const parentMatch = parent.match(/^---\r?\n([\s\S]+?)\r?\n---/);
        if (!childMatch || !parentMatch)
            return child;
        // 1. Merge Metadata (shallow: child keys win)
        const childMeta = JSON.parse(childMatch[1]);
        const parentMeta = JSON.parse(parentMatch[1]);
        const mergedMeta = { ...parentMeta, ...childMeta };
        delete mergedMeta.extends; // Remove extends from final merged content
        // 2. Extract Body (everything after frontmatter); drop child imports that
        // just re-import the parent it already extends (they'd duplicate content)
        const childBody = this.stripRedundantParentImports(child.substring(childMatch[0].length).trim(), typeof childMeta.extends === 'string' ? childMeta.extends : undefined);
        const parentBody = parent.substring(parentMatch[0].length).trim();
        // 3. Parse Phases and Overview
        const parsePhases = (body) => {
            const phases = new Map();
            const sections = body.split(/^##\s+Phase:\s+/m);
            const overview = sections[0]?.trim() || '';
            for (let i = 1; i < sections.length; i++) {
                const section = sections[i];
                if (!section.trim())
                    continue;
                // Extract ID from first line: e.g. "implement-scoping (Primary)" -> "implement-scoping"
                const firstLine = section.split(/\r?\n/)[0].trim();
                const id = firstLine.split(/[ (]/)[0].trim().toLowerCase();
                phases.set(id, `## Phase: ${section.trim()}`);
            }
            return { overview, phases };
        };
        const childParts = parsePhases(childBody);
        const parentParts = parsePhases(parentBody);
        // 4. Merge Overview: retain the parent framing, then append local overview additions.
        const mergedOverview = childParts.overview
            ? `${parentParts.overview}\n\n${childParts.overview}`.trim()
            : parentParts.overview;
        // 5. Merge Phases (child phase with same ID replaces parent phase)
        const mergedPhases = new Map(parentParts.phases);
        for (const [id, content] of childParts.phases.entries()) {
            mergedPhases.set(id, content);
        }
        // 6. Reassemble
        let finalContent = `---\n${JSON.stringify(mergedMeta, null, 2)}\n---\n\n`;
        if (mergedOverview) {
            finalContent += `${mergedOverview}\n\n`;
        }
        const addedPhases = new Set();
        // First, add parent phases in order, using child overrides when present.
        for (const id of parentParts.phases.keys()) {
            if (mergedPhases.has(id)) {
                finalContent += `${mergedPhases.get(id)}\n\n`;
                addedPhases.add(id);
            }
        }
        // Then append child-only phases in the order the child declared them.
        for (const [id, content] of childParts.phases.entries()) {
            if (!addedPhases.has(id)) {
                finalContent += `${content}\n\n`;
                addedPhases.add(id);
            }
        }
        return finalContent.trim();
    }
    /**
     * Parse content and return detailed information about imports
     *
     * @param {string} content - Raw registry content.
     * @returns {{content: string, imports: string[], hasImports: boolean}}
     *   hasImports is true when import directives exist OR the frontmatter
     *   declares an "extends" string.
     */
    parse(content) {
        const imports = this.extractImports(content);
        const hasExtends = /^---\r?\n[\s\S]*?"extends":\s*"[^"]+"[\s\S]*?\r?\n---/m.test(content);
        return {
            content,
            imports,
            hasImports: imports.length > 0 || hasExtends
        };
    }
    /**
     * Normalize an import reference for comparison: forward slashes, no leading
     * slashes, and no trailing '.md' extension.
     */
    normalizeImportRef(path) {
        let normalized = path.trim().replace(/\\/g, '/').replace(/^\/+/, '');
        if (normalized.endsWith('.md')) {
            normalized = normalized.slice(0, -3);
        }
        return normalized;
    }
    /**
     * Drop a leading registry-type directory (jobs/, workflows/, skills/,
     * rules/, templates/) from a normalized reference.
     */
    stripTypePrefix(path) {
        return path.replace(/^(jobs|workflows|skills|rules|templates)\//, '');
    }
    /**
     * Loose equivalence between two import references: equal after
     * normalization, equal after type-prefix stripping, or one being a
     * path-suffix of the other (so "a/b/c" matches "c" and "jobs/c").
     */
    isEquivalentImportRef(left, right) {
        const normalizedLeft = this.normalizeImportRef(left);
        const normalizedRight = this.normalizeImportRef(right);
        const strippedLeft = this.stripTypePrefix(normalizedLeft);
        const strippedRight = this.stripTypePrefix(normalizedRight);
        return normalizedLeft === normalizedRight ||
            strippedLeft === strippedRight ||
            normalizedLeft.endsWith(`/${strippedRight}`) ||
            normalizedRight.endsWith(`/${strippedLeft}`) ||
            strippedLeft.endsWith(`/${strippedRight}`) ||
            strippedRight.endsWith(`/${strippedLeft}`);
    }
    /**
     * Remove {{ import: ... }} directives from a child body when they reference
     * the same file the child already extends (they would duplicate the parent
     * content after merging). Collapses the resulting blank-line runs.
     */
    stripRedundantParentImports(body, extendsPath) {
        if (!extendsPath) {
            return body;
        }
        return body
            .replace(/\{\{\s*import:\s*([^\}]+)\s*\}\}\s*\r?\n?/g, (match, importPath) => {
            return this.isEquivalentImportRef(importPath, extendsPath) ? '' : match;
        })
            .replace(/\n{3,}/g, '\n\n')
            .trim();
    }
}
|
|
288
|
+
exports.InheritanceParser = InheritanceParser;
|
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.JobParser = void 0;
|
|
4
|
+
const fs_1 = require("fs");
|
|
5
|
+
const path_1 = require("path");
|
|
6
|
+
/**
 * Parses job markdown files into structured definitions (metadata, overview,
 * and a map of "## Phase:" sections).
 */
class JobParser {
    /**
     * Locate and parse the job's metadata block.
     *
     * Accepts either `---`-delimited JSON frontmatter (possibly preceded by
     * comments/whitespace) or a bare leading JSON object, scanned with a small
     * brace/string/escape-aware state machine.
     *
     * @param {string} content - Full job file content.
     * @returns {{state: 'valid', metadata: object, bodyStartIndex: number}
     *   | {state: 'invalid'} | {state: 'none'}} 'invalid' means a block was
     *   found but its JSON did not parse; 'none' means no metadata block.
     */
    static extractMetadataBlock(content) {
        // Allow leading comments and whitespace before frontmatter
        const frontmatterMatch = content.match(/^[\s\S]*?---\r?\n([\s\S]+?)\r?\n---/);
        if (frontmatterMatch) {
            try {
                const startIndex = frontmatterMatch.index || 0;
                return {
                    state: 'valid',
                    metadata: JSON.parse(frontmatterMatch[1]),
                    bodyStartIndex: startIndex + frontmatterMatch[0].length
                };
            }
            catch {
                return { state: 'invalid' };
            }
        }
        // No frontmatter: look for a bare JSON object at the first non-whitespace char
        const trimmedStart = content.search(/\S/);
        if (trimmedStart === -1 || content[trimmedStart] !== '{') {
            return { state: 'none' };
        }
        // Manual scan to find the matching closing brace, honoring JSON strings
        // and backslash escapes so braces inside strings don't affect the depth.
        let depth = 0;
        let inString = false;
        let escaping = false;
        for (let i = trimmedStart; i < content.length; i++) {
            const ch = content[i];
            if (inString) {
                if (escaping) {
                    escaping = false;
                }
                else if (ch === '\\') {
                    escaping = true;
                }
                else if (ch === '"') {
                    inString = false;
                }
                continue;
            }
            if (ch === '"') {
                inString = true;
                continue;
            }
            if (ch === '{') {
                depth++;
                continue;
            }
            if (ch === '}') {
                depth--;
                if (depth === 0) {
                    const bodyStartIndex = i + 1;
                    const remainder = content.slice(bodyStartIndex).trimStart();
                    // `{...}\n---` is usually malformed frontmatter, not bare JSON metadata.
                    if (remainder.startsWith('---')) {
                        return { state: 'none' };
                    }
                    try {
                        return {
                            state: 'valid',
                            metadata: JSON.parse(content.slice(trimmedStart, bodyStartIndex)),
                            bodyStartIndex
                        };
                    }
                    catch {
                        return { state: 'invalid' };
                    }
                }
            }
        }
        // Unterminated object (or no object at all)
        return { state: 'none' };
    }
    /**
     * Parse a job markdown file into a structured definition
     * Supports three formats:
     * 1. Phase-based jobs with JSON frontmatter
     * 2. Phase-based jobs with bare leading JSON metadata
     * 3. Simple jobs without metadata
     *
     * @param {string} filePath - Path to the job markdown file.
     * @returns {object|null} Parsed job definition, or null if the file is
     *   missing or its metadata block is invalid JSON.
     */
    static parse(filePath) {
        if (!(0, fs_1.existsSync)(filePath))
            return null;
        let content = (0, fs_1.readFileSync)(filePath, 'utf-8');
        // Strip a UTF-8 BOM so it doesn't break metadata detection
        if (content.charCodeAt(0) === 0xfeff) {
            content = content.slice(1);
        }
        const metadataBlock = this.extractMetadataBlock(content);
        if (metadataBlock.state === 'invalid') {
            return null;
        }
        if (metadataBlock.state === 'valid') {
            return this.parsePhaseBasedJob(filePath, content, metadataBlock.metadata, metadataBlock.bodyStartIndex);
        }
        return this.parseSimpleJob(filePath, content);
    }
    /**
     * Build a phase-based job definition: text before the first "## Phase:"
     * heading becomes the overview; each phase section is keyed by the
     * lowercased first token of its heading line.
     */
    static parsePhaseBasedJob(filePath, content, metadata, bodyStartIndex) {
        const contentAfterMetadata = content.substring(bodyStartIndex).trim();
        const firstPhaseIndex = contentAfterMetadata.search(/^##\s+Phase:/m);
        let overview = '';
        let restOfContent = '';
        if (firstPhaseIndex !== -1) {
            overview = contentAfterMetadata.substring(0, firstPhaseIndex).trim();
            restOfContent = contentAfterMetadata.substring(firstPhaseIndex);
        }
        else {
            // No phase headings: the whole body is the overview
            overview = contentAfterMetadata;
        }
        const phases = new Map();
        const phaseSections = restOfContent.split(/^##\s+Phase:\s*/im);
        // Ensure metadata.phases always exists for downstream consumers
        if (!metadata.phases) {
            metadata.phases = {};
        }
        for (let i = 1; i < phaseSections.length; i++) {
            const section = phaseSections[i];
            const sectionLines = section.split('\n');
            const firstLine = sectionLines[0].trim();
            // e.g. "implement-scoping (Primary)" -> id "implement-scoping"
            const id = firstLine.split(/[ (]/)[0].trim().toLowerCase();
            phases.set(id, `## Phase: ${section.trim()}`);
        }
        return {
            metadata,
            overview,
            phases,
            isSimple: false,
            path: filePath
        };
    }
    /**
     * Build a simple (metadata-less) job definition: the whole file is the
     * overview and the job name is derived from the filename.
     */
    static parseSimpleJob(filePath, content) {
        const jobName = (0, path_1.basename)(filePath, '.md');
        const metadata = {
            name: jobName
        };
        return {
            metadata,
            overview: content.trim(),
            phases: new Map(),
            isSimple: true,
            path: filePath
        };
    }
    /**
     * Parse job content held in memory (no file I/O). Same three formats as
     * parse(); `path` defaults to a synthetic "content:<name>" identifier.
     *
     * @returns {object|null} Parsed job, or null on invalid metadata JSON.
     */
    static parseContent(content, name, path) {
        // Strip a UTF-8 BOM so it doesn't break metadata detection
        if (content.charCodeAt(0) === 0xfeff) {
            content = content.slice(1);
        }
        const metadataBlock = this.extractMetadataBlock(content);
        if (metadataBlock.state === 'invalid') {
            return null;
        }
        if (metadataBlock.state === 'valid') {
            return this.parsePhaseBasedJob(path || `content:${name}`, content, metadataBlock.metadata, metadataBlock.bodyStartIndex);
        }
        return this.parseSimpleJob(path || `content:${name}`, content);
    }
    /** Overview text of in-memory job content, or null if it fails to parse. */
    static getOverviewFromContent(content, name) {
        const job = this.parseContent(content, name);
        return job ? job.overview : null;
    }
    /** Overview text of a job file, or null if it fails to parse. */
    static getOverview(filePath) {
        const job = this.parse(filePath);
        return job ? job.overview : null;
    }
    /**
     * Short description of a job file: the first line of its "## Intent"
     * section if present, otherwise the first non-heading, non-empty line of
     * the overview. Empty string when the file cannot be parsed.
     */
    static extractDescription(filePath) {
        const job = this.parse(filePath);
        if (!job)
            return '';
        const intentMatch = job.overview.match(/## Intent\s+([\s\S]+?)(?:\r?\n##|$)/);
        if (intentMatch)
            return intentMatch[1].trim().split(/\r?\n/)[0];
        const firstPara = job.overview.split(/\r?\n/).find(l => l.trim() !== '' && !l.startsWith('#'));
        return firstPara ? firstPara.trim() : '';
    }
}
|
|
176
|
+
exports.JobParser = JobParser;
|