@xelth/eck-snapshot 4.0.0 → 4.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +13 -3
- package/setup.json +57 -35
- package/src/cli/cli.js +81 -134
- package/src/cli/commands/autoDocs.js +1 -32
- package/src/cli/commands/createSnapshot.js +338 -198
- package/src/cli/commands/doctor.js +60 -0
- package/src/cli/commands/setupGemini.js +1 -1
- package/src/cli/commands/setupGemini.test.js +1 -1
- package/src/cli/commands/showFile.js +39 -0
- package/src/cli/commands/updateSnapshot.js +75 -0
- package/src/config.js +44 -0
- package/src/core/skeletonizer.js +201 -0
- package/src/services/claudeCliService.js +5 -0
- package/src/templates/agent-prompt.template.md +104 -7
- package/src/templates/architect-prompt.template.md +112 -23
- package/src/templates/multiAgent.md +40 -86
- package/src/templates/skeleton-instruction.md +16 -0
- package/src/templates/update-prompt.template.md +19 -0
- package/src/utils/aiHeader.js +373 -147
- package/src/utils/eckProtocolParser.js +221 -0
- package/src/utils/fileUtils.js +212 -175
- package/src/utils/gitUtils.js +44 -0
- package/src/utils/tokenEstimator.js +4 -1
- package/src/cli/commands/askGpt.js +0 -29
- package/src/services/authService.js +0 -20
- package/src/services/dispatcherService.js +0 -33
- package/src/services/gptService.js +0 -302
- package/src/services/gptService.test.js +0 -120
- package/src/templates/vectorMode.md +0 -22
package/src/cli/commands/doctor.js
ADDED

@@ -0,0 +1,60 @@
import fs from 'fs/promises';
import path from 'path';
import chalk from 'chalk';

/**
 * Scans .eck directory for files containing [STUB] markers
 */
export async function runDoctor(repoPath = process.cwd()) {
  const eckDir = path.join(repoPath, '.eck');
  console.log(chalk.blue('Checking project health and manifest integrity...'));

  try {
    await fs.access(eckDir);
  } catch {
    console.log(chalk.yellow('⚠️ .eck directory not found. Nothing to check.'));
    return;
  }

  const stubFiles = [];
  const scannedFiles = [];

  async function scan(dir) {
    const entries = await fs.readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
      const fullPath = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        await scan(fullPath);
      } else if (entry.isFile() && (entry.name.endsWith('.md') || entry.name.endsWith('.json'))) {
        scannedFiles.push(fullPath);
        const content = await fs.readFile(fullPath, 'utf-8');
        if (content.includes('[STUB:')) {
          stubFiles.push({
            path: path.relative(repoPath, fullPath),
            type: 'STUB'
          });
        }
      }
    }
  }

  await scan(eckDir);

  if (stubFiles.length === 0) {
    console.log(chalk.green(`\n✅ All clear! Found ${scannedFiles.length} manifest files and no stubs.`));
  } else {
    console.log(chalk.red(`\n❌ Found ${stubFiles.length} files that need attention:`));
    stubFiles.forEach(file => {
      console.log(chalk.yellow(`  - ${file.path} `) + chalk.gray('(contains [STUB] marker)'));
    });
    console.log(chalk.cyan('\n💡 Tip: Instruct your Coder agent to "Finalize these stubs by analyzing the code".'));
  }

  // Cross-platform tree-sitter check
  try {
    const ts = await import('tree-sitter');
    console.log(chalk.green('✅ tree-sitter: Installed and loadable.'));
  } catch (e) {
    console.log(chalk.yellow('ℹ️ tree-sitter: Not available (Skeleton mode will be limited for non-JS files).'));
  }
}
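For orientation, a minimal sketch of how the new commands in this release could be wired into the CLI with commander (cli.js also changed, +81 -134, but its hunks are not reproduced in this extract). The command names, descriptions, and option shapes below are assumptions, not the package's confirmed interface.

```js
// Hypothetical CLI wiring — command names and descriptions are assumptions.
import { Command } from 'commander';
import { runDoctor } from './commands/doctor.js';
import { showFile } from './commands/showFile.js';

const program = new Command();

program
  .command('doctor')
  .description('Scan .eck manifests for unresolved [STUB:] markers and check tree-sitter')
  .action(() => runDoctor(process.cwd()));

program
  .command('show <files...>')
  .description('Print the full content of one or more files')
  .action((files) => showFile(files));

program.parse(process.argv);
```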
@@ -134,7 +134,7 @@ args = ["${indexJsPath}", "ask-claude"]
 # gemini-cli claude "Generate a project overview"
 
 [claude.metadata]
-version = "4.
+version = "4.1.0"
 author = "eck-snapshot"
 generated_at = "${new Date().toISOString()}"
 platform = "${process.platform}"
package/src/cli/commands/showFile.js
ADDED

@@ -0,0 +1,39 @@
import fs from 'fs/promises';
import path from 'path';
import chalk from 'chalk';

/**
 * Show the full content of specific files
 * Used for AI lazy loading when skeleton mode is active
 * @param {string[]} filePaths - Array of paths to the files to display
 */
export async function showFile(filePaths) {
  // Ensure input is array (commander passes array for variadic args)
  const files = Array.isArray(filePaths) ? filePaths : [filePaths];

  if (files.length === 0) {
    console.error(chalk.yellow('No files specified. Usage: eck-snapshot show <file1> [file2] ...'));
    return;
  }

  for (const filePath of files) {
    try {
      const fullPath = path.resolve(process.cwd(), filePath);
      const content = await fs.readFile(fullPath, 'utf-8');

      console.log(chalk.green(`\n--- FULL CONTENT: ${filePath} ---\n`));

      // Detect file extension for syntax highlighting hint
      const ext = path.extname(filePath).slice(1);
      console.log('```' + ext);
      console.log(content);
      console.log('```');

      console.log(chalk.green(`\n--- END OF FILE: ${filePath} ---\n`));

    } catch (error) {
      console.error(chalk.red(`Failed to read file ${filePath}: ${error.message}`));
      // Continue to next file even if one fails
    }
  }
}
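A direct-invocation sketch of the helper above; the import path is illustrative. The output framing (the FULL CONTENT / END OF FILE markers and the fenced block keyed to the file extension) follows the code shown in the hunk.

```js
// Sketch: calling showFile directly; paths resolve against process.cwd().
import { showFile } from './src/cli/commands/showFile.js';

// Each file is printed between "--- FULL CONTENT: <path> ---" and
// "--- END OF FILE: <path> ---" markers, wrapped in a fenced code block,
// so the output can be pasted straight into an AI conversation.
await showFile(['src/config.js', 'README.md']);
```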
package/src/cli/commands/updateSnapshot.js
ADDED

@@ -0,0 +1,75 @@
import fs from 'fs/promises';
import path from 'path';
import ora from 'ora';
import { getGitAnchor, getChangedFiles, getGitDiffOutput } from '../../utils/gitUtils.js';
import { loadSetupConfig } from '../../config.js';
import { readFileWithSizeCheck, parseSize, formatSize, matchesPattern, loadGitignore, generateTimestamp } from '../../utils/fileUtils.js';
import { fileURLToPath } from 'url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

export async function updateSnapshot(repoPath, options) {
  const spinner = ora('Generating update snapshot...').start();
  try {
    const anchor = await getGitAnchor(repoPath);
    if (!anchor) {
      throw new Error('No snapshot anchor found. Run a full snapshot first: eck-snapshot snapshot');
    }

    const changedFiles = await getChangedFiles(repoPath, anchor);
    if (changedFiles.length === 0) {
      spinner.succeed('No changes detected since last full snapshot.');
      return;
    }

    // Load configs for filtering logic
    const setupConfig = await loadSetupConfig();
    const config = { ...setupConfig.fileFiltering, ...setupConfig.performance, ...options };
    const gitignore = await loadGitignore(repoPath);

    let contentOutput = '';
    let includedCount = 0;
    const fileList = [];

    for (const filePath of changedFiles) {
      // Basic filtering (reuse logic roughly)
      if (config.dirsToIgnore.some(d => filePath.startsWith(d))) continue;
      if (gitignore.ignores(filePath)) continue;

      try {
        const fullPath = path.join(repoPath, filePath);
        const content = await readFileWithSizeCheck(fullPath, parseSize(config.maxFileSize));

        contentOutput += `--- File: /${filePath} ---\n\n${content}\n\n`;
        fileList.push(`- ${filePath}`);
        includedCount++;
      } catch (e) {
        // Skip deleted files or read errors
      }
    }

    // Load Template
    const templatePath = path.join(__dirname, '../../templates/update-prompt.template.md');
    let header = await fs.readFile(templatePath, 'utf-8');
    header = header.replace('{{anchor}}', anchor.substring(0, 7))
      .replace('{{timestamp}}', new Date().toLocaleString())
      .replace('{{fileList}}', fileList.join('\n'));

    // Add Git Diff at the end for context
    const diffOutput = await getGitDiffOutput(repoPath, anchor);
    const diffSection = `\n--- GIT DIFF (For Context) ---\n\n\`\`\`diff\n${diffOutput}\n\`\`\``;

    const outputFilename = `update_${generateTimestamp()}.md`;
    const outputPath = path.join(repoPath, '.eck', 'snapshots', outputFilename);

    await fs.mkdir(path.dirname(outputPath), { recursive: true });
    await fs.writeFile(outputPath, header + contentOutput + diffSection);

    spinner.succeed(`Update snapshot created: .eck/snapshots/${outputFilename}`);
    console.log(`📦 Included ${includedCount} changed files.`);

  } catch (error) {
    spinner.fail(`Update failed: ${error.message}`);
  }
}
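updateSnapshot leans on three helpers from the new gitUtils.js (+44 lines, not shown in this extract). The sketch below is a guess at what they might look like, assuming plain `git` subprocess calls via execa; the anchor storage location and the exact git invocations are assumptions, only the function names come from the imports above.

```js
// Hypothetical gitUtils.js — names match the imports above, bodies are assumptions.
import { execa } from 'execa';
import fs from 'fs/promises';
import path from 'path';

// Read the commit hash recorded when the last full snapshot was taken (location assumed).
export async function getGitAnchor(repoPath) {
  try {
    const anchorFile = path.join(repoPath, '.eck', 'anchor');
    return (await fs.readFile(anchorFile, 'utf-8')).trim();
  } catch {
    return null;
  }
}

// List files changed since the anchor commit.
export async function getChangedFiles(repoPath, anchor) {
  const { stdout } = await execa('git', ['diff', '--name-only', anchor], { cwd: repoPath });
  return stdout.split('\n').filter(Boolean);
}

// Full textual diff since the anchor commit.
export async function getGitDiffOutput(repoPath, anchor) {
  const { stdout } = await execa('git', ['diff', anchor], { cwd: repoPath });
  return stdout;
}
```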
package/src/config.js
CHANGED
@@ -16,6 +16,10 @@ export async function loadSetupConfig() {
     const setupPath = path.join(__dirname, '..', 'setup.json');
     const setupContent = await fs.readFile(setupPath, 'utf-8');
     cachedConfig = JSON.parse(setupContent);
+
+    // Basic schema validation for critical fields
+    validateConfigSchema(cachedConfig);
+
     return cachedConfig;
   } catch (error) {
     console.error('Error loading setup.json:', error.message);

@@ -23,6 +27,46 @@ export async function loadSetupConfig() {
   }
 }
 
+/**
+ * Validates critical config fields and warns if missing or invalid
+ */
+function validateConfigSchema(config) {
+  const warnings = [];
+
+  // Validate fileFiltering section
+  if (!config.fileFiltering) {
+    warnings.push('Missing "fileFiltering" section');
+  } else {
+    if (!Array.isArray(config.fileFiltering.filesToIgnore)) {
+      warnings.push('"fileFiltering.filesToIgnore" must be an array');
+    }
+    if (!Array.isArray(config.fileFiltering.dirsToIgnore)) {
+      warnings.push('"fileFiltering.dirsToIgnore" must be an array');
+    }
+  }
+
+  // Validate aiInstructions section
+  if (!config.aiInstructions) {
+    warnings.push('Missing "aiInstructions" section');
+  }
+
+  // Legacy support
+  if (!config.filesToIgnore || !Array.isArray(config.filesToIgnore)) {
+    warnings.push('filesToIgnore missing or not an array - using defaults');
+    config.filesToIgnore = DEFAULT_CONFIG.filesToIgnore;
+  }
+  if (!config.dirsToIgnore || !Array.isArray(config.dirsToIgnore)) {
+    warnings.push('dirsToIgnore missing or not an array - using defaults');
+    config.dirsToIgnore = DEFAULT_CONFIG.dirsToIgnore;
+  }
+
+  if (warnings.length > 0) {
+    console.warn('\n⚠️ Config Validation Warnings:');
+    warnings.forEach(w => console.warn(`  - ${w}`));
+    console.warn('  (Falling back to defaults for missing values where possible)\n');
+  }
+}
+
 /**
  * Loads and merges all profiles (local-first).
  */
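To make the validation above concrete, here is a hedged sketch of a setup.json shape that would pass validateConfigSchema without warnings. The keys are inferred from the checks in the hunk; the actual contents shipped in setup.json (+57 -35 in this release) are not reproduced here.

```js
// Illustrative config object only — not the package's real setup.json.
const exampleConfig = {
  fileFiltering: {
    filesToIgnore: ['*.log', '*.lock'],      // must be an array
    dirsToIgnore: ['node_modules', 'dist']   // must be an array
  },
  aiInstructions: { /* prompt-related settings */ },
  // Legacy top-level keys are still checked; missing or non-array values
  // fall back to DEFAULT_CONFIG and emit a console warning.
  filesToIgnore: ['*.log'],
  dirsToIgnore: ['node_modules']
};
```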
package/src/core/skeletonizer.js
ADDED

@@ -0,0 +1,201 @@
import { parse } from '@babel/parser';
import _traverse from '@babel/traverse';
const traverse = _traverse.default || _traverse;
import _generate from '@babel/generator';
const generate = _generate.default || _generate;

// Lazy-load tree-sitter to avoid breaking when native bindings are unavailable
let Parser = null;
let Python = null;
let Java = null;
let Kotlin = null;
let C = null;
let Rust = null;
let Go = null;

async function loadTreeSitter() {
  if (Parser) return true; // Already loaded

  try {
    // We use dynamic imports and check for basic sanity to handle broken native builds (common on Windows)
    const treeSitterModule = await import('tree-sitter').catch(() => null);
    if (!treeSitterModule || !treeSitterModule.default) return false;

    Parser = treeSitterModule.default;

    // Load language packs with Promise.allSettled to handle individual failures
    const langs = await Promise.allSettled([
      import('tree-sitter-python'),
      import('tree-sitter-java'),
      import('tree-sitter-kotlin'),
      import('tree-sitter-c'),
      import('tree-sitter-rust'),
      import('tree-sitter-go')
    ]);

    Python = langs[0].status === 'fulfilled' ? langs[0].value.default : null;
    Java = langs[1].status === 'fulfilled' ? langs[1].value.default : null;
    Kotlin = langs[2].status === 'fulfilled' ? langs[2].value.default : null;
    C = langs[3].status === 'fulfilled' ? langs[3].value.default : null;
    Rust = langs[4].status === 'fulfilled' ? langs[4].value.default : null;
    Go = langs[5].status === 'fulfilled' ? langs[5].value.default : null;

    return true;
  } catch (error) {
    // Silently fail, skeletonize will fallback to original content
    return false;
  }
}

// Initialize parsers map (will be populated lazily)
const languages = {
  '.py': () => Python,
  '.java': () => Java,
  '.kt': () => Kotlin,
  '.c': () => C,
  '.h': () => C,
  '.cpp': () => C,
  '.hpp': () => C,
  '.rs': () => Rust,
  '.go': () => Go
};

/**
 * Strips implementation details from code.
 * @param {string} content - Full file content
 * @param {string} filePath - File path to determine language
 * @returns {Promise<string>} - Skeletonized code
 */
export async function skeletonize(content, filePath) {
  if (!content) return content;

  // 1. JS/TS Strategy (Babel is better for JS ecosystem)
  if (/\.(js|jsx|ts|tsx|mjs|cjs)$/.test(filePath)) {
    return skeletonizeJs(content);
  }

  // 2. Tree-sitter Strategy (Python, Java, Kotlin, C, Rust, Go)
  const ext = filePath.substring(filePath.lastIndexOf('.'));
  if (languages[ext]) {
    // Lazy-load tree-sitter
    const available = await loadTreeSitter();
    const langModule = languages[ext]();

    // Only attempt tree-sitter if both the parser and the specific language module are ready
    if (available && Parser && langModule) {
      return skeletonizeTreeSitter(content, langModule, ext);
    }
    return content; // Fallback: return original content if tree-sitter unavailable
  }

  // 3. Fallback (Return as is)
  return content;
}

function skeletonizeJs(content) {
  try {
    const ast = parse(content, {
      sourceType: 'module',
      plugins: ['typescript', 'jsx', 'decorators-legacy'],
      errorRecovery: true
    });

    traverse(ast, {
      Function(path) {
        if (path.node.body && path.node.body.type === 'BlockStatement') {
          // Preserve leading comments (JSDoc) before emptying body
          const leadingComments = path.node.leadingComments || [];
          path.node.body.body = [];
          path.node.body.innerComments = leadingComments.length > 0
            ? leadingComments
            : [{ type: 'CommentBlock', value: ' ... ' }];
        }
      },
      ClassMethod(path) {
        if (path.node.body && path.node.body.type === 'BlockStatement') {
          // Preserve leading comments (JSDoc) before emptying body
          const leadingComments = path.node.leadingComments || [];
          path.node.body.body = [];
          path.node.body.innerComments = leadingComments.length > 0
            ? leadingComments
            : [{ type: 'CommentBlock', value: ' ... ' }];
        }
      }
    });

    const output = generate(ast, {}, content);
    return output.code;
  } catch (e) {
    return content + '\n// [Skeleton parse error]';
  }
}

function skeletonizeTreeSitter(content, language, ext) {
  try {
    const parser = new Parser();
    parser.setLanguage(language);
    const tree = parser.parse(content);

    // Define node types that represent function bodies
    const bodyTypes = ['block', 'function_body', 'compound_statement'];
    const replacements = [];

    const visit = (node) => {
      const type = node.type;
      let isFunction = false;
      let replacementText = '{ /* ... */ }';

      // Language specific detection
      if (ext === '.rs') {
        isFunction = ['function_item', 'method_declaration'].includes(type);
      } else if (ext === '.go') {
        isFunction = ['function_declaration', 'method_declaration'].includes(type);
      } else if (ext === '.py') {
        isFunction = type === 'function_definition';
        replacementText = '...';
      } else {
        isFunction = [
          'function_definition',
          'method_declaration',
          'function_declaration'
        ].includes(type);
      }

      if (isFunction) {
        let bodyNode = null;
        for (let i = 0; i < node.childCount; i++) {
          const child = node.child(i);
          if (bodyTypes.includes(child.type)) {
            bodyNode = child;
            break;
          }
        }

        if (bodyNode) {
          replacements.push({
            start: bodyNode.startIndex,
            end: bodyNode.endIndex,
            text: replacementText
          });
          return;
        }
      }

      for (let i = 0; i < node.childCount; i++) {
        visit(node.child(i));
      }
    };

    visit(tree.rootNode);
    replacements.sort((a, b) => b.start - a.start);

    let currentContent = content;
    for (const rep of replacements) {
      currentContent = currentContent.substring(0, rep.start) + rep.text + currentContent.substring(rep.end);
    }

    return currentContent;
  } catch (e) {
    return content + `\n// [Skeleton error: ${e.message}]`;
  }
}
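A small usage sketch of the JS path through skeletonize: Babel parses the file, empties every function body, and re-prints the signature with a placeholder comment. The output shown is approximate.

```js
import { skeletonize } from './src/core/skeletonizer.js';

const source = `export function add(a, b) {
  const sum = a + b;
  return sum;
}`;

// For .js/.ts files the Babel strategy applies; the body is replaced with a
// placeholder comment, so the result is roughly:
//   export function add(a, b) {/* ... */}
console.log(await skeletonize(source, 'math.js'));
```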
package/src/services/claudeCliService.js
CHANGED

@@ -1,6 +1,7 @@
 import { execa } from 'execa';
 import { spawn } from 'child_process';
 import pRetry from 'p-retry';
+import { parseWithFallback } from '../utils/eckProtocolParser.js';
 
 /**
  * Executes a prompt using the claude-code CLI in non-interactive print mode.

@@ -145,8 +146,12 @@ async function attemptClaudeExecution(prompt, sessionId = null, options = {}) {
     throw new Error('No result JSON found in claude-code output.');
   }
 
+  // Parse the result using Eck-Protocol v2 parser
+  const parsed = parseWithFallback(resultJson.result || '');
+
   return {
     result: resultJson.result,
+    parsed: parsed, // Structured data from Eck-Protocol v2
     cost: resultJson.total_cost_usd,
     usage: resultJson.usage,
     duration_ms: resultJson.duration_ms
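eckProtocolParser.js (+221 lines) is not shown in this extract, so the shape of `parsed` in the sketch below is an assumption inferred from the Eck-Protocol v2 template later in the diff (file blocks plus a journal metadata object). The return fields themselves (result, parsed, cost, usage, duration_ms) come from the hunk above.

```js
// Hypothetical consumer of the executor's return value; the shape of `parsed` is a guess,
// and attemptClaudeExecution may not be exported under this name.
const { result, parsed, cost, usage, duration_ms } = await attemptClaudeExecution(prompt);

// Something along these lines, if the parser mirrors the protocol described below:
// parsed = {
//   analysis: '...markdown analysis section...',
//   files: [{ path: 'src/auth.js', action: 'replace', content: '...' }],
//   metadata: { journal: { type: 'fix', scope: 'auth', summary: '...' } }
// };
for (const file of parsed?.files ?? []) {
  console.log(`${file.action}: ${file.path}`);
}
```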
package/src/templates/architect-prompt.template.md
CHANGED

@@ -12,18 +12,115 @@ You are the **Junior Architect** agent (`gemini_wsl`). Your primary goal is to e
 - **You (Junior Architect / `gemini_wsl`)** analyze the task, break it down, and use your tools.
 - The **Coder (`claude`)** is your primary tool for *writing code*.
 
-## CRITICAL WORKFLOW:
+## CRITICAL WORKFLOW: Eck-Protocol v2 (Hybrid Format)
 
-
+When you need to write or modify code, you **MUST** use the `/claude` command with the **Eck-Protocol v2** format. This format uses Markdown for readability, XML tags for file boundaries, and JSON for metadata.
 
-
+### Response Format
 
-**
+**CRITICAL DISPLAY RULE:**
+You MUST wrap your ENTIRE response in a `text` block using **QUADRUPLE BACKTICKS** (` ```` `). This prevents internal code blocks from breaking the container.
 
-
+````text
+# Analysis
 
+[Your thinking and analysis of the task goes here.
+Explain what you're going to do and why.]
+
+## Changes
+
+<file path="src/path/to/file.js" action="replace">
+```javascript
+// Your code here - no escaping needed!
+async function example() {
+  console.log("Clean code with quotes!");
+  return { success: true };
+}
 ```
-
+</file>
+
+<file path="src/another/file.js" action="create">
+```javascript
+export const helper = () => true;
 ```
+</file>
+
+## Metadata
+
+```json
+{
+  "journal": {
+    "type": "feat",
+    "scope": "api",
+    "summary": "Add example function"
+  }
+}
+```
+````
+
+### File Actions
+
+| Action | Description |
+|--------|-------------|
+| `create` | Create a new file |
+| `replace` | Replace entire file content |
+| `modify` | Partial modification (include context) |
+| `delete` | Delete the file (no content needed) |
+
+### Example Command
+
+```
+/claude
+````text
+# Analysis
+
+I need to fix the null check in auth.js and add a helper function.
+
+## Changes
+
+<file path="src/auth.js" action="replace">
+```javascript
+async function login(user) {
+  if (!user) throw new Error("No user provided");
+  return await db.authenticate(user);
+}
+```
+</file>
+
+<file path="src/utils/validate.js" action="create">
+```javascript
+export const validateUser = (user) => {
+  return user && typeof user.id === 'string';
+};
+```
+</file>
+
+## Metadata
+
+```json
+{
+  "journal": {
+    "type": "fix",
+    "scope": "auth",
+    "summary": "Add null check and validation helper"
+  }
+}
+```
+````
+```
+
+### Why This Format?
+
+1. **No escaping hell** - Code is written in standard markdown fences, no `\"` or `\n`
+2. **Readable** - Both humans and AI can easily read and write this format
+3. **Parseable** - XML tags provide clear boundaries for automated processing
+4. **Flexible** - Markdown sections allow for thinking and context
+
+### Important Rules
+
+- Always wrap code in markdown fences (` ``` `) inside `<file>` tags
+- Always include the `path` and `action` attributes on `<file>` tags
+- Use the `## Metadata` section for journal entries and other structured data
+- The `# Analysis` section is optional but recommended for complex tasks
 
-Your other tools (like `bash`) can be used for analysis and validation.
+Your other tools (like `bash`) can be used for analysis and validation.
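The "Parseable" claim in the template is easy to picture: the `<file>` tags give unambiguous boundaries that even a small regex pass can extract. The sketch below is illustrative only and is not the actual eckProtocolParser.js implementation (it ignores edge cases such as `delete` actions with no body).

```js
// Naive extraction sketch for Eck-Protocol v2 responses — illustrative only.
function extractFileBlocks(responseText) {
  const blocks = [];
  const fileTag = /<file\s+path="([^"]+)"\s+action="([^"]+)">\s*```[\w]*\n([\s\S]*?)```\s*<\/file>/g;
  let match;
  while ((match = fileTag.exec(responseText)) !== null) {
    blocks.push({ path: match[1], action: match[2], content: match[3] });
  }
  return blocks;
}
```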
|