@xelth/eck-snapshot 5.9.0 → 6.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +321 -190
- package/index.js +1 -1
- package/package.json +15 -2
- package/scripts/mcp-eck-core.js +143 -13
- package/setup.json +119 -81
- package/src/cli/cli.js +256 -385
- package/src/cli/commands/createSnapshot.js +391 -175
- package/src/cli/commands/recon.js +308 -0
- package/src/cli/commands/setupMcp.js +280 -19
- package/src/cli/commands/trainTokens.js +42 -32
- package/src/cli/commands/updateSnapshot.js +136 -43
- package/src/core/depthConfig.js +54 -0
- package/src/core/skeletonizer.js +280 -21
- package/src/templates/architect-prompt.template.md +34 -0
- package/src/templates/multiAgent.md +68 -15
- package/src/templates/opencode/coder.template.md +53 -17
- package/src/templates/opencode/junior-architect.template.md +54 -15
- package/src/templates/skeleton-instruction.md +1 -1
- package/src/templates/update-prompt.template.md +2 -0
- package/src/utils/aiHeader.js +57 -27
- package/src/utils/claudeMdGenerator.js +182 -88
- package/src/utils/fileUtils.js +217 -149
- package/src/utils/gitUtils.js +12 -8
- package/src/utils/opencodeAgentsGenerator.js +8 -2
- package/src/utils/projectDetector.js +66 -21
- package/src/utils/tokenEstimator.js +11 -7
- package/src/cli/commands/consilium.js +0 -86
- package/src/cli/commands/detectProfiles.js +0 -98
- package/src/cli/commands/envSync.js +0 -319
- package/src/cli/commands/generateProfileGuide.js +0 -144
- package/src/cli/commands/pruneSnapshot.js +0 -106
- package/src/cli/commands/restoreSnapshot.js +0 -173
- package/src/cli/commands/setupGemini.js +0 -149
- package/src/cli/commands/setupGemini.test.js +0 -115
- package/src/cli/commands/showFile.js +0 -39
- package/src/services/claudeCliService.js +0 -626
- package/src/services/claudeCliService.test.js +0 -267
|
@@ -5,11 +5,12 @@ import chalk from 'chalk';
|
|
|
5
5
|
import isBinaryPath from 'is-binary-path';
|
|
6
6
|
import { getGitAnchor, getChangedFiles } from '../../utils/gitUtils.js';
|
|
7
7
|
import { loadSetupConfig } from '../../config.js';
|
|
8
|
-
import { readFileWithSizeCheck, parseSize, formatSize, matchesPattern, loadGitignore, generateTimestamp, getShortRepoName } from '../../utils/fileUtils.js';
|
|
8
|
+
import { readFileWithSizeCheck, parseSize, formatSize, matchesPattern, loadGitignore, generateTimestamp, getShortRepoName, ensureSnapshotsInGitignore, readMlModelMetadata } from '../../utils/fileUtils.js';
|
|
9
9
|
import { detectProjectType, getProjectSpecificFiltering } from '../../utils/projectDetector.js';
|
|
10
10
|
import { execa } from 'execa';
|
|
11
11
|
import { fileURLToPath } from 'url';
|
|
12
12
|
import { pushTelemetry } from '../../utils/telemetry.js';
|
|
13
|
+
import { syncTokenWeights } from '../../utils/tokenEstimator.js';
|
|
13
14
|
|
|
14
15
|
// Mirror the same hidden-path guard used in createSnapshot.js
|
|
15
16
|
function isHiddenPath(filePath) {
|
|
@@ -36,6 +37,15 @@ async function autoCommit(repoPath) {
|
|
|
36
37
|
const __filename = fileURLToPath(import.meta.url);
|
|
37
38
|
const __dirname = path.dirname(__filename);
|
|
38
39
|
|
|
40
|
+
/**
 * Resolves the git commit hash to diff against from a `--base` CLI value.
 *
 * Accepts either:
 *  - a snapshot filename/path (e.g. "eckRepo20240101_abc1234_up1_5kb.md"),
 *    whose anchor hash is extracted from the "_<hash>_" segment, or
 *  - a raw git commit hash (7-40 hex characters).
 *
 * @param {string|null|undefined} base - Raw value of the --base option.
 * @returns {string|null} The resolved commit hash, or null when no base was given.
 * @throws {Error} When the value is neither a snapshot filename nor a commit hash.
 */
function resolveBaseHash(base) {
  if (!base) return null;
  // Strip any directory prefix and the .md extension so a full snapshot path works too.
  const basename = path.basename(base, '.md');
  // Snapshot filenames embed the anchor hash between underscores:
  //   eck<repo><timestamp>_<hash>_up<seq>_<size>kb.md
  // The `i` flag keeps this consistent with the bare-hash check below.
  const match = basename.match(/_([0-9a-f]{7,40})_/i);
  if (match) return match[1];
  // Fall back to treating the whole value as a bare commit hash.
  if (/^[0-9a-f]{7,40}$/i.test(base)) return base;
  throw new Error(`Invalid --base value: "${base}". Expected a snapshot filename or a git commit hash.`);
}
|
|
48
|
+
|
|
39
49
|
// Shared logic to generate the snapshot content string
|
|
40
50
|
async function generateSnapshotContent(repoPath, changedFiles, anchor, config, gitignore) {
|
|
41
51
|
let contentOutput = '';
|
|
@@ -45,13 +55,45 @@ async function generateSnapshotContent(repoPath, changedFiles, anchor, config, g
|
|
|
45
55
|
// Include Agent Report if it exists and hasn't been embedded yet
|
|
46
56
|
let agentReport = null;
|
|
47
57
|
const reportPath = path.join(repoPath, '.eck', 'lastsnapshot', 'AnswerToSA.md');
|
|
58
|
+
const lockPath = path.join(repoPath, '.eck', 'lastsnapshot', 'AnswerToSA.lock');
|
|
48
59
|
try {
|
|
60
|
+
// Use atomic directory creation as a lock to prevent race conditions
|
|
61
|
+
await fs.mkdir(lockPath);
|
|
49
62
|
const reportContent = await fs.readFile(reportPath, 'utf-8');
|
|
63
|
+
|
|
50
64
|
if (!reportContent.includes('[SYSTEM: EMBEDDED]')) {
|
|
51
65
|
agentReport = reportContent;
|
|
66
|
+
|
|
67
|
+
// Immediately mark as embedded to release the race window
|
|
52
68
|
await fs.appendFile(reportPath, '\n\n[SYSTEM: EMBEDDED]\n', 'utf-8');
|
|
69
|
+
|
|
70
|
+
// Auto-Journaling: prepend agent report to JOURNAL.md
|
|
71
|
+
const journalPath = path.join(repoPath, '.eck', 'JOURNAL.md');
|
|
72
|
+
try {
|
|
73
|
+
const dateStr = new Date().toISOString().split('T')[0];
|
|
74
|
+
const journalEntry = `## ${dateStr} — Agent Report\n\n${reportContent.trim()}\n`;
|
|
75
|
+
|
|
76
|
+
let existingJournal = '';
|
|
77
|
+
try {
|
|
78
|
+
existingJournal = await fs.readFile(journalPath, 'utf-8');
|
|
79
|
+
} catch (e) { /* might not exist */ }
|
|
80
|
+
|
|
81
|
+
const insertPos = existingJournal.indexOf('\n## ');
|
|
82
|
+
if (insertPos !== -1) {
|
|
83
|
+
const newJournal = existingJournal.slice(0, insertPos) + '\n\n' + journalEntry + existingJournal.slice(insertPos);
|
|
84
|
+
await fs.writeFile(journalPath, newJournal, 'utf-8');
|
|
85
|
+
} else {
|
|
86
|
+
await fs.writeFile(journalPath, (existingJournal ? existingJournal + '\n\n' : '') + journalEntry + '\n', 'utf-8');
|
|
87
|
+
}
|
|
88
|
+
} catch (je) {
|
|
89
|
+
console.warn('Could not auto-update JOURNAL.md', je.message);
|
|
90
|
+
}
|
|
53
91
|
}
|
|
54
|
-
|
|
92
|
+
await fs.rmdir(lockPath);
|
|
93
|
+
} catch (e) {
|
|
94
|
+
// File not found or locked by another process
|
|
95
|
+
try { await fs.rmdir(lockPath); } catch (_) {}
|
|
96
|
+
}
|
|
55
97
|
|
|
56
98
|
const cleanDirsToIgnore = (config.dirsToIgnore || []).map(d => d.replace(/\/$/, ''));
|
|
57
99
|
|
|
@@ -61,8 +103,12 @@ async function generateSnapshotContent(repoPath, changedFiles, anchor, config, g
|
|
|
61
103
|
// Skip hidden paths (.idea/, .vscode/, etc.) — mirrors createSnapshot.js
|
|
62
104
|
if (isHiddenPath(normalizedPath)) continue;
|
|
63
105
|
|
|
106
|
+
const mlExt = path.extname(filePath).toLowerCase();
|
|
107
|
+
const ML_EXTENSIONS = ['.safetensors', '.onnx', '.pt', '.pth', '.h5', '.pb', '.bin', '.ckpt', '.gguf'];
|
|
108
|
+
const isMlModel = ML_EXTENSIONS.includes(mlExt);
|
|
109
|
+
|
|
64
110
|
// Skip binary files — mirrors createSnapshot.js
|
|
65
|
-
if (isBinaryPath(filePath)) continue;
|
|
111
|
+
if (isBinaryPath(filePath) && !isMlModel) continue;
|
|
66
112
|
|
|
67
113
|
const pathParts = normalizedPath.split('/');
|
|
68
114
|
let isIgnoredDir = false;
|
|
@@ -82,9 +128,26 @@ async function generateSnapshotContent(repoPath, changedFiles, anchor, config, g
|
|
|
82
128
|
|
|
83
129
|
try {
|
|
84
130
|
const fullPath = path.join(repoPath, filePath);
|
|
85
|
-
|
|
131
|
+
|
|
132
|
+
// Explicitly check if file was deleted
|
|
133
|
+
try {
|
|
134
|
+
await fs.access(fullPath);
|
|
135
|
+
} catch (accessErr) {
|
|
136
|
+
contentOutput += `--- File: /${normalizedPath} ---\n\n[FILE DELETED]\n\n`;
|
|
137
|
+
fileList.push(`- ${normalizedPath} (Deleted)`);
|
|
138
|
+
includedCount++;
|
|
139
|
+
continue;
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
let content;
|
|
143
|
+
if (isMlModel) {
|
|
144
|
+
content = await readMlModelMetadata(fullPath);
|
|
145
|
+
} else {
|
|
146
|
+
content = await readFileWithSizeCheck(fullPath, parseSize(config.maxFileSize));
|
|
147
|
+
}
|
|
148
|
+
|
|
86
149
|
contentOutput += `--- File: /${normalizedPath} ---\n\n${content}\n\n`;
|
|
87
|
-
fileList.push(`- ${normalizedPath}`);
|
|
150
|
+
fileList.push(`- ${normalizedPath} (Modified/Added)`);
|
|
88
151
|
includedCount++;
|
|
89
152
|
} catch (e) { /* Skip */ }
|
|
90
153
|
}
|
|
@@ -99,7 +162,10 @@ async function generateSnapshotContent(repoPath, changedFiles, anchor, config, g
|
|
|
99
162
|
reportSection = `\n---\n### 📨 MESSAGE FROM EXECUTION AGENT\n\n${agentReport}\n---\n\n`;
|
|
100
163
|
}
|
|
101
164
|
|
|
102
|
-
|
|
165
|
+
const repoName = path.basename(repoPath);
|
|
166
|
+
|
|
167
|
+
header = header.replace('{{repoName}}', repoName)
|
|
168
|
+
.replace('{{anchor}}', anchor.substring(0, 7))
|
|
103
169
|
.replace('{{timestamp}}', new Date().toLocaleString())
|
|
104
170
|
.replace('{{fileList}}', fileList.join('\n'));
|
|
105
171
|
|
|
@@ -116,19 +182,31 @@ async function generateSnapshotContent(repoPath, changedFiles, anchor, config, g
|
|
|
116
182
|
export async function updateSnapshot(repoPath, options) {
|
|
117
183
|
const spinner = ora('Generating update snapshot...').start();
|
|
118
184
|
try {
|
|
119
|
-
const
|
|
185
|
+
const isCustomBase = !!options.base;
|
|
186
|
+
const anchor = resolveBaseHash(options.base) || await getGitAnchor(repoPath);
|
|
187
|
+
|
|
120
188
|
if (!anchor) {
|
|
121
189
|
throw new Error('No snapshot anchor found. Run a full snapshot first: eck-snapshot snapshot');
|
|
122
190
|
}
|
|
123
191
|
|
|
124
192
|
// Auto-commit any uncommitted changes so they appear in the diff
|
|
125
|
-
|
|
126
|
-
if (
|
|
127
|
-
|
|
128
|
-
|
|
193
|
+
let didCommit = false;
|
|
194
|
+
if (!options.fail) {
|
|
195
|
+
didCommit = await autoCommit(repoPath);
|
|
196
|
+
if (didCommit) {
|
|
197
|
+
spinner.info('Auto-committed uncommitted changes.');
|
|
198
|
+
}
|
|
199
|
+
} else {
|
|
200
|
+
spinner.info('Fail flag passed: skipping auto-commit.');
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
if (isCustomBase) {
|
|
204
|
+
spinner.info(`Using custom base: ${anchor.substring(0, 7)} (from ${path.basename(options.base)})`);
|
|
129
205
|
}
|
|
130
206
|
|
|
131
|
-
|
|
207
|
+
spinner.start('Generating update snapshot...');
|
|
208
|
+
|
|
209
|
+
const changedFiles = await getChangedFiles(repoPath, anchor, options.fail);
|
|
132
210
|
if (changedFiles.length === 0) {
|
|
133
211
|
spinner.succeed('No changes detected since last full snapshot.');
|
|
134
212
|
return;
|
|
@@ -154,27 +232,32 @@ export async function updateSnapshot(repoPath, options) {
|
|
|
154
232
|
const { fullContent, includedCount, agentReport } = await generateSnapshotContent(repoPath, changedFiles, anchor, config, gitignore);
|
|
155
233
|
|
|
156
234
|
// Determine sequence number
|
|
157
|
-
let
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
const
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
235
|
+
let seqStr = 'custom';
|
|
236
|
+
if (!isCustomBase) {
|
|
237
|
+
let seqNum = 1;
|
|
238
|
+
const counterPath = path.join(repoPath, '.eck', 'update_seq');
|
|
239
|
+
try {
|
|
240
|
+
const seqData = await fs.readFile(counterPath, 'utf-8');
|
|
241
|
+
const [savedHash, savedCount] = seqData.split(':');
|
|
242
|
+
if (savedHash && savedHash.trim() === anchor.substring(0, 7).trim()) {
|
|
243
|
+
seqNum = parseInt(savedCount || '0') + 1;
|
|
244
|
+
}
|
|
245
|
+
} catch (e) {}
|
|
166
246
|
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
247
|
+
try {
|
|
248
|
+
await fs.writeFile(counterPath, `${anchor.substring(0, 7)}:${seqNum}`);
|
|
249
|
+
} catch (e) {}
|
|
250
|
+
seqStr = seqNum.toString();
|
|
251
|
+
}
|
|
170
252
|
|
|
171
253
|
const timestamp = generateTimestamp();
|
|
172
254
|
const shortRepoName = getShortRepoName(path.basename(repoPath));
|
|
173
255
|
const sizeKB = Math.max(1, Math.round(Buffer.byteLength(fullContent, 'utf-8') / 1024));
|
|
174
|
-
const outputFilename = `eck${shortRepoName}${timestamp}_${anchor.substring(0, 7)}_up${
|
|
256
|
+
const outputFilename = `eck${shortRepoName}${timestamp}_${anchor.substring(0, 7)}_up${seqStr}_${sizeKB}kb.md`;
|
|
175
257
|
const outputPath = path.join(repoPath, '.eck', 'snapshots', outputFilename);
|
|
176
258
|
|
|
177
259
|
await fs.mkdir(path.dirname(outputPath), { recursive: true });
|
|
260
|
+
await ensureSnapshotsInGitignore(repoPath);
|
|
178
261
|
await fs.writeFile(outputPath, fullContent);
|
|
179
262
|
|
|
180
263
|
spinner.succeed(`Update snapshot created: .eck/snapshots/${outputFilename}`);
|
|
@@ -216,18 +299,22 @@ export async function updateSnapshot(repoPath, options) {
|
|
|
216
299
|
}
|
|
217
300
|
|
|
218
301
|
// New Silent/JSON command for Agents
|
|
219
|
-
export async function updateSnapshotJson(repoPath) {
|
|
302
|
+
export async function updateSnapshotJson(repoPath, options = {}) {
|
|
220
303
|
try {
|
|
221
|
-
const
|
|
304
|
+
const isCustomBase = !!options.base;
|
|
305
|
+
const anchor = resolveBaseHash(options.base) || await getGitAnchor(repoPath);
|
|
306
|
+
|
|
222
307
|
if (!anchor) {
|
|
223
308
|
console.log(JSON.stringify({ status: "error", message: "No snapshot anchor found" }));
|
|
224
309
|
return;
|
|
225
310
|
}
|
|
226
311
|
|
|
227
312
|
// Auto-commit any uncommitted changes
|
|
228
|
-
|
|
313
|
+
if (!options.fail) {
|
|
314
|
+
await autoCommit(repoPath);
|
|
315
|
+
}
|
|
229
316
|
|
|
230
|
-
const changedFiles = await getChangedFiles(repoPath, anchor);
|
|
317
|
+
const changedFiles = await getChangedFiles(repoPath, anchor, !!options.fail);
|
|
231
318
|
if (changedFiles.length === 0) {
|
|
232
319
|
console.log(JSON.stringify({ status: "no_changes", message: "No changes detected" }));
|
|
233
320
|
return;
|
|
@@ -252,26 +339,31 @@ export async function updateSnapshotJson(repoPath) {
|
|
|
252
339
|
|
|
253
340
|
const { fullContent, includedCount, agentReport } = await generateSnapshotContent(repoPath, changedFiles, anchor, config, gitignore);
|
|
254
341
|
|
|
255
|
-
let
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
const
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
342
|
+
let seqStr = 'custom';
|
|
343
|
+
if (!isCustomBase) {
|
|
344
|
+
let seqNum = 1;
|
|
345
|
+
const counterPath = path.join(repoPath, '.eck', 'update_seq');
|
|
346
|
+
try {
|
|
347
|
+
const seqData = await fs.readFile(counterPath, 'utf-8');
|
|
348
|
+
const [savedHash, savedCount] = seqData.split(':');
|
|
349
|
+
if (savedHash && savedHash.trim() === anchor.substring(0, 7).trim()) {
|
|
350
|
+
seqNum = parseInt(savedCount || '0') + 1;
|
|
351
|
+
}
|
|
352
|
+
} catch (e) {}
|
|
264
353
|
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
354
|
+
try {
|
|
355
|
+
await fs.writeFile(counterPath, `${anchor.substring(0, 7)}:${seqNum}`);
|
|
356
|
+
} catch (e) {}
|
|
357
|
+
seqStr = seqNum.toString();
|
|
358
|
+
}
|
|
268
359
|
|
|
269
360
|
const timestamp = generateTimestamp();
|
|
270
361
|
const shortRepoName = getShortRepoName(path.basename(repoPath));
|
|
271
362
|
const sizeKB = Math.max(1, Math.round(Buffer.byteLength(fullContent, 'utf-8') / 1024));
|
|
272
|
-
const outputFilename = `eck${shortRepoName}${timestamp}_${anchor.substring(0, 7)}_up${
|
|
363
|
+
const outputFilename = `eck${shortRepoName}${timestamp}_${anchor.substring(0, 7)}_up${seqStr}_${sizeKB}kb.md`;
|
|
273
364
|
const outputPath = path.join(repoPath, '.eck', 'snapshots', outputFilename);
|
|
274
365
|
await fs.mkdir(path.dirname(outputPath), { recursive: true });
|
|
366
|
+
await ensureSnapshotsInGitignore(repoPath);
|
|
275
367
|
await fs.writeFile(outputPath, fullContent);
|
|
276
368
|
|
|
277
369
|
// --- FEATURE: Active Snapshot (.eck/lastsnapshot/) ---
|
|
@@ -301,8 +393,9 @@ export async function updateSnapshotJson(repoPath) {
|
|
|
301
393
|
timestamp: timestamp
|
|
302
394
|
}));
|
|
303
395
|
|
|
304
|
-
// Auto-push telemetry (fire and forget so it doesn't break JSON output)
|
|
396
|
+
// Auto-push telemetry and sync weights (fire and forget so it doesn't break JSON output)
|
|
305
397
|
pushTelemetry(repoPath, true).catch(() => {});
|
|
398
|
+
syncTokenWeights(true).catch(() => {});
|
|
306
399
|
|
|
307
400
|
} catch (error) {
|
|
308
401
|
console.log(JSON.stringify({ status: "error", message: error.message }));
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
/**
 * Shared depth configuration for the link and scout commands.
 * Maps a 0-9 depth level onto a content-extraction mode:
 * tree-only (0), head truncation (1-4), skeleton (5-6), full content (7-9).
 *
 * @param {number|string} depth - Depth level (0-9); non-numeric input falls back to 0,
 *   out-of-range values are clamped.
 * @returns {object} Configuration object with mode settings.
 */
export function getDepthConfig(depth) {
  // Coerce and clamp: NaN/garbage becomes 0, anything outside [0, 9] is pinned.
  const requested = Number.parseInt(depth, 10) || 0;
  const level = Math.min(9, Math.max(0, requested));

  // 0: directory tree only — no file contents at all.
  if (level === 0) {
    return { mode: 'tree', skipContent: true };
  }

  // 1-4: head-of-file truncation, progressively more generous.
  const truncationLimits = { 1: 10, 2: 30, 3: 60, 4: 100 };
  if (level <= 4) {
    return { mode: 'truncated', maxLinesPerFile: truncationLimits[level], skeleton: false };
  }

  // 5-6: structural skeleton (signatures only), optionally keeping docstrings.
  if (level <= 6) {
    const withDocs = level === 6;
    return {
      mode: withDocs ? 'skeleton+docs' : 'skeleton',
      skeleton: true,
      preserveDocs: withDocs,
      maxLinesPerFile: 0,
    };
  }

  // 7-9: full content with a per-file line cap (0 means unlimited at level 9).
  const fullContentLimits = { 7: 500, 8: 1000, 9: 0 };
  return { mode: 'full', skeleton: false, maxLinesPerFile: fullContentLimits[level] };
}
|
|
39
|
+
|
|
40
|
+
/**
 * Human-readable depth scale table for documentation/headers.
 *
 * Each entry mirrors one branch of getDepthConfig() on the same 0-9 scale,
 * so the two must be kept in sync when a level's behavior changes.
 * Intended for rendering help text or snapshot headers, not for lookups
 * at runtime (use getDepthConfig() for that).
 */
export const DEPTH_SCALE = [
  { depth: 0, mode: 'Tree only', description: 'Directory structure, no file contents' },
  { depth: 1, mode: 'Truncated 10', description: '10 lines per file (imports/header)' },
  { depth: 2, mode: 'Truncated 30', description: '30 lines per file' },
  { depth: 3, mode: 'Truncated 60', description: '60 lines per file' },
  { depth: 4, mode: 'Truncated 100', description: '100 lines per file' },
  { depth: 5, mode: 'Skeleton', description: 'Function/class signatures only' },
  { depth: 6, mode: 'Skeleton + docs', description: 'Signatures + docstrings/comments' },
  { depth: 7, mode: 'Full (compact)', description: 'Full content, truncated at 500 lines' },
  { depth: 8, mode: 'Full (standard)', description: 'Full content, truncated at 1000 lines' },
  { depth: 9, mode: 'Full (unlimited)', description: 'Everything, no limits' },
];
|