@xelth/eck-snapshot 2.2.0 → 4.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/LICENSE +21 -0
  2. package/README.md +119 -225
  3. package/index.js +14 -776
  4. package/package.json +25 -7
  5. package/setup.json +805 -0
  6. package/src/cli/cli.js +427 -0
  7. package/src/cli/commands/askGpt.js +29 -0
  8. package/src/cli/commands/autoDocs.js +150 -0
  9. package/src/cli/commands/consilium.js +86 -0
  10. package/src/cli/commands/createSnapshot.js +601 -0
  11. package/src/cli/commands/detectProfiles.js +98 -0
  12. package/src/cli/commands/detectProject.js +112 -0
  13. package/src/cli/commands/generateProfileGuide.js +91 -0
  14. package/src/cli/commands/pruneSnapshot.js +106 -0
  15. package/src/cli/commands/restoreSnapshot.js +173 -0
  16. package/src/cli/commands/setupGemini.js +149 -0
  17. package/src/cli/commands/setupGemini.test.js +115 -0
  18. package/src/cli/commands/trainTokens.js +38 -0
  19. package/src/config.js +81 -0
  20. package/src/services/authService.js +20 -0
  21. package/src/services/claudeCliService.js +621 -0
  22. package/src/services/claudeCliService.test.js +267 -0
  23. package/src/services/dispatcherService.js +33 -0
  24. package/src/services/gptService.js +302 -0
  25. package/src/services/gptService.test.js +120 -0
  26. package/src/templates/agent-prompt.template.md +29 -0
  27. package/src/templates/architect-prompt.template.md +50 -0
  28. package/src/templates/envScanRequest.md +4 -0
  29. package/src/templates/gitWorkflow.md +32 -0
  30. package/src/templates/multiAgent.md +164 -0
  31. package/src/templates/vectorMode.md +22 -0
  32. package/src/utils/aiHeader.js +303 -0
  33. package/src/utils/fileUtils.js +928 -0
  34. package/src/utils/projectDetector.js +704 -0
  35. package/src/utils/tokenEstimator.js +198 -0
  36. package/.ecksnapshot.config.js +0 -35
package/src/cli/commands/createSnapshot.js (new file)
@@ -0,0 +1,601 @@
+ import fs from 'fs/promises';
+ import path from 'path';
+ import { execa } from 'execa';
+ import pLimit from 'p-limit';
+ import { SingleBar, Presets } from 'cli-progress';
+ import isBinaryPath from 'is-binary-path';
+ import zlib from 'zlib';
+ import { promisify } from 'util';
+ import ora from 'ora';
+ import micromatch from 'micromatch';
+
+ import {
+   parseSize, formatSize, matchesPattern, checkGitRepository,
+   scanDirectoryRecursively, loadGitignore, readFileWithSizeCheck,
+   generateDirectoryTree, loadConfig, displayProjectInfo, loadProjectEckManifest,
+   ensureSnapshotsInGitignore, initializeEckManifest
+ } from '../../utils/fileUtils.js';
+ import { detectProjectType, getProjectSpecificFiltering } from '../../utils/projectDetector.js';
+ import { estimateTokensWithPolynomial, generateTrainingCommand } from '../../utils/tokenEstimator.js';
+ import { loadSetupConfig, getProfile } from '../../config.js';
+ import { applyProfileFilter } from '../../utils/fileUtils.js';
+
+ /**
+  * Creates dynamic project context based on detection results
+  */
+ function createDynamicProjectContext(detection) {
+   const { type, details } = detection;
+   const context = {
+     name: details.name || 'detected-project',
+     type: type,
+     detectedAt: new Date().toISOString()
+   };
+
+   // Create architecture info based on project type
+   const architecture = {
+     stack: [],
+     structure: type
+   };
+
+   switch (type) {
+     case 'android':
+       architecture.stack = ['Android', details.language || 'Java', 'Gradle'];
+       if (details.packageName) {
+         context.packageName = details.packageName;
+       }
+       break;
+
+     case 'nodejs':
+       architecture.stack = ['Node.js'];
+       if (details.framework) {
+         architecture.stack.push(details.framework);
+       }
+       if (details.hasTypescript) {
+         architecture.stack.push('TypeScript');
+       }
+       break;
+
+     case 'nodejs-monorepo':
+       architecture.stack = ['Node.js', 'Monorepo'];
+       if (details.monorepoTool) {
+         architecture.stack.push(details.monorepoTool);
+       }
+       if (details.framework) {
+         architecture.stack.push(details.framework);
+       }
+       if (details.hasTypescript) {
+         architecture.stack.push('TypeScript');
+       }
+       break;
+
+     case 'python-poetry':
+     case 'python-pip':
+     case 'python-conda':
+       architecture.stack = ['Python'];
+       if (details.packageManager) {
+         architecture.stack.push(details.packageManager);
+       }
+       break;
+
+     case 'django':
+       architecture.stack = ['Python', 'Django'];
+       break;
+
+     case 'flask':
+       architecture.stack = ['Python', 'Flask'];
+       break;
+
+     case 'rust':
+       architecture.stack = ['Rust', 'Cargo'];
+       if (details.edition) {
+         architecture.stack.push(`Rust ${details.edition}`);
+       }
+       break;
+
+     case 'go':
+       architecture.stack = ['Go'];
+       if (details.goVersion) {
+         architecture.stack.push(`Go ${details.goVersion}`);
+       }
+       break;
+
+     case 'dotnet':
+       architecture.stack = ['.NET'];
+       if (details.language) {
+         architecture.stack.push(details.language);
+       }
+       break;
+
+     case 'flutter':
+       architecture.stack = ['Flutter', 'Dart'];
+       break;
+
+     case 'react-native':
+       architecture.stack = ['React Native', 'JavaScript'];
+       if (details.hasTypescript) {
+         architecture.stack.push('TypeScript');
+       }
+       break;
+
+     default:
+       architecture.stack = ['Unknown'];
+   }
+
+   context.architecture = architecture;
+
+   return context;
+ }
+ import { generateEnhancedAIHeader } from '../../utils/aiHeader.js';
+
+ const gzip = promisify(zlib.gzip);
+
+ async function getProjectFiles(projectPath, config) {
+   const isGitRepo = await checkGitRepository(projectPath);
+   if (isGitRepo) {
+     const { stdout } = await execa('git', ['ls-files'], { cwd: projectPath });
+     return stdout.split('\n').filter(Boolean);
+   }
+   return scanDirectoryRecursively(projectPath, config);
+ }
+
+ async function getGitCommitHash(projectPath) {
+   try {
+     const isGitRepo = await checkGitRepository(projectPath);
+     if (isGitRepo) {
+       const { stdout } = await execa('git', ['rev-parse', '--short=7', 'HEAD'], { cwd: projectPath });
+       return stdout.trim();
+     }
+   } catch (error) {
+     // Ignore errors - not a git repo or no commits
+   }
+   return null;
+ }
+
+ async function estimateProjectTokens(projectPath, config, projectType = null) {
+   // Get project-specific filtering if not provided
+   if (!projectType) {
+     const detection = await detectProjectType(projectPath);
+     projectType = detection.type;
+   }
+
+   const projectSpecific = await getProjectSpecificFiltering(projectType);
+
+   // Merge project-specific filters with global config (same as in scanDirectoryRecursively)
+   const effectiveConfig = {
+     ...config,
+     dirsToIgnore: [...(config.dirsToIgnore || []), ...(projectSpecific.dirsToIgnore || [])],
+     filesToIgnore: [...(config.filesToIgnore || []), ...(projectSpecific.filesToIgnore || [])],
+     extensionsToIgnore: [...(config.extensionsToIgnore || []), ...(projectSpecific.extensionsToIgnore || [])]
+   };
+
+   const files = await getProjectFiles(projectPath, effectiveConfig);
+   const gitignore = await loadGitignore(projectPath);
+   const maxFileSize = parseSize(effectiveConfig.maxFileSize);
+   let totalSize = 0;
+   let includedFiles = 0;
+
+   for (const file of files) {
+     try {
+       const normalizedPath = file.replace(/\\/g, '/');
+
+       // Apply the same filtering logic as in runFileSnapshot
+       if (effectiveConfig.dirsToIgnore.some(dir => normalizedPath.startsWith(dir))) {
+         continue;
+       }
+
+       if (gitignore.ignores(normalizedPath)) {
+         continue;
+       }
+
+       if (isBinaryPath(file)) {
+         continue;
+       }
+
+       const fileExtension = path.extname(file);
+       if (effectiveConfig.extensionsToIgnore.includes(fileExtension)) {
+         continue;
+       }
+
+       if (matchesPattern(normalizedPath, effectiveConfig.filesToIgnore)) {
+         continue;
+       }
+
+       const stats = await fs.stat(path.join(projectPath, file));
+       if (stats.size > maxFileSize) {
+         continue;
+       }
+
+       totalSize += stats.size;
+       includedFiles++;
+     } catch (e) { /* ignore errors for estimation */ }
+   }
+
+   // Use adaptive polynomial estimation
+   const estimatedTokens = await estimateTokensWithPolynomial(projectType, totalSize);
+
+   return { estimatedTokens, totalSize, includedFiles };
+ }
+
+ async function processProjectFiles(repoPath, options, config, projectType = null) {
+   const originalCwd = process.cwd();
+   console.log(`\nšŸ“ø Processing files for: ${path.basename(repoPath)}`);
+
+   const stats = {
+     totalFiles: 0,
+     includedFiles: 0,
+     excludedFiles: 0,
+     binaryFiles: 0,
+     oversizedFiles: 0,
+     ignoredFiles: 0,
+     totalSize: 0,
+     processedSize: 0,
+     errors: [],
+     skipReasons: new Map(),
+     skippedFilesDetails: new Map()
+   };
+
+   try {
+     process.chdir(repoPath);
+
+     console.log('šŸ” Scanning repository...');
+     let allFiles = await getProjectFiles(repoPath, config);
+
+     if (options.profile) {
+       console.log(`Applying profile filter: '${options.profile}'...`);
+       allFiles = await applyProfileFilter(allFiles, options.profile, repoPath);
+       console.log(`Filtered down to ${allFiles.length} files based on profile rules.`);
+       if (allFiles.length === 0) {
+         throw new Error(`Profile filter '${options.profile}' resulted in 0 files. Aborting.`);
+       }
+     }
+     const gitignore = await loadGitignore(repoPath);
+     stats.totalFiles = allFiles.length;
+
+     console.log(`šŸ“Š Found ${stats.totalFiles} files`);
+
+     const progressBar = new SingleBar({
+       format: 'šŸ“„ Processing |{bar}| {percentage}% | {value}/{total} files | {filename}',
+       barCompleteChar: '\u2588',
+       barIncompleteChar: '\u2591',
+       hideCursor: true
+     }, Presets.rect);
+     progressBar.start(allFiles.length, 0);
+
+     const trackSkippedFile = (filePath, reason) => {
+       if (!stats.skippedFilesDetails.has(reason)) {
+         stats.skippedFilesDetails.set(reason, []);
+       }
+       stats.skippedFilesDetails.get(reason).push(filePath);
+       stats.skipReasons.set(reason, (stats.skipReasons.get(reason) || 0) + 1);
+     };
+
+     const limit = pLimit(config.concurrency);
+     const processFile = async (filePath, index) => {
+       const normalizedPath = filePath.replace(/\\/g, '/');
+       progressBar.update(index + 1, { filename: normalizedPath.slice(0, 50) });
+
+       try {
+         // Check if file should be ignored by directory patterns
+         if (config.dirsToIgnore.some(dir => normalizedPath.startsWith(dir))) {
+           stats.ignoredFiles++;
+           trackSkippedFile(normalizedPath, 'Directory ignore patterns');
+           return null;
+         }
+
+         // Check gitignore patterns
+         if (gitignore.ignores(normalizedPath)) {
+           stats.ignoredFiles++;
+           trackSkippedFile(normalizedPath, 'Gitignore rules');
+           return null;
+         }
+
+         // Check if binary file
+         if (isBinaryPath(filePath)) {
+           stats.binaryFiles++;
+           trackSkippedFile(normalizedPath, 'Binary files');
+           return null;
+         }
+
+         // Check extensions and file patterns
+         const fileExtension = path.extname(filePath);
+         if (config.extensionsToIgnore.includes(fileExtension)) {
+           stats.excludedFiles++;
+           trackSkippedFile(normalizedPath, `File extension filter (${fileExtension})`);
+           return null;
+         }
+
+         if (matchesPattern(normalizedPath, config.filesToIgnore)) {
+           stats.excludedFiles++;
+           trackSkippedFile(normalizedPath, 'File pattern filter');
+           return null;
+         }
+
+         // Read file with size check
+         const fullPath = path.join(repoPath, filePath);
+         const fileStats = await fs.stat(fullPath);
+         stats.totalSize += fileStats.size;
+
+         const maxFileSize = parseSize(config.maxFileSize);
+         if (fileStats.size > maxFileSize) {
+           stats.oversizedFiles++;
+           trackSkippedFile(normalizedPath, `File too large (${formatSize(fileStats.size)} > ${formatSize(maxFileSize)})`);
+           return null;
+         }
+
+         const content = await readFileWithSizeCheck(fullPath, maxFileSize);
+         stats.includedFiles++;
+         stats.processedSize += fileStats.size;
+         let outputBody = content;
+
+         // Apply max-lines-per-file truncation if specified
+         if (options.maxLinesPerFile && options.maxLinesPerFile > 0) {
+           const lines = outputBody.split('\n');
+           if (lines.length > options.maxLinesPerFile) {
+             outputBody = lines.slice(0, options.maxLinesPerFile).join('\n') +
+               `\n\n[... truncated ${lines.length - options.maxLinesPerFile} lines ...]`;
+           }
+         }
+
+         return {
+           content: `--- File: /${normalizedPath} ---\n\n${outputBody}\n\n`,
+           path: normalizedPath,
+           size: fileStats.size
+         };
+       } catch (error) {
+         stats.errors.push(`${normalizedPath}: ${error.message}`);
+         trackSkippedFile(normalizedPath, `Error: ${error.message}`);
+         return null;
+       }
+     };
+
+     const results = await Promise.all(allFiles.map((fp, index) => limit(() => processFile(fp, index))));
+     progressBar.stop();
+
+     const successfulFileObjects = results.filter(Boolean);
+     const contentArray = successfulFileObjects.map(f => f.content);
+
+     // Return all processed data instead of writing file
+     return {
+       stats,
+       contentArray,
+       successfulFileObjects,
+       allFiles,
+       originalCwd,
+       repoPath
+     };
+
+   } finally {
+     process.chdir(originalCwd); // Ensure we always change back
+   }
+ }
+
+ export async function createRepoSnapshot(repoPath, options) {
+   const spinner = ora('Analyzing project...').start();
+   try {
+     // Ensure snapshots/ is in .gitignore to prevent accidental commits
+     await ensureSnapshotsInGitignore(repoPath);
+
+     // Initialize .eck manifest directory if it doesn't exist
+     await initializeEckManifest(repoPath);
+
+     // Auto-commit unstaged changes if in a git repo
+     const isGitRepo = await checkGitRepository(repoPath);
+     if (isGitRepo) {
+       spinner.text = 'Checking for unstaged changes...';
+       try {
+         const { stdout: status } = await execa('git', ['status', '--porcelain'], { cwd: repoPath });
+         if (status) {
+           spinner.text = 'Unstaged changes detected. Auto-committing...';
+           await execa('git', ['add', '.'], { cwd: repoPath });
+           const timestamp = new Date().toISOString().slice(0, 19).replace('T', '_').replace(/:/g, '-');
+           await execa('git', ['commit', '-m', `chore(snapshot): Auto-commit before snapshot [${timestamp}]`], { cwd: repoPath });
+           spinner.info('Auto-commit complete.');
+         } else {
+           // No changes, do nothing. Logging this would be too verbose.
+         }
+       } catch (e) {
+         spinner.warn(`Auto-commit failed: ${e.message}`);
+       }
+     }
+     spinner.text = 'Analyzing project...'; // Reset spinner text
+
+     // Detect project type first
+     const projectDetection = await detectProjectType(repoPath);
+     spinner.stop();
+     displayProjectInfo(projectDetection);
+
+     const setupConfig = await loadSetupConfig();
+     const userConfig = await loadConfig(options.config);
+
+     // Update project context based on detection
+     if (projectDetection.type !== 'unknown' && projectDetection.details) {
+       setupConfig.projectContext = createDynamicProjectContext(projectDetection);
+     }
+
+     // Merge configs: setup.json base, user overrides, command options
+     const config = {
+       ...userConfig, // Start with old defaults
+       ...setupConfig.fileFiltering, // Overwrite with setup.json values
+       ...setupConfig.performance,
+       defaultFormat: setupConfig.output?.defaultFormat || 'md',
+       aiHeaderEnabled: setupConfig.aiInstructions?.header?.defaultEnabled ?? true,
+       ...options // Command-line options have the final say
+     };
+
+     // Apply defaults for options that may not be provided via command line
+     if (!config.output) {
+       config.output = setupConfig.output?.defaultPath || './snapshots';
+     }
+     // For tree option, we need to check if --no-tree was explicitly passed
+     // Commander.js sets tree to false when --no-tree is passed, true otherwise
+     // We only want to use the config default if the user didn't specify --no-tree
+     if (!('noTree' in options)) {
+       // User didn't pass --no-tree, so we can use the config default
+       config.tree = setupConfig.output?.includeTree ?? true;
+     }
+     if (config.includeHidden === undefined) {
+       config.includeHidden = setupConfig.fileFiltering?.includeHidden ?? false;
+     }
+
+     const estimation = await estimateProjectTokens(repoPath, config, projectDetection.type);
+     spinner.info(`Estimated project size: ~${Math.round(estimation.estimatedTokens).toLocaleString()} tokens.`);
+
+     spinner.succeed('Creating snapshots...');
+
+     // Step 1: Process all files ONCE
+     const {
+       stats,
+       contentArray,
+       successfulFileObjects,
+       allFiles,
+       originalCwd: processingOriginalCwd, // We get originalCwd from the processing function
+       repoPath: processedRepoPath
+     } = await processProjectFiles(repoPath, options, config, projectDetection.type);
+
+     const originalCwd = process.cwd(); // Get CWD *before* chdir
+     process.chdir(processedRepoPath); // Go back to repo path for git hash and tree
+
+     try {
+       // --- Common Data ---
+       const timestamp = new Date().toISOString().slice(0, 19).replace('T', '_').replace(/:/g, '-');
+       const repoName = path.basename(processedRepoPath);
+       const gitHash = await getGitCommitHash(processedRepoPath);
+       const fileExtension = options.format || config.defaultFormat || 'md';
+       const outputPath = options.output || path.resolve(originalCwd, config.output);
+       await fs.mkdir(outputPath, { recursive: true });
+
+       const shouldIncludeTree = config.tree && !options.noTree;
+       let directoryTree = '';
+       if (shouldIncludeTree) {
+         console.log('🌳 Generating directory tree...');
+         directoryTree = await generateDirectoryTree(processedRepoPath, '', allFiles, 0, config.maxDepth || 10, config);
+       }
+
+       // Calculate included file stats by extension
+       const includedFilesByType = new Map();
+       for (const fileObj of successfulFileObjects) {
+         try {
+           let ext = path.extname(fileObj.path);
+           if (ext === '') ext = '.no-extension';
+           includedFilesByType.set(ext, (includedFilesByType.get(ext) || 0) + 1);
+         } catch (e) { /* Silently ignore */ }
+       }
+       const sortedIncludedStats = [...includedFilesByType.entries()].sort((a, b) => b[1] - a[1]);
+
+       // Calculate Top 10 Largest Files
+       const largestFiles = [...successfulFileObjects].sort((a, b) => b.size - a.size).slice(0, 10);
+
+       const fileBody = (directoryTree ? `\n## Directory Structure\n\n\`\`\`\n${directoryTree}\`\`\`\n\n` : '') + contentArray.join('');
+
+       // --- File 1: Architect Snapshot ---
+       const architectOptions = { ...options, agent: false };
+       // Load manifest for headers
+       const eckManifest = await loadProjectEckManifest(processedRepoPath);
+       const isGitRepo = await checkGitRepository(processedRepoPath);
+
+       const architectHeader = await generateEnhancedAIHeader({ stats, repoName, mode: 'file', eckManifest, options: architectOptions, repoPath: processedRepoPath }, isGitRepo);
+       const architectBaseFilename = `${repoName}_snapshot_${timestamp}${gitHash ? `_${gitHash}` : ''}`;
+       const architectFilename = `${architectBaseFilename}.${fileExtension}`;
+       const architectFilePath = path.join(outputPath, architectFilename);
+       await fs.writeFile(architectFilePath, architectHeader + fileBody);
+
+       // --- File 2: Junior Architect Snapshot ---
+       let jaFilePath = null;
+       if (options.withJa && fileExtension === 'md') { // Only create JA snapshot if requested and main is MD
+         console.log('šŸ–‹ļø Generating Junior Architect (_ja) snapshot...');
+         const jaOptions = { ...options, agent: true, noTree: false, noAiHeader: false };
+         const jaHeader = await generateEnhancedAIHeader({ stats, repoName, mode: 'file', eckManifest, options: jaOptions, repoPath: processedRepoPath }, isGitRepo);
+         const jaFilename = `${architectBaseFilename}_ja.${fileExtension}`;
+         jaFilePath = path.join(outputPath, jaFilename);
+         await fs.writeFile(jaFilePath, jaHeader + fileBody);
+       }
+
+       // --- Combined Report ---
+       console.log('\nāœ… Snapshot generation complete!');
+       console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
+       console.log(`šŸ“„ Architect File: ${architectFilePath}`);
+       if (jaFilePath) {
+         console.log(`šŸ“„ Junior Arch File: ${jaFilePath}`);
+       }
+       console.log(`šŸ“Š Files processed: ${stats.includedFiles}/${stats.totalFiles}`);
+       console.log(`šŸ“ Total size: ${formatSize(stats.totalSize)}`);
+       console.log(`šŸ“¦ Processed size: ${formatSize(stats.processedSize)}`);
+       console.log(`šŸ“‹ Format: ${fileExtension.toUpperCase()}`);
+
+       if (sortedIncludedStats.length > 0) {
+         console.log('\nšŸ“¦ Included File Types:');
+         console.log('---------------------------------');
+         for (const [ext, count] of sortedIncludedStats.slice(0, 10)) {
+           console.log(` - ${String(ext).padEnd(15)} ${String(count).padStart(5)} files`);
+         }
+         if (sortedIncludedStats.length > 10) {
+           console.log(` ... and ${sortedIncludedStats.length - 10} other types.`);
+         }
+       }
+
+       if (largestFiles.length > 0) {
+         console.log('\n🐘 Top 10 Largest Files (Included):');
+         console.log('---------------------------------');
+         for (const fileObj of largestFiles) {
+           console.log(` - ${String(formatSize(fileObj.size)).padEnd(15)} ${fileObj.path}`);
+         }
+       }
+
+       // Excluded/Skipped Files Section
+       const hasExcludedContent = stats.excludedFiles > 0 || stats.binaryFiles > 0 || stats.oversizedFiles > 0 || stats.ignoredFiles > 0 || stats.errors.length > 0;
+       if (hasExcludedContent) {
+         console.log('\n🚫 Excluded/Skipped Files:');
+         console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
+       }
+
+       if (stats.excludedFiles > 0) {
+         console.log(`🚫 Excluded files: ${stats.excludedFiles}`);
+       }
+       if (stats.binaryFiles > 0) {
+         console.log(`šŸ“± Binary files skipped: ${stats.binaryFiles}`);
+       }
+       if (stats.oversizedFiles > 0) {
+         console.log(`šŸ“ Oversized files skipped: ${stats.oversizedFiles}`);
+       }
+       if (stats.ignoredFiles > 0) {
+         console.log(`šŸ™ˆ Ignored files: ${stats.ignoredFiles}`);
+       }
+       if (stats.errors.length > 0) {
+         console.log(`āŒ Errors: ${stats.errors.length}`);
+         if (options.verbose) {
+           stats.errors.forEach(err => console.log(` ${err}`));
+         }
+       }
+
+       // Print detailed skip reasons report
+       if (stats.skippedFilesDetails.size > 0) {
+         console.log('\nšŸ“‹ Skip Reasons:');
+         console.log('---------------------------------');
+
+         for (const [reason, files] of stats.skippedFilesDetails.entries()) {
+           console.log(`\nšŸ”ø ${reason} (${files.length} files):`);
+           files.forEach(file => {
+             console.log(` • ${file}`);
+           });
+         }
+         console.log('---------------------------------');
+       } else {
+         console.log('---------------------------------');
+       }
+
+       // Generate training command string if estimation data is available
+       if (estimation && projectDetection.type && !options.profile) {
+         const trainingCommand = generateTrainingCommand(projectDetection.type, estimation.estimatedTokens, estimation.totalSize, repoPath);
+         console.log('\nšŸŽÆ To improve token estimation accuracy, run this command after checking actual tokens:');
+         console.log(`${trainingCommand}[ACTUAL_TOKENS_HERE]`);
+         console.log(' Replace [ACTUAL_TOKENS_HERE] with the real token count from your LLM');
+       }
+
+     } finally {
+       process.chdir(originalCwd); // Final reset back to original CWD
+     }
+   } catch (error) {
+     spinner.fail(`Operation failed: ${error.message}`);
+     process.exit(1);
+   }
+ }
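
The effective config in createRepoSnapshot above is built purely from object-spread order, so later sources win: loadConfig() defaults are overridden by setup.json's fileFiltering and performance sections, and command-line options override everything. A minimal standalone sketch of that precedence (key names mirror ones used in the file; the values are invented placeholders, not the package's real defaults):

// Illustrative only: later spreads overwrite earlier ones.
const userConfig  = { maxFileSize: '1MB', concurrency: 4 };   // stand-in for loadConfig() output
const setupConfig = {                                         // stand-in for setup.json
  fileFiltering: { maxFileSize: '512KB' },
  performance: { concurrency: 8 },
  output: { defaultFormat: 'md' }
};
const options = { maxFileSize: '2MB' };                       // stand-in for parsed CLI flags

const config = {
  ...userConfig,
  ...setupConfig.fileFiltering,
  ...setupConfig.performance,
  defaultFormat: setupConfig.output?.defaultFormat || 'md',
  ...options
};

console.log(config); // { maxFileSize: '2MB', concurrency: 8, defaultFormat: 'md' }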
package/src/cli/commands/detectProfiles.js (new file)
@@ -0,0 +1,98 @@
+ import fs from 'fs/promises';
+ import path from 'path';
+ import ora from 'ora';
+ import { executePrompt as askClaude } from '../../services/claudeCliService.js';
+ import { scanDirectoryRecursively, generateDirectoryTree, initializeEckManifest, loadConfig } from '../../utils/fileUtils.js';
+ import { loadSetupConfig } from '../../config.js';
+
+ /**
+  * Extracts a JSON object from a string that might contain markdown wrappers or log output.
+  * Finds the first opening brace '{' and the last closing brace '}' to extract the JSON.
+  */
+ function extractJson(text) {
+   const match = text.match(/```(json)?([\s\S]*?)```/);
+   if (match && match[2]) {
+     return match[2].trim();
+   }
+
+   const firstBrace = text.indexOf('{');
+   const lastBrace = text.lastIndexOf('}');
+
+   if (firstBrace !== -1 && lastBrace !== -1 && lastBrace > firstBrace) {
+     return text.substring(firstBrace, lastBrace + 1).trim();
+   }
+
+   return text.trim();
+ }
+
+ /**
+  * Scans the project structure, saves the directory tree to a file, and asks an AI to generate
+  * context profiles, saving them to .eck/profiles.json.
+  */
+ export async function detectProfiles(repoPath, options) {
+   const spinner = ora('Initializing and scanning project structure...').start();
+   try {
+     await initializeEckManifest(repoPath);
+
+     const setupConfig = await loadSetupConfig();
+     const userConfig = await loadConfig(options.config);
+     const config = {
+       ...userConfig,
+       ...setupConfig.fileFiltering,
+       ...setupConfig.performance
+     };
+
+     const allFiles = await scanDirectoryRecursively(repoPath, config, repoPath);
+     spinner.text = 'Generating directory tree...';
+     const dirTree = await generateDirectoryTree(repoPath, '', allFiles, 0, config.maxDepth, config);
+
+     if (!dirTree) {
+       throw new Error('Failed to generate directory tree or project is empty.');
+     }
+
+     spinner.text = 'Saving directory tree to file...';
+     const treeFilePath = path.join(repoPath, '.eck', 'directory_tree_for_profiling.md');
+     await fs.writeFile(treeFilePath, dirTree);
+
+     const prompt = `You are a code architect. Based on the file directory tree found in the file at './.eck/directory_tree_for_profiling.md', please identify logical 'context profiles' for splitting the project.
+ Your output MUST be ONLY a valid JSON object.
+ The keys of the object MUST be the profile names (e.g., 'frontend', 'backend', 'core-logic', 'docs').
+ The values MUST be an object containing 'include' and 'exclude' arrays of glob patterns.
+ Example: {"frontend": {"include": ["packages/ui/**"], "exclude": []}, "docs": {"include": ["docs/**"], "exclude": []}}.
+ DO NOT add any conversational text, introductory sentences, or explanations. Your entire response must be ONLY the JSON object.`;
+
+     spinner.text = 'Asking AI to analyze directory tree and detect profiles...';
+     const aiResponseObject = await askClaude(prompt, { taskSize: allFiles.length });
+     const rawText = aiResponseObject.result;
+
+     if (!rawText || typeof rawText.replace !== 'function') {
+       throw new Error(`AI returned invalid content type: ${typeof rawText}`);
+     }
+
+     spinner.text = 'Saving generated profiles...';
+     const cleanedJson = extractJson(rawText);
+     let parsedProfiles;
+     try {
+       parsedProfiles = JSON.parse(cleanedJson);
+     } catch (e) {
+       console.error('\nInvalid JSON received from AI:', cleanedJson);
+       throw new Error(`AI returned invalid JSON: ${e.message}`);
+     }
+
+     const outputPath = path.join(repoPath, '.eck', 'profiles.json');
+     await fs.writeFile(outputPath, JSON.stringify(parsedProfiles, null, 2));
+
+     const profileKeys = Object.keys(parsedProfiles);
+     spinner.succeed(`Successfully detected and saved ${profileKeys.length} profiles to ${outputPath}`);
+
+     console.log('\n✨ Detected Profiles:');
+     console.log('---------------------------');
+     for (const profileName of profileKeys) {
+       console.log(` - ${profileName}`);
+     }
+     console.log('\nYou can now use these profile names with the --profile flag.');
+
+   } catch (error) {
+     spinner.fail(`Failed to detect profiles: ${error.message}`);
+   }
+ }
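
extractJson above tries a Markdown code fence first and only then falls back to slicing between the outermost braces, which is what lets detectProfiles tolerate chatty model output around the JSON. A self-contained sketch of that same fallback order (the sample strings are illustrative, not captured model responses):

// Same extraction order as extractJson in detectProfiles.js: fenced block first, then outermost braces.
function extractJsonSketch(text) {
  const match = text.match(/```(json)?([\s\S]*?)```/);
  if (match && match[2]) return match[2].trim();
  const firstBrace = text.indexOf('{');
  const lastBrace = text.lastIndexOf('}');
  if (firstBrace !== -1 && lastBrace !== -1 && lastBrace > firstBrace) {
    return text.substring(firstBrace, lastBrace + 1).trim();
  }
  return text.trim();
}

const fenced = '```json\n{"frontend":{"include":["packages/ui/**"],"exclude":[]}}\n```';
const chatty = 'Here are your profiles: {"docs":{"include":["docs/**"],"exclude":[]}} Hope that helps!';
console.log(JSON.parse(extractJsonSketch(fenced)));  // { frontend: { include: [ 'packages/ui/**' ], exclude: [] } }
console.log(JSON.parse(extractJsonSketch(chatty)));  // { docs: { include: [ 'docs/**' ], exclude: [] } }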