@xelth/eck-snapshot 4.0.0 → 4.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -8,17 +8,21 @@ import zlib from 'zlib';
  import { promisify } from 'util';
  import ora from 'ora';
  import micromatch from 'micromatch';
+ import chalk from 'chalk';

  import {
- parseSize, formatSize, matchesPattern, checkGitRepository,
- scanDirectoryRecursively, loadGitignore, readFileWithSizeCheck,
+ parseSize, formatSize, matchesPattern, checkGitRepository,
+ scanDirectoryRecursively, loadGitignore, readFileWithSizeCheck,
  generateDirectoryTree, loadConfig, displayProjectInfo, loadProjectEckManifest,
- ensureSnapshotsInGitignore, initializeEckManifest
+ ensureSnapshotsInGitignore, initializeEckManifest, generateTimestamp,
+ SecretScanner
  } from '../../utils/fileUtils.js';
  import { detectProjectType, getProjectSpecificFiltering } from '../../utils/projectDetector.js';
  import { estimateTokensWithPolynomial, generateTrainingCommand } from '../../utils/tokenEstimator.js';
  import { loadSetupConfig, getProfile } from '../../config.js';
  import { applyProfileFilter } from '../../utils/fileUtils.js';
+ import { saveGitAnchor } from '../../utils/gitUtils.js';
+ import { skeletonize } from '../../core/skeletonizer.js';

  /**
  * Creates dynamic project context based on detection results
@@ -30,13 +34,13 @@ function createDynamicProjectContext(detection) {
  type: type,
  detectedAt: new Date().toISOString()
  };
-
+
  // Create architecture info based on project type
  const architecture = {
  stack: [],
  structure: type
  };
-
+
  switch (type) {
  case 'android':
  architecture.stack = ['Android', details.language || 'Java', 'Gradle'];
@@ -44,7 +48,7 @@ function createDynamicProjectContext(detection) {
  context.packageName = details.packageName;
  }
  break;
-
+
  case 'nodejs':
  architecture.stack = ['Node.js'];
  if (details.framework) {
@@ -54,7 +58,7 @@ function createDynamicProjectContext(detection) {
  architecture.stack.push('TypeScript');
  }
  break;
-
+
  case 'nodejs-monorepo':
  architecture.stack = ['Node.js', 'Monorepo'];
  if (details.monorepoTool) {
@@ -67,7 +71,7 @@ function createDynamicProjectContext(detection) {
  architecture.stack.push('TypeScript');
  }
  break;
-
+
  case 'python-poetry':
  case 'python-pip':
  case 'python-conda':
@@ -76,64 +80,130 @@ function createDynamicProjectContext(detection) {
  architecture.stack.push(details.packageManager);
  }
  break;
-
+
  case 'django':
  architecture.stack = ['Python', 'Django'];
  break;
-
+
  case 'flask':
  architecture.stack = ['Python', 'Flask'];
  break;
-
+
  case 'rust':
  architecture.stack = ['Rust', 'Cargo'];
  if (details.edition) {
  architecture.stack.push(`Rust ${details.edition}`);
  }
  break;
-
+
  case 'go':
  architecture.stack = ['Go'];
  if (details.goVersion) {
  architecture.stack.push(`Go ${details.goVersion}`);
  }
  break;
-
+
  case 'dotnet':
  architecture.stack = ['.NET'];
  if (details.language) {
  architecture.stack.push(details.language);
  }
  break;
-
+
  case 'flutter':
  architecture.stack = ['Flutter', 'Dart'];
  break;
-
+
  case 'react-native':
  architecture.stack = ['React Native', 'JavaScript'];
  if (details.hasTypescript) {
  architecture.stack.push('TypeScript');
  }
  break;
-
+
  default:
  architecture.stack = ['Unknown'];
  }
-
+
  context.architecture = architecture;
-
+
  return context;
  }
  import { generateEnhancedAIHeader } from '../../utils/aiHeader.js';

  const gzip = promisify(zlib.gzip);

+ /**
+ * Check if a path is a hidden directory/folder (starts with '.')
+ * This excludes all hidden folders like .git, .eck, .claude, .gemini from snapshots
+ * @param {string} filePath - File or directory path
+ * @returns {boolean} True if path is hidden
+ */
+ function isHiddenPath(filePath) {
+ // Check if path or any parent directory starts with '.'
+ const parts = filePath.split('/');
+ return parts.some(part => part.startsWith('.'));
+ }
+
+ /**
+ * Scans the .eck directory for confidential files
+ * @param {string} projectPath - Path to the project
+ * @param {object} config - Configuration object
+ * @returns {Promise<string[]>} Array of confidential file paths
+ */
+ async function scanEckForConfidentialFiles(projectPath, config) {
+ const eckPath = path.join(projectPath, '.eck');
+
+ try {
+ await fs.access(eckPath);
+ } catch {
+ return []; // .eck directory doesn't exist
+ }
+
+ const result = await scanDirectoryRecursively(eckPath, config, projectPath, null, true);
+ return result.confidentialFiles || [];
+ }
+
+ /**
+ * Generates CLAUDE.md content with references to confidential files
+ * @param {string[]} confidentialFiles - Array of confidential file paths
+ * @param {string} repoPath - Path to the repository
+ * @returns {string} CLAUDE.md content
+ */
+ function generateClaudeMdContent(confidentialFiles, repoPath) {
+ const content = [`# Project Access & Credentials Reference`, ``];
+
+ if (confidentialFiles.length === 0) {
+ content.push('No confidential files found in .eck directory.');
+ return content.join('\n');
+ }
+
+ content.push('## Access & Credentials');
+ content.push('');
+ content.push('The following confidential files are available locally but not included in snapshots:');
+ content.push('');
+
+ for (const file of confidentialFiles) {
+ const absolutePath = path.join(repoPath, file);
+ const fileName = path.basename(file);
+ content.push(`- **${fileName}**: \`${absolutePath}\``);
+ }
+
+ content.push('');
+ content.push('> **Note**: These files contain sensitive information and should only be accessed when needed.');
+ content.push('> They are excluded from snapshots for security reasons but can be referenced on demand.');
+
+ return content.join('\n');
+ }
+
  async function getProjectFiles(projectPath, config) {
  const isGitRepo = await checkGitRepository(projectPath);
  if (isGitRepo) {
  const { stdout } = await execa('git', ['ls-files'], { cwd: projectPath });
- return stdout.split('\n').filter(Boolean);
+ const gitFiles = stdout.split('\n').filter(Boolean);
+ // Filter out hidden directories/files (starting with '.')
+ const filteredFiles = gitFiles.filter(file => !isHiddenPath(file));
+ return filteredFiles;
  }
  return scanDirectoryRecursively(projectPath, config);
  }
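
Note on the hidden-path filter introduced above: `isHiddenPath()` flags a path when any segment starts with a dot, so `.eck`, `.claude`, `.gemini`, and dotfiles such as `.gitignore` now drop out of `getProjectFiles()` even when git tracks them. A minimal behaviour sketch (the helper body is copied from the hunk above; the sample paths are only illustrative):

```js
// Behaviour sketch for the isHiddenPath() helper added in this release.
// Paths are POSIX-style, as produced by `git ls-files`.
import assert from 'node:assert';

function isHiddenPath(filePath) {
  // A path counts as hidden if any segment starts with '.'
  const parts = filePath.split('/');
  return parts.some(part => part.startsWith('.'));
}

assert.equal(isHiddenPath('.eck/credentials.md'), true);       // hidden top-level directory
assert.equal(isHiddenPath('src/.cache/index.js'), true);       // hidden nested directory
assert.equal(isHiddenPath('.gitignore'), true);                // dotfile at the repo root
assert.equal(isHiddenPath('src/commands/snapshot.js'), false); // regular source file
```
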
@@ -157,9 +227,9 @@ async function estimateProjectTokens(projectPath, config, projectType = null) {
  const detection = await detectProjectType(projectPath);
  projectType = detection.type;
  }
-
+
  const projectSpecific = await getProjectSpecificFiltering(projectType);
-
+
  // Merge project-specific filters with global config (same as in scanDirectoryRecursively)
  const effectiveConfig = {
  ...config,
@@ -167,59 +237,59 @@ async function estimateProjectTokens(projectPath, config, projectType = null) {
  filesToIgnore: [...(config.filesToIgnore || []), ...(projectSpecific.filesToIgnore || [])],
  extensionsToIgnore: [...(config.extensionsToIgnore || []), ...(projectSpecific.extensionsToIgnore || [])]
  };
-
+
  const files = await getProjectFiles(projectPath, effectiveConfig);
  const gitignore = await loadGitignore(projectPath);
  const maxFileSize = parseSize(effectiveConfig.maxFileSize);
  let totalSize = 0;
  let includedFiles = 0;
-
+
  for (const file of files) {
  try {
  const normalizedPath = file.replace(/\\/g, '/');
-
+
  // Apply the same filtering logic as in runFileSnapshot
  if (effectiveConfig.dirsToIgnore.some(dir => normalizedPath.startsWith(dir))) {
  continue;
  }
-
+
  if (gitignore.ignores(normalizedPath)) {
  continue;
  }
-
+
  if (isBinaryPath(file)) {
  continue;
  }
-
+
  const fileExtension = path.extname(file);
  if (effectiveConfig.extensionsToIgnore.includes(fileExtension)) {
  continue;
  }
-
+
  if (matchesPattern(normalizedPath, effectiveConfig.filesToIgnore)) {
  continue;
  }
-
+
  const stats = await fs.stat(path.join(projectPath, file));
  if (stats.size > maxFileSize) {
  continue;
  }
-
+
  totalSize += stats.size;
  includedFiles++;
  } catch (e) { /* ignore errors for estimation */ }
  }
-
+
  // Use adaptive polynomial estimation
  const estimatedTokens = await estimateTokensWithPolynomial(projectType, totalSize);
-
+
  return { estimatedTokens, totalSize, includedFiles };
  }

  async function processProjectFiles(repoPath, options, config, projectType = null) {
  const originalCwd = process.cwd();
  console.log(`\nšŸ“ø Processing files for: ${path.basename(repoPath)}`);
-
+
  const stats = {
  totalFiles: 0,
  includedFiles: 0,
@@ -229,6 +299,7 @@ async function processProjectFiles(repoPath, options, config, projectType = null
  ignoredFiles: 0,
  totalSize: 0,
  processedSize: 0,
+ secretsRedacted: 0,
  errors: [],
  skipReasons: new Map(),
  skippedFilesDetails: new Map()
@@ -236,10 +307,15 @@ async function processProjectFiles(repoPath, options, config, projectType = null

  try {
  process.chdir(repoPath);
-
+
  console.log('šŸ” Scanning repository...');
  let allFiles = await getProjectFiles(repoPath, config);

+ // Filter the raw file list immediately so ignored files don't show up in the Tree
+ if (config.filesToIgnore && config.filesToIgnore.length > 0) {
+ allFiles = allFiles.filter(file => !matchesPattern(file, config.filesToIgnore));
+ }
+
  if (options.profile) {
  console.log(`Applying profile filter: '${options.profile}'...`);
  allFiles = await applyProfileFilter(allFiles, options.profile, repoPath);
@@ -250,9 +326,9 @@ async function processProjectFiles(repoPath, options, config, projectType = null
  }
  const gitignore = await loadGitignore(repoPath);
  stats.totalFiles = allFiles.length;
-
+
  console.log(`šŸ“Š Found ${stats.totalFiles} files`);
-
+
  const progressBar = new SingleBar({
  format: 'šŸ“„ Processing |{bar}| {percentage}% | {value}/{total} files | {filename}',
  barCompleteChar: '\u2588',
@@ -260,7 +336,7 @@ async function processProjectFiles(repoPath, options, config, projectType = null
  hideCursor: true
  }, Presets.rect);
  progressBar.start(allFiles.length, 0);
-
+
  const trackSkippedFile = (filePath, reason) => {
  if (!stats.skippedFilesDetails.has(reason)) {
  stats.skippedFilesDetails.set(reason, []);
@@ -268,34 +344,41 @@ async function processProjectFiles(repoPath, options, config, projectType = null
  stats.skippedFilesDetails.get(reason).push(filePath);
  stats.skipReasons.set(reason, (stats.skipReasons.get(reason) || 0) + 1);
  };
-
+
  const limit = pLimit(config.concurrency);
  const processFile = async (filePath, index) => {
  const normalizedPath = filePath.replace(/\\/g, '/');
  progressBar.update(index + 1, { filename: normalizedPath.slice(0, 50) });
-
+
  try {
+ // Skip all hidden directories and files (starting with '.')
+ if (isHiddenPath(normalizedPath)) {
+ stats.ignoredFiles++;
+ trackSkippedFile(normalizedPath, 'Hidden directories/files');
+ return null;
+ }
+
  // Check if file should be ignored by directory patterns
  if (config.dirsToIgnore.some(dir => normalizedPath.startsWith(dir))) {
  stats.ignoredFiles++;
  trackSkippedFile(normalizedPath, 'Directory ignore patterns');
  return null;
  }
-
+
  // Check gitignore patterns
  if (gitignore.ignores(normalizedPath)) {
  stats.ignoredFiles++;
  trackSkippedFile(normalizedPath, 'Gitignore rules');
  return null;
  }
-
+
  // Check if binary file
  if (isBinaryPath(filePath)) {
  stats.binaryFiles++;
  trackSkippedFile(normalizedPath, 'Binary files');
  return null;
  }
-
+
  // Check extensions and file patterns
  const fileExtension = path.extname(filePath);
  if (config.extensionsToIgnore.includes(fileExtension)) {
@@ -309,22 +392,43 @@ async function processProjectFiles(repoPath, options, config, projectType = null
  trackSkippedFile(normalizedPath, 'File pattern filter');
  return null;
  }
-
+
  // Read file with size check
  const fullPath = path.join(repoPath, filePath);
  const fileStats = await fs.stat(fullPath);
  stats.totalSize += fileStats.size;
-
+
  const maxFileSize = parseSize(config.maxFileSize);
  if (fileStats.size > maxFileSize) {
  stats.oversizedFiles++;
  trackSkippedFile(normalizedPath, `File too large (${formatSize(fileStats.size)} > ${formatSize(maxFileSize)})`);
  return null;
  }
-
- const content = await readFileWithSizeCheck(fullPath, maxFileSize);
+
+ let content = await readFileWithSizeCheck(fullPath, maxFileSize);
+
+ // Security scan for secrets
+ if (config.security?.scanForSecrets !== false) {
+ const scanResult = SecretScanner.redact(content, normalizedPath);
+ if (scanResult.found.length > 0) {
+ stats.secretsRedacted += scanResult.found.length;
+ console.log(chalk.yellow(`\n āš ļø Security: Found ${scanResult.found.join(', ')} in ${normalizedPath}. Redacting...`));
+ content = scanResult.content;
+ }
+ }
+
  stats.includedFiles++;
  stats.processedSize += fileStats.size;
+
+ // Apply skeletonization if enabled
+ if (options.skeleton) {
+ // Check if file should be focused (kept full)
+ const isFocused = options.focus && micromatch.isMatch(normalizedPath, options.focus);
+ if (!isFocused) {
+ content = await skeletonize(content, normalizedPath);
+ }
+ }
+
  let outputBody = content;

  // Apply max-lines-per-file truncation if specified
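
The secret-scanning block above leans on `SecretScanner.redact(content, path)` returning the redacted text together with a list of what was matched; the scanner itself is imported from `fileUtils.js` and its detection patterns are not part of this diff. A rough sketch of how `processFile()` consumes that contract (the `{ found, content }` return shape is inferred from the calling code, and the wrapper function name here is hypothetical):

```js
// Sketch only: how the new processFile() logic applies SecretScanner.redact().
// The { found, content } return shape is inferred from the hunk above; the real
// detector lives in ../../utils/fileUtils.js and is not shown in this diff.
import chalk from 'chalk';

function applySecretScan(content, normalizedPath, stats, config, SecretScanner) {
  if (config.security?.scanForSecrets === false) return content; // explicit opt-out
  const scanResult = SecretScanner.redact(content, normalizedPath);
  if (scanResult.found.length > 0) {
    stats.secretsRedacted += scanResult.found.length; // feeds the "šŸ” Security" report section
    console.log(chalk.yellow(`āš ļø  Security: Found ${scanResult.found.join(', ')} in ${normalizedPath}. Redacting...`));
    return scanResult.content; // redacted body replaces the original file content
  }
  return content;
}
```
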
@@ -350,7 +454,7 @@ async function processProjectFiles(repoPath, options, config, projectType = null

  const results = await Promise.all(allFiles.map((fp, index) => limit(() => processFile(fp, index))));
  progressBar.stop();
-
+
  const successfulFileObjects = results.filter(Boolean);
  const contentArray = successfulFileObjects.map(f => f.content);

@@ -363,7 +467,7 @@ async function processProjectFiles(repoPath, options, config, projectType = null
  originalCwd,
  repoPath
  };
-
+
  } finally {
  process.chdir(originalCwd); // Ensure we always change back
  }
@@ -374,7 +478,7 @@ export async function createRepoSnapshot(repoPath, options) {
  try {
  // Ensure snapshots/ is in .gitignore to prevent accidental commits
  await ensureSnapshotsInGitignore(repoPath);
-
+
  // Initialize .eck manifest directory if it doesn't exist
  await initializeEckManifest(repoPath);

@@ -387,8 +491,8 @@ export async function createRepoSnapshot(repoPath, options) {
  if (status) {
  spinner.text = 'Unstaged changes detected. Auto-committing...';
  await execa('git', ['add', '.'], { cwd: repoPath });
- const timestamp = new Date().toISOString().slice(0, 19).replace('T', '_').replace(/:/g, '-');
- await execa('git', ['commit', '-m', `chore(snapshot): Auto-commit before snapshot [${timestamp}]`], { cwd: repoPath });
+ const commitTimestamp = generateTimestamp();
+ await execa('git', ['commit', '-m', `chore(snapshot): Auto-commit before snapshot [${commitTimestamp}]`], { cwd: repoPath });
  spinner.info('Auto-commit complete.');
  } else {
  // No changes, do nothing. Logging this would be too verbose.
@@ -398,20 +502,20 @@ export async function createRepoSnapshot(repoPath, options) {
  }
  }
  spinner.text = 'Analyzing project...'; // Reset spinner text
-
+
  // Detect project type first
  const projectDetection = await detectProjectType(repoPath);
  spinner.stop();
  displayProjectInfo(projectDetection);
-
+
  const setupConfig = await loadSetupConfig();
  const userConfig = await loadConfig(options.config);
-
+
  // Update project context based on detection
  if (projectDetection.type !== 'unknown' && projectDetection.details) {
  setupConfig.projectContext = createDynamicProjectContext(projectDetection);
  }
-
+
  // Merge configs: setup.json base, user overrides, command options
  const config = {
  ...userConfig, // Start with old defaults
@@ -421,7 +525,16 @@ export async function createRepoSnapshot(repoPath, options) {
  aiHeaderEnabled: setupConfig.aiInstructions?.header?.defaultEnabled ?? true,
  ...options // Command-line options have the final say
  };
-
+
+ // If NOT in Junior Architect mode, hide JA-specific documentation to prevent context pollution
+ if (!options.withJa) {
+ if (!config.filesToIgnore) config.filesToIgnore = [];
+ config.filesToIgnore.push(
+ 'COMMANDS_REFERENCE.md',
+ 'codex_delegation_snapshot.md'
+ );
+ }
+
  // Apply defaults for options that may not be provided via command line
  if (!config.output) {
  config.output = setupConfig.output?.defaultPath || './snapshots';
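
One behavioural note before the final hunk: when skeleton mode is active (`options.skeleton`), the snapshot filename below gains an `_sk` suffix, and `processFile()` earlier in this diff reduces every file that does not match the `options.focus` glob(s) via `skeletonize()`. The focus test uses micromatch, which accepts a single glob string or an array of globs, so `options.focus` can be passed through unchanged; a small illustrative check (the pattern value is hypothetical):

```js
// Illustration of the focus check used in skeleton mode.
// micromatch.isMatch(path, patterns) takes a string or an array of glob patterns.
import micromatch from 'micromatch';

const focus = ['src/core/**']; // hypothetical value for options.focus

micromatch.isMatch('src/core/skeletonizer.js', focus); // true  -> file is kept in full
micromatch.isMatch('src/utils/fileUtils.js', focus);   // false -> file is skeletonized
```
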
@@ -441,159 +554,186 @@ export async function createRepoSnapshot(repoPath, options) {
  spinner.info(`Estimated project size: ~${Math.round(estimation.estimatedTokens).toLocaleString()} tokens.`);

  spinner.succeed('Creating snapshots...');
-
- // Step 1: Process all files ONCE
- const {
- stats,
- contentArray,
- successfulFileObjects,
- allFiles,
- originalCwd: processingOriginalCwd, // We get originalCwd from the processing function
- repoPath: processedRepoPath
- } = await processProjectFiles(repoPath, options, config, projectDetection.type);
-
- const originalCwd = process.cwd(); // Get CWD *before* chdir
- process.chdir(processedRepoPath); // Go back to repo path for git hash and tree

- try {
- // --- Common Data ---
- const timestamp = new Date().toISOString().slice(0, 19).replace('T', '_').replace(/:/g, '-');
- const repoName = path.basename(processedRepoPath);
- const gitHash = await getGitCommitHash(processedRepoPath);
- const fileExtension = options.format || config.defaultFormat || 'md';
- const outputPath = options.output || path.resolve(originalCwd, config.output);
- await fs.mkdir(outputPath, { recursive: true });
-
- const shouldIncludeTree = config.tree && !options.noTree;
- let directoryTree = '';
- if (shouldIncludeTree) {
- console.log('🌳 Generating directory tree...');
- directoryTree = await generateDirectoryTree(processedRepoPath, '', allFiles, 0, config.maxDepth || 10, config);
- }
+ // Step 1: Process all files ONCE
+ const {
+ stats,
+ contentArray,
+ successfulFileObjects,
+ allFiles,
+ originalCwd: processingOriginalCwd, // We get originalCwd from the processing function
+ repoPath: processedRepoPath
+ } = await processProjectFiles(repoPath, options, config, projectDetection.type);

- // Calculate included file stats by extension
- const includedFilesByType = new Map();
- for (const fileObj of successfulFileObjects) {
- try {
- let ext = path.extname(fileObj.path);
- if (ext === '') ext = '.no-extension';
- includedFilesByType.set(ext, (includedFilesByType.get(ext) || 0) + 1);
- } catch (e) { /* Silently ignore */ }
- }
- const sortedIncludedStats = [...includedFilesByType.entries()].sort((a, b) => b[1] - a[1]);
-
- // Calculate Top 10 Largest Files
- const largestFiles = [...successfulFileObjects].sort((a, b) => b.size - a.size).slice(0, 10);
-
- const fileBody = (directoryTree ? `\n## Directory Structure\n\n\`\`\`\n${directoryTree}\`\`\`\n\n` : '') + contentArray.join('');
-
- // --- File 1: Architect Snapshot ---
- const architectOptions = { ...options, agent: false };
- // Load manifest for headers
- const eckManifest = await loadProjectEckManifest(processedRepoPath);
- const isGitRepo = await checkGitRepository(processedRepoPath);
-
- const architectHeader = await generateEnhancedAIHeader({ stats, repoName, mode: 'file', eckManifest, options: architectOptions, repoPath: processedRepoPath }, isGitRepo);
- const architectBaseFilename = `${repoName}_snapshot_${timestamp}${gitHash ? `_${gitHash}` : ''}`;
- const architectFilename = `${architectBaseFilename}.${fileExtension}`;
- const architectFilePath = path.join(outputPath, architectFilename);
- await fs.writeFile(architectFilePath, architectHeader + fileBody);
-
- // --- File 2: Junior Architect Snapshot ---
- let jaFilePath = null;
- if (options.withJa && fileExtension === 'md') { // Only create JA snapshot if requested and main is MD
- console.log('šŸ–‹ļø Generating Junior Architect (_ja) snapshot...');
- const jaOptions = { ...options, agent: true, noTree: false, noAiHeader: false };
- const jaHeader = await generateEnhancedAIHeader({ stats, repoName, mode: 'file', eckManifest, options: jaOptions, repoPath: processedRepoPath }, isGitRepo);
- const jaFilename = `${architectBaseFilename}_ja.${fileExtension}`;
- jaFilePath = path.join(outputPath, jaFilename);
- await fs.writeFile(jaFilePath, jaHeader + fileBody);
- }
+ const originalCwd = process.cwd(); // Get CWD *before* chdir
+ process.chdir(processedRepoPath); // Go back to repo path for git hash and tree

- // --- Combined Report ---
- console.log('\nāœ… Snapshot generation complete!');
- console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
- console.log(`šŸ“„ Architect File: ${architectFilePath}`);
- if (jaFilePath) {
- console.log(`šŸ“„ Junior Arch File: ${jaFilePath}`);
- }
- console.log(`šŸ“Š Files processed: ${stats.includedFiles}/${stats.totalFiles}`);
- console.log(`šŸ“ Total size: ${formatSize(stats.totalSize)}`);
- console.log(`šŸ“¦ Processed size: ${formatSize(stats.processedSize)}`);
- console.log(`šŸ“‹ Format: ${fileExtension.toUpperCase()}`);
-
- if (sortedIncludedStats.length > 0) {
- console.log('\nšŸ“¦ Included File Types:');
- console.log('---------------------------------');
- for (const [ext, count] of sortedIncludedStats.slice(0, 10)) {
- console.log(` - ${String(ext).padEnd(15)} ${String(count).padStart(5)} files`);
- }
- if (sortedIncludedStats.length > 10) {
- console.log(` ... and ${sortedIncludedStats.length - 10} other types.`);
- }
- }
+ try {
+ // --- Common Data ---
+ const timestamp = generateTimestamp();
+ const repoName = path.basename(processedRepoPath);
+ const gitHash = await getGitCommitHash(processedRepoPath);
+ const fileExtension = options.format || config.defaultFormat || 'md';
+ const outputPath = options.output || path.resolve(originalCwd, config.output);
+ await fs.mkdir(outputPath, { recursive: true });

- if (largestFiles.length > 0) {
- console.log('\n🐘 Top 10 Largest Files (Included):');
- console.log('---------------------------------');
- for (const fileObj of largestFiles) {
- console.log(` - ${String(formatSize(fileObj.size)).padEnd(15)} ${fileObj.path}`);
- }
- }
-
- // Excluded/Skipped Files Section
- const hasExcludedContent = stats.excludedFiles > 0 || stats.binaryFiles > 0 || stats.oversizedFiles > 0 || stats.ignoredFiles > 0 || stats.errors.length > 0;
- if (hasExcludedContent) {
- console.log('\n🚫 Excluded/Skipped Files:');
- console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
- }
-
- if (stats.excludedFiles > 0) {
- console.log(`🚫 Excluded files: ${stats.excludedFiles}`);
- }
- if (stats.binaryFiles > 0) {
- console.log(`šŸ“± Binary files skipped: ${stats.binaryFiles}`);
- }
- if (stats.oversizedFiles > 0) {
- console.log(`šŸ“ Oversized files skipped: ${stats.oversizedFiles}`);
+ const shouldIncludeTree = config.tree && !options.noTree;
+ let directoryTree = '';
+ if (shouldIncludeTree) {
+ console.log('🌳 Generating directory tree...');
+ directoryTree = await generateDirectoryTree(processedRepoPath, '', allFiles, 0, config.maxDepth || 10, config);
+ }
+
+ // Calculate included file stats by extension
+ const includedFilesByType = new Map();
+ for (const fileObj of successfulFileObjects) {
+ try {
+ let ext = path.extname(fileObj.path);
+ if (ext === '') ext = '.no-extension';
+ includedFilesByType.set(ext, (includedFilesByType.get(ext) || 0) + 1);
+ } catch (e) { /* Silently ignore */ }
+ }
+ const sortedIncludedStats = [...includedFilesByType.entries()].sort((a, b) => b[1] - a[1]);
+
+ // Calculate Top 10 Largest Files
+ const largestFiles = [...successfulFileObjects].sort((a, b) => b.size - a.size).slice(0, 10);
+
+ const fileBody = (directoryTree ? `\n## Directory Structure\n\n\`\`\`\n${directoryTree}\`\`\`\n\n` : '') + contentArray.join('');
+
+ // --- File 1: Architect Snapshot ---
+ const architectOptions = { ...options, agent: false };
+ // Load manifest for headers
+ const eckManifest = await loadProjectEckManifest(processedRepoPath);
+ const isGitRepo = await checkGitRepository(processedRepoPath);
+
+ const architectHeader = await generateEnhancedAIHeader({ stats, repoName, mode: 'file', eckManifest, options: architectOptions, repoPath: processedRepoPath }, isGitRepo);
+ let architectBaseFilename = `${repoName}_snapshot_${timestamp}${gitHash ? `_${gitHash}` : ''}`;
+
+ // Add '_sk' suffix for skeleton mode snapshots
+ if (options.skeleton) {
+ architectBaseFilename += '_sk';
+ }
+
+ const architectFilename = `${architectBaseFilename}.${fileExtension}`;
+ const architectFilePath = path.join(outputPath, architectFilename);
+ await fs.writeFile(architectFilePath, architectHeader + fileBody);
+
+ // --- File 2: Junior Architect Snapshot ---
+ let jaFilePath = null;
+ if (options.withJa && fileExtension === 'md') { // Only create JA snapshot if requested and main is MD
+ console.log('šŸ–‹ļø Generating Junior Architect (_ja) snapshot...');
+ const jaOptions = { ...options, agent: true, noTree: false, noAiHeader: false };
+ const jaHeader = await generateEnhancedAIHeader({ stats, repoName, mode: 'file', eckManifest, options: jaOptions, repoPath: processedRepoPath }, isGitRepo);
+ const jaFilename = `${architectBaseFilename}_ja.${fileExtension}`;
+ jaFilePath = path.join(outputPath, jaFilename);
+ await fs.writeFile(jaFilePath, jaHeader + fileBody);
+ }
+
+ // Save git anchor for future delta updates
+ await saveGitAnchor(processedRepoPath);
+
+ // --- Generate CLAUDE.md with confidential file references ---
+ console.log('šŸ” Scanning for confidential files in .eck directory...');
+ const confidentialFiles = await scanEckForConfidentialFiles(processedRepoPath, config);
+
+ if (confidentialFiles.length > 0) {
+ const claudeMdContent = generateClaudeMdContent(confidentialFiles, processedRepoPath);
+ const claudeMdPath = path.join(processedRepoPath, 'CLAUDE.md');
+ await fs.writeFile(claudeMdPath, claudeMdContent);
+ console.log(`šŸ“ Generated CLAUDE.md with ${confidentialFiles.length} confidential file reference(s)`);
+ }
+
+ // --- Combined Report ---
+ console.log('\nāœ… Snapshot generation complete!');
+ console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
+ console.log(`šŸ“„ Architect File: ${architectFilePath}`);
+ if (jaFilePath) {
+ console.log(`šŸ“„ Junior Arch File: ${jaFilePath}`);
+ }
+ console.log(`šŸ“Š Files processed: ${stats.includedFiles}/${stats.totalFiles}`);
+ console.log(`šŸ“ Total size: ${formatSize(stats.totalSize)}`);
+ console.log(`šŸ“¦ Processed size: ${formatSize(stats.processedSize)}`);
+ console.log(`šŸ“‹ Format: ${fileExtension.toUpperCase()}`);
+
+ if (sortedIncludedStats.length > 0) {
+ console.log('\nšŸ“¦ Included File Types:');
+ console.log('---------------------------------');
+ for (const [ext, count] of sortedIncludedStats.slice(0, 10)) {
+ console.log(` - ${String(ext).padEnd(15)} ${String(count).padStart(5)} files`);
  }
- if (stats.ignoredFiles > 0) {
- console.log(`šŸ™ˆ Ignored files: ${stats.ignoredFiles}`);
+ if (sortedIncludedStats.length > 10) {
+ console.log(` ... and ${sortedIncludedStats.length - 10} other types.`);
  }
- if (stats.errors.length > 0) {
- console.log(`āŒ Errors: ${stats.errors.length}`);
- if (options.verbose) {
- stats.errors.forEach(err => console.log(` ${err}`));
- }
+ }
+
+ if (largestFiles.length > 0) {
+ console.log('\n🐘 Top 10 Largest Files (Included):');
+ console.log('---------------------------------');
+ for (const fileObj of largestFiles) {
+ console.log(` - ${String(formatSize(fileObj.size)).padEnd(15)} ${fileObj.path}`);
  }
-
- // Print detailed skip reasons report
- if (stats.skippedFilesDetails.size > 0) {
- console.log('\nšŸ“‹ Skip Reasons:');
- console.log('---------------------------------');
-
- for (const [reason, files] of stats.skippedFilesDetails.entries()) {
- console.log(`\nšŸ”ø ${reason} (${files.length} files):`);
- files.forEach(file => {
- console.log(` • ${file}`);
- });
- }
- console.log('---------------------------------');
- } else {
- console.log('---------------------------------');
+ }
+
+ // Security Report Section
+ if (stats.secretsRedacted > 0) {
+ console.log('\nšŸ” Security:');
+ console.log('---------------------------------');
+ console.log(chalk.yellow(` āš ļø ${stats.secretsRedacted} secret(s) detected and redacted`));
+ }
+
+ // Excluded/Skipped Files Section
+ const hasExcludedContent = stats.excludedFiles > 0 || stats.binaryFiles > 0 || stats.oversizedFiles > 0 || stats.ignoredFiles > 0 || stats.errors.length > 0;
+ if (hasExcludedContent) {
+ console.log('\n🚫 Excluded/Skipped Files:');
+ console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
+ }
+
+ if (stats.excludedFiles > 0) {
+ console.log(`🚫 Excluded files: ${stats.excludedFiles}`);
+ }
+ if (stats.binaryFiles > 0) {
+ console.log(`šŸ“± Binary files skipped: ${stats.binaryFiles}`);
+ }
+ if (stats.oversizedFiles > 0) {
+ console.log(`šŸ“ Oversized files skipped: ${stats.oversizedFiles}`);
+ }
+ if (stats.ignoredFiles > 0) {
+ console.log(`šŸ™ˆ Ignored files: ${stats.ignoredFiles}`);
+ }
+ if (stats.errors.length > 0) {
+ console.log(`āŒ Errors: ${stats.errors.length}`);
+ if (options.verbose) {
+ stats.errors.forEach(err => console.log(` ${err}`));
  }
-
- // Generate training command string if estimation data is available
- if (estimation && projectDetection.type && !options.profile) {
- const trainingCommand = generateTrainingCommand(projectDetection.type, estimation.estimatedTokens, estimation.totalSize, repoPath);
- console.log('\nšŸŽÆ To improve token estimation accuracy, run this command after checking actual tokens:');
- console.log(`${trainingCommand}[ACTUAL_TOKENS_HERE]`);
- console.log(' Replace [ACTUAL_TOKENS_HERE] with the real token count from your LLM');
+ }
+
+ // Print detailed skip reasons report
+ if (stats.skippedFilesDetails.size > 0) {
+ console.log('\nšŸ“‹ Skip Reasons:');
+ console.log('---------------------------------');
+
+ for (const [reason, files] of stats.skippedFilesDetails.entries()) {
+ console.log(`\nšŸ”ø ${reason} (${files.length} files):`);
+ files.forEach(file => {
+ console.log(` • ${file}`);
+ });
  }
+ console.log('---------------------------------');
+ } else {
+ console.log('---------------------------------');
+ }

- } finally {
- process.chdir(originalCwd); // Final reset back to original CWD
+ // Generate training command string if estimation data is available
+ if (estimation && projectDetection.type && !options.profile) {
+ const trainingCommand = generateTrainingCommand(projectDetection.type, estimation.estimatedTokens, estimation.totalSize, repoPath);
+ console.log('\nšŸŽÆ To improve token estimation accuracy, run this command after checking actual tokens:');
+ console.log(`${trainingCommand}[ACTUAL_TOKENS_HERE]`);
+ console.log(' Replace [ACTUAL_TOKENS_HERE] with the real token count from your LLM');
  }
+
+ } finally {
+ process.chdir(originalCwd); // Final reset back to original CWD
+ }
  } catch (error) {
  spinner.fail(`Operation failed: ${error.message}`);
  process.exit(1);