@xelth/eck-snapshot 2.2.0 → 4.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +119 -225
- package/index.js +14 -776
- package/package.json +25 -7
- package/setup.json +805 -0
- package/src/cli/cli.js +427 -0
- package/src/cli/commands/askGpt.js +29 -0
- package/src/cli/commands/autoDocs.js +150 -0
- package/src/cli/commands/consilium.js +86 -0
- package/src/cli/commands/createSnapshot.js +601 -0
- package/src/cli/commands/detectProfiles.js +98 -0
- package/src/cli/commands/detectProject.js +112 -0
- package/src/cli/commands/generateProfileGuide.js +91 -0
- package/src/cli/commands/pruneSnapshot.js +106 -0
- package/src/cli/commands/restoreSnapshot.js +173 -0
- package/src/cli/commands/setupGemini.js +149 -0
- package/src/cli/commands/setupGemini.test.js +115 -0
- package/src/cli/commands/trainTokens.js +38 -0
- package/src/config.js +81 -0
- package/src/services/authService.js +20 -0
- package/src/services/claudeCliService.js +621 -0
- package/src/services/claudeCliService.test.js +267 -0
- package/src/services/dispatcherService.js +33 -0
- package/src/services/gptService.js +302 -0
- package/src/services/gptService.test.js +120 -0
- package/src/templates/agent-prompt.template.md +29 -0
- package/src/templates/architect-prompt.template.md +50 -0
- package/src/templates/envScanRequest.md +4 -0
- package/src/templates/gitWorkflow.md +32 -0
- package/src/templates/multiAgent.md +164 -0
- package/src/templates/vectorMode.md +22 -0
- package/src/utils/aiHeader.js +303 -0
- package/src/utils/fileUtils.js +928 -0
- package/src/utils/projectDetector.js +704 -0
- package/src/utils/tokenEstimator.js +198 -0
- package/.ecksnapshot.config.js +0 -35
package/index.js
CHANGED
@@ -1,776 +1,14 @@
-#!/usr/bin/env node
-
-import
-import
-import
-
-
-
-
-
-
-
-
-
-import inquirer from 'inquirer';
-
-const __dirname = path.dirname(fileURLToPath(import.meta.url));
-const gzip = promisify(zlib.gzip);
-const gunzip = promisify(zlib.gunzip);
-
-const DEFAULT_CONFIG = {
-  filesToIgnore: ['package-lock.json', '*.log', 'yarn.lock'],
-  extensionsToIgnore: ['.sqlite3', '.db', '.DS_Store', '.env', '.pyc', '.class', '.o', '.so', '.dylib'],
-  dirsToIgnore: ['node_modules/', '.git/', 'dist/', 'build/', '.next/', '.nuxt/', 'target/', 'bin/', 'obj/'],
-  maxFileSize: '10MB',
-  maxTotalSize: '100MB',
-  maxDepth: 10,
-  concurrency: 10
-};
-
-// ... (the remaining existing functions: parseSize, formatSize, matchesPattern, loadConfig, etc. are unchanged)
-// --- BEGIN EXISTING FUNCTIONS ---
-
-function parseSize(sizeStr) {
-  const units = { B: 1, KB: 1024, MB: 1024 ** 2, GB: 1024 ** 3 };
-  const match = sizeStr.match(/^(\d+(?:\.\d+)?)\s*(B|KB|MB|GB)?$/i);
-  if (!match) throw new Error(`Invalid size format: ${sizeStr}`);
-  const [, size, unit = 'B'] = match;
-  return Math.floor(parseFloat(size) * units[unit.toUpperCase()]);
-}
-
-function formatSize(bytes) {
-  const units = ['B', 'KB', 'MB', 'GB'];
-  let size = bytes;
-  let unitIndex = 0;
-  while (size >= 1024 && unitIndex < units.length - 1) {
-    size /= 1024;
-    unitIndex++;
-  }
-  return `${size.toFixed(1)} ${units[unitIndex]}`;
-}
-
-function matchesPattern(filePath, patterns) {
-  const fileName = path.basename(filePath);
-  return patterns.some(pattern => {
-    const regexPattern = '^' + pattern.replace(/[.+?^${}()|[\]\\]/g, '\\$&').replace(/\*/g, '.*') + '$';
-    try {
-      const regex = new RegExp(regexPattern);
-      return regex.test(fileName);
-    } catch (e) {
-      console.warn(`⚠️ Invalid regex pattern in config: "${pattern}"`);
-      return false;
-    }
-  });
-}
-
-
-async function loadConfig(configPath) {
-  let config = { ...DEFAULT_CONFIG };
-
-  if (configPath) {
-    try {
-      const configModule = await import(path.resolve(configPath));
-      config = { ...config, ...configModule.default };
-      console.log(`✅ Configuration loaded from: ${configPath}`);
-    } catch (error) {
-      console.warn(`⚠️ Warning: Could not load config file: ${configPath}`);
-    }
-  } else {
-    const possibleConfigs = [
-      '.ecksnapshot.config.js',
-      '.ecksnapshot.config.mjs',
-      'ecksnapshot.config.js'
-    ];
-
-    for (const configFile of possibleConfigs) {
-      try {
-        await fs.access(configFile);
-        const configModule = await import(path.resolve(configFile));
-        config = { ...config, ...configModule.default };
-        console.log(`✅ Configuration loaded from: ${configFile}`);
-        break;
-      } catch {
-        // Config file doesn't exist, continue
-      }
-    }
-  }
-
-  return config;
-}
-
-async function checkGitAvailability() {
-  try {
-    await execa('git', ['--version']);
-  } catch (error) {
-    throw new Error('Git is not installed or not available in PATH');
-  }
-}
-
-async function checkGitRepository(repoPath) {
-  try {
-    await execa('git', ['rev-parse', '--git-dir'], { cwd: repoPath });
-  } catch (error) {
-    throw new Error(`Not a git repository: ${repoPath}`);
-  }
-}
-
-async function loadGitignore(repoPath) {
-  try {
-    const gitignoreContent = await fs.readFile(path.join(repoPath, '.gitignore'), 'utf-8');
-    const ig = ignore().add(gitignoreContent);
-    console.log('✅ .gitignore patterns loaded');
-    return ig;
-  } catch {
-    console.log('ℹ️ No .gitignore file found or could not be read');
-    return ignore();
-  }
-}
-
-async function readFileWithSizeCheck(filePath, maxFileSize) {
-  try {
-    const stats = await fs.stat(filePath);
-    if (stats.size > maxFileSize) {
-      throw new Error(`File too large: ${formatSize(stats.size)}`);
-    }
-    return await fs.readFile(filePath, 'utf-8');
-  } catch (error) {
-    if (error.message.includes('too large')) throw error;
-    throw new Error(`Could not read file: ${error.message}`);
-  }
-}
-
-async function generateDirectoryTree(dir, prefix = '', allFiles, depth = 0, maxDepth = 10, config) {
-  if (depth > maxDepth) return '';
-
-  try {
-    const entries = await fs.readdir(dir, { withFileTypes: true });
-    const sortedEntries = entries.sort((a, b) => {
-      if (a.isDirectory() && !b.isDirectory()) return -1;
-      if (!a.isDirectory() && b.isDirectory()) return 1;
-      return a.name.localeCompare(b.name);
-    });
-
-    let tree = '';
-    const validEntries = [];
-
-    for (const entry of sortedEntries) {
-      if (config.dirsToIgnore.some(d => entry.name.includes(d.replace('/', '')))) continue;
-
-      const fullPath = path.join(dir, entry.name);
-      const relativePath = path.relative(process.cwd(), fullPath).replace(/\\/g, '/');
-
-      if (entry.isDirectory() || allFiles.includes(relativePath)) {
-        validEntries.push({ entry, fullPath, relativePath });
-      }
-    }
-
-    for (let i = 0; i < validEntries.length; i++) {
-      const { entry, fullPath, relativePath } = validEntries[i];
-      const isLast = i === validEntries.length - 1;
-
-      const connector = isLast ? '└── ' : '├── ';
-      const nextPrefix = prefix + (isLast ? '    ' : '│   ');
-
-      if (entry.isDirectory()) {
-        tree += `${prefix}${connector}${entry.name}/\n`;
-        tree += await generateDirectoryTree(fullPath, nextPrefix, allFiles, depth + 1, maxDepth, config);
-      } else {
-        tree += `${prefix}${connector}${entry.name}\n`;
-      }
-    }
-    return tree;
-  } catch (error) {
-    console.warn(`⚠️ Warning: Could not read directory: ${dir}`);
-    return '';
-  }
-}
-
-async function processFile(filePath, config, gitignore, stats) {
-  const fileName = path.basename(filePath);
-  const fileExt = path.extname(filePath) || 'no-extension';
-  const normalizedPath = filePath.replace(/\\/g, '/');
-
-  let skipReason = null;
-
-  if (config.dirsToIgnore.some(dir => normalizedPath.startsWith(dir))) {
-    skipReason = 'ignored-directory';
-  } else if (config.extensionsToIgnore.includes(path.extname(filePath))) {
-    skipReason = 'ignored-extension';
-  } else if (matchesPattern(filePath, config.filesToIgnore)) {
-    skipReason = 'ignored-pattern';
-  } else if (gitignore.ignores(normalizedPath)) {
-    skipReason = 'gitignore';
-  } else if (isBinaryPath(filePath)) {
-    skipReason = 'binary-file';
-  } else if (!config.includeHidden && fileName.startsWith('.')) {
-    skipReason = 'hidden-file';
-  }
-
-  if (skipReason) {
-    stats.skippedFiles++;
-    stats.skippedFileTypes.set(fileExt, (stats.skippedFileTypes.get(fileExt) || 0) + 1);
-    stats.skipReasons.set(skipReason, (stats.skipReasons.get(skipReason) || 0) + 1);
-
-    if (!stats.skippedFilesDetails.has(skipReason)) {
-      stats.skippedFilesDetails.set(skipReason, []);
-    }
-    stats.skippedFilesDetails.get(skipReason).push({ file: filePath, ext: fileExt });
-
-    if (skipReason === 'binary-file') stats.binaryFiles++;
-    return { skipped: true, reason: skipReason };
-  }
-
-  try {
-    const content = await readFileWithSizeCheck(filePath, parseSize(config.maxFileSize));
-    const fileContent = `--- File: /${normalizedPath} ---\n\n${content}\n\n`;
-
-    stats.includedFiles++;
-    stats.includedFileTypes.set(fileExt, (stats.includedFileTypes.get(fileExt) || 0) + 1);
-
-    return { content: fileContent, size: fileContent.length };
-  } catch (error) {
-    const errorReason = error.message.includes('too large') ? 'file-too-large' : 'read-error';
-
-    stats.errors.push({ file: filePath, error: error.message });
-    stats.skippedFiles++;
-    stats.skippedFileTypes.set(fileExt, (stats.skippedFileTypes.get(fileExt) || 0) + 1);
-    stats.skipReasons.set(errorReason, (stats.skipReasons.get(errorReason) || 0) + 1);
-
-    if (!stats.skippedFilesDetails.has(errorReason)) {
-      stats.skippedFilesDetails.set(errorReason, []);
-    }
-    stats.skippedFilesDetails.get(errorReason).push({ file: filePath, ext: fileExt });
-
-    if (error.message.includes('too large')) {
-      stats.largeFiles++;
-    }
-
-    return { skipped: true, reason: error.message };
-  }
-}
-// --- END EXISTING FUNCTIONS ---
-
-
-// --- MAIN COMMAND FUNCTIONS ---
-
-async function createRepoSnapshot(repoPath, options) {
-  // ... (this function is unchanged)
-  const absoluteRepoPath = path.resolve(repoPath);
-  const absoluteOutputPath = path.resolve(options.output);
-  const originalCwd = process.cwd();
-
-  console.log(`🚀 Starting snapshot for repository: ${absoluteRepoPath}`);
-  console.log(`📁 Snapshots will be saved to: ${absoluteOutputPath}`);
-
-  try {
-    const config = await loadConfig(options.config);
-    config.maxFileSize = options.maxFileSize || config.maxFileSize;
-    config.maxTotalSize = options.maxTotalSize || config.maxTotalSize;
-    config.maxDepth = options.maxDepth || config.maxDepth;
-    config.includeHidden = options.includeHidden || false;
-
-    await checkGitAvailability();
-    await checkGitRepository(absoluteRepoPath);
-
-    process.chdir(absoluteRepoPath);
-    console.log('✅ Successfully changed working directory');
-
-    const gitignore = await loadGitignore(absoluteRepoPath);
-
-    console.log('📋 Fetching file list from Git...');
-    const { stdout } = await execa('git', ['ls-files']);
-    const allFiles = stdout.split('\n').filter(Boolean);
-    console.log(`📊 Found ${allFiles.length} total files in the repository`);
-
-    const stats = {
-      totalFiles: allFiles.length,
-      includedFiles: 0,
-      skippedFiles: 0,
-      binaryFiles: 0,
-      largeFiles: 0,
-      errors: [],
-      includedFileTypes: new Map(),
-      skippedFileTypes: new Map(),
-      skipReasons: new Map(),
-      skippedFilesDetails: new Map()
-    };
-
-    let snapshotContent = '';
-
-    if (options.tree) {
-      console.log('🌳 Generating directory tree...');
-      const tree = await generateDirectoryTree(absoluteRepoPath, '', allFiles, 0, config.maxDepth, config);
-      snapshotContent += 'Directory Structure:\n\n';
-      snapshotContent += tree;
-      snapshotContent += '\n\n';
-    }
-
-    console.log('📝 Processing files...');
-    const limit = pLimit(config.concurrency);
-    const progressBar = options.verbose ? null : new SingleBar({
-      format: 'Progress |{bar}| {percentage}% | {value}/{total} files | ETA: {eta}s',
-      barCompleteChar: '\u2588',
-      barIncompleteChar: '\u2591',
-      hideCursor: true
-    }, Presets.shades_classic);
-
-    if (progressBar) progressBar.start(allFiles.length, 0);
-
-    const filePromises = allFiles.map((filePath, index) =>
-      limit(async () => {
-        const result = await processFile(filePath, config, gitignore, stats);
-
-        if (progressBar) {
-          progressBar.update(index + 1);
-        } else if (options.verbose) {
-          if (result.skipped) {
-            console.log(`⏭️ Skipping: ${filePath} (${result.reason})`);
-          } else {
-            console.log(`✅ Processed: ${filePath}`);
-          }
-        }
-
-        return result;
-      })
-    );
-
-    const results = await Promise.allSettled(filePromises);
-    if (progressBar) progressBar.stop();
-
-    const contentArray = [];
-    let totalSize = 0;
-    const maxTotalSize = parseSize(config.maxTotalSize);
-
-    for (const result of results) {
-      if (result.status === 'rejected') {
-        console.warn(`⚠️ Promise rejected: ${result.reason}`);
-        continue;
-      }
-      if (result.value && result.value.content) {
-        if (totalSize + result.value.size > maxTotalSize) {
-          console.warn(`⚠️ Warning: Approaching size limit. Some files may be excluded.`);
-          break;
-        }
-        contentArray.push(result.value.content);
-        totalSize += result.value.size;
-      }
-    }
-
-    snapshotContent += contentArray.join('');
-    const totalChars = snapshotContent.length;
-    const estimatedTokens = Math.round(totalChars / 4);
-
-    const timestamp = new Date().toISOString().slice(0, 19).replace('T', '_').replace(/:/g, '-');
-    const repoName = path.basename(absoluteRepoPath);
-    const extension = options.format === 'json' ? 'json' : 'txt';
-    let outputFilename = `${repoName}_snapshot_${timestamp}.${extension}`;
-
-    if (options.compress) {
-      outputFilename += '.gz';
-    }
-
-    const fullOutputFilePath = path.join(absoluteOutputPath, outputFilename);
-
-    let finalContent = snapshotContent;
-    if (options.format === 'json') {
-      const jsonData = {
-        repository: repoName,
-        timestamp: new Date().toISOString(),
-        stats: {
-          ...stats,
-          includedFileTypes: Object.fromEntries(stats.includedFileTypes),
-          skippedFileTypes: Object.fromEntries(stats.skippedFileTypes),
-          skipReasons: Object.fromEntries(stats.skipReasons),
-          skippedFilesDetails: Object.fromEntries(
-            Array.from(stats.skippedFilesDetails.entries()).map(([reason, files]) => [
-              reason,
-              files.map(({file, ext}) => ({file, ext}))
-            ])
-          )
-        },
-        content: snapshotContent
-      };
-      finalContent = JSON.stringify(jsonData, null, 2);
-    }
-
-    await fs.mkdir(absoluteOutputPath, { recursive: true });
-
-    if (options.compress) {
-      const compressed = await gzip(finalContent);
-      await fs.writeFile(fullOutputFilePath, compressed);
-    } else {
-      await fs.writeFile(fullOutputFilePath, finalContent);
-    }
-
-    console.log('\n📊 Snapshot Summary');
-    console.log('='.repeat(50));
-    console.log(`🎉 Snapshot created successfully!`);
-    console.log(`📄 File saved to: ${fullOutputFilePath}`);
-    console.log(`📈 Included text files: ${stats.includedFiles} of ${stats.totalFiles}`);
-    console.log(`⏭️ Skipped files: ${stats.skippedFiles}`);
-    console.log(`🔢 Binary files skipped: ${stats.binaryFiles}`);
-    console.log(`📏 Large files skipped: ${stats.largeFiles}`);
-    if (options.tree) console.log('🌳 Directory tree included');
-    if (options.compress) console.log('🗜️ File compressed with gzip');
-    console.log(`📊 Total characters: ${totalChars.toLocaleString('en-US')}`);
-    console.log(`🎯 Estimated tokens: ~${estimatedTokens.toLocaleString('en-US')}`);
-    console.log(`💾 File size: ${formatSize(totalChars)}`);
-
-    if (stats.includedFileTypes.size > 0) {
-      console.log('\n📋 Included File Types Distribution:');
-      const sortedIncludedTypes = Array.from(stats.includedFileTypes.entries())
-        .sort(([,a], [,b]) => b - a)
-        .slice(0, 10);
-
-      for (const [ext, count] of sortedIncludedTypes) {
-        console.log(` ${ext}: ${count} files`);
-      }
-    }
-
-    if (stats.skippedFileTypes.size > 0) {
-      console.log('\n⏭️ Skipped File Types Distribution:');
-      const sortedSkippedTypes = Array.from(stats.skippedFileTypes.entries())
-        .sort(([,a], [,b]) => b - a)
-        .slice(0, 10);
-
-      for (const [ext, count] of sortedSkippedTypes) {
-        console.log(` ${ext}: ${count} files`);
-      }
-    }
-
-    if (stats.skipReasons.size > 0) {
-      console.log('\n📊 Skip Reasons:');
-      const sortedReasons = Array.from(stats.skipReasons.entries())
-        .sort(([,a], [,b]) => b - a);
-
-      const reasonLabels = {
-        'ignored-directory': 'Ignored directories',
-        'ignored-extension': 'Ignored extensions',
-        'ignored-pattern': 'Ignored patterns',
-        'gitignore': 'Gitignore rules',
-        'binary-file': 'Binary files',
-        'hidden-file': 'Hidden files',
-        'file-too-large': 'Files too large',
-        'read-error': 'Read errors'
-      };
-
-      for (const [reason, count] of sortedReasons) {
-        const label = reasonLabels[reason] || reason;
-        console.log(` ${label}: ${count} files`);
-
-        if (stats.skippedFilesDetails.has(reason)) {
-          const files = stats.skippedFilesDetails.get(reason);
-          const filesToShow = files.slice(0, 10);
-
-          for (const {file, ext} of filesToShow) {
-            console.log(` • ${file} (${ext})`);
-          }
-
-          if (files.length > 10) {
-            console.log(` ... and ${files.length - 10} more files`);
-          }
-        }
-        console.log();
-      }
-    }
-
-    if (stats.errors.length > 0) {
-      console.log('\n⚠️ Errors encountered:');
-      stats.errors.slice(0, 5).forEach(({ file, error }) => {
-        console.log(` ${file}: ${error}`);
-      });
-      if (stats.errors.length > 5) {
-        console.log(` ... and ${stats.errors.length - 5} more errors`);
-      }
-    }
-
-    console.log('='.repeat(50));
-
-  } catch (error) {
-    console.error('\n❌ An error occurred:');
-    if (error.code === 'ENOENT' && error.path && error.path.includes('.git')) {
-      console.error(`Error: The path "${absoluteRepoPath}" does not seem to be a Git repository.`);
-    } else if (error.message.includes('Git is not installed')) {
-      console.error('Error: Git is not installed or not available in PATH.');
-      console.error('Please install Git and ensure it\'s available in your system PATH.');
-    } else if (error.message.includes('Not a git repository')) {
-      console.error(error.message);
-      console.error('Please run this command from within a Git repository or provide a valid repository path.');
-    } else {
-      console.error(error.message);
-      if (options.verbose) {
-        console.error(error.stack);
-      }
-    }
-    process.exit(1);
-  } finally {
-    process.chdir(originalCwd);
-  }
-}
-
-async function restoreSnapshot(snapshotFile, targetDir, options) {
-  const absoluteSnapshotPath = path.resolve(snapshotFile);
-  const absoluteTargetDir = path.resolve(targetDir);
-
-  console.log(`🔄 Starting restore from snapshot: ${absoluteSnapshotPath}`);
-  console.log(`📁 Target directory: ${absoluteTargetDir}`);
-
-  try {
-    let rawContent;
-    if (snapshotFile.endsWith('.gz')) {
-      const compressedBuffer = await fs.readFile(absoluteSnapshotPath);
-      rawContent = (await gunzip(compressedBuffer)).toString('utf-8');
-      console.log('✅ Decompressed gzipped snapshot');
-    } else {
-      rawContent = await fs.readFile(absoluteSnapshotPath, 'utf-8');
-    }
-
-    let filesToRestore;
-    try {
-      const jsonData = JSON.parse(rawContent);
-      if (jsonData.content) {
-        console.log('📄 Detected JSON format, extracting content');
-        filesToRestore = parseSnapshotContent(jsonData.content);
-      } else {
-        throw new Error('JSON format detected, but no "content" key found');
-      }
-    } catch (e) {
-      console.log('📄 Treating snapshot as plain text format');
-      filesToRestore = parseSnapshotContent(rawContent);
-    }
-
-    if (filesToRestore.length === 0) {
-      console.warn('⚠️ No files found to restore in the snapshot');
-      return;
-    }
-
-    // Apply filters if specified
-    if (options.include || options.exclude) {
-      filesToRestore = filterFilesToRestore(filesToRestore, options);
-      if (filesToRestore.length === 0) {
-        console.warn('⚠️ No files remaining after applying filters');
-        return;
-      }
-    }
-
-    // Validate file paths for security
-    const invalidFiles = validateFilePaths(filesToRestore, absoluteTargetDir);
-    if (invalidFiles.length > 0) {
-      console.error('❌ Invalid file paths detected (potential directory traversal):');
-      invalidFiles.forEach(file => console.error(` ${file}`));
-      process.exit(1);
-    }
-
-    console.log(`📊 Found ${filesToRestore.length} files to restore`);
-
-    if (options.dryRun) {
-      console.log('\n🔍 Dry run mode - files that would be restored:');
-      filesToRestore.forEach(file => {
-        const fullPath = path.join(absoluteTargetDir, file.path);
-        console.log(` ${fullPath}`);
-      });
-      return;
-    }
-
-    if (!options.force) {
-      const { confirm } = await inquirer.prompt([{
-        type: 'confirm',
-        name: 'confirm',
-        message: `You are about to write ${filesToRestore.length} files to ${absoluteTargetDir}. Existing files will be overwritten. Continue?`,
-        default: false
-      }]);
-
-      if (!confirm) {
-        console.log('🚫 Restore operation cancelled by user');
-        return;
-      }
-    }
-
-    await fs.mkdir(absoluteTargetDir, { recursive: true });
-
-    const stats = {
-      totalFiles: filesToRestore.length,
-      restoredFiles: 0,
-      failedFiles: 0,
-      errors: []
-    };
-
-    const progressBar = options.verbose ? null : new SingleBar({
-      format: 'Restoring |{bar}| {percentage}% | {value}/{total} files',
-      barCompleteChar: '\u2588',
-      barIncompleteChar: '\u2591',
-      hideCursor: true
-    }, Presets.shades_classic);
-
-    if (progressBar) progressBar.start(filesToRestore.length, 0);
-
-    const limit = pLimit(options.concurrency || 10);
-    const filePromises = filesToRestore.map((file, index) =>
-      limit(async () => {
-        try {
-          const fullPath = path.join(absoluteTargetDir, file.path);
-          const dir = path.dirname(fullPath);
-
-          await fs.mkdir(dir, { recursive: true });
-          await fs.writeFile(fullPath, file.content, 'utf-8');
-
-          stats.restoredFiles++;
-
-          if (progressBar) {
-            progressBar.update(index + 1);
-          } else if (options.verbose) {
-            console.log(`✅ Restored: ${file.path}`);
-          }
-
-          return { success: true, file: file.path };
-        } catch (error) {
-          stats.failedFiles++;
-          stats.errors.push({ file: file.path, error: error.message });
-
-          if (options.verbose) {
-            console.log(`❌ Failed to restore: ${file.path} - ${error.message}`);
-          }
-
-          return { success: false, file: file.path, error: error.message };
-        }
-      })
-    );
-
-    await Promise.allSettled(filePromises);
-    if (progressBar) progressBar.stop();
-
-    console.log('\n📊 Restore Summary');
-    console.log('='.repeat(50));
-    console.log(`🎉 Restore completed!`);
-    console.log(`✅ Successfully restored: ${stats.restoredFiles} files`);
-    if (stats.failedFiles > 0) {
-      console.log(`❌ Failed to restore: ${stats.failedFiles} files`);
-      if (stats.errors.length > 0) {
-        console.log('\n⚠️ Errors encountered:');
-        stats.errors.slice(0, 5).forEach(({ file, error }) => {
-          console.log(` ${file}: ${error}`);
-        });
-        if (stats.errors.length > 5) {
-          console.log(` ... and ${stats.errors.length - 5} more errors`);
-        }
-      }
-    }
-    console.log(`📁 Target directory: ${absoluteTargetDir}`);
-    console.log('='.repeat(50));
-
-  } catch (error) {
-    console.error('\n❌ An error occurred during restore:');
-    console.error(error.message);
-    if (options.verbose) {
-      console.error(error.stack);
-    }
-    process.exit(1);
-  }
-}
-
-function parseSnapshotContent(content) {
-  const files = [];
-  const fileRegex = /--- File: \/(.+) ---/g;
-  const sections = content.split(fileRegex);
-
-  for (let i = 1; i < sections.length; i += 2) {
-    const filePath = sections[i].trim();
-    let fileContent = sections[i + 1] || '';
-
-    if (fileContent.startsWith('\n\n')) {
-      fileContent = fileContent.substring(2);
-    }
-    if (fileContent.endsWith('\n\n')) {
-      fileContent = fileContent.substring(0, fileContent.length - 2);
-    }
-
-    files.push({ path: filePath, content: fileContent });
-  }
-
-  return files;
-}
-
-function filterFilesToRestore(files, options) {
-  let filtered = files;
-
-  if (options.include) {
-    const includePatterns = Array.isArray(options.include) ? options.include : [options.include];
-    filtered = filtered.filter(file =>
-      includePatterns.some(pattern => {
-        const regex = new RegExp(pattern.replace(/\*/g, '.*'));
-        return regex.test(file.path);
-      })
-    );
-  }
-
-  if (options.exclude) {
-    const excludePatterns = Array.isArray(options.exclude) ? options.exclude : [options.exclude];
-    filtered = filtered.filter(file =>
-      !excludePatterns.some(pattern => {
-        const regex = new RegExp(pattern.replace(/\*/g, '.*'));
-        return regex.test(file.path);
-      })
-    );
-  }
-
-  return filtered;
-}
-
-function validateFilePaths(files, targetDir) {
-  const invalidFiles = [];
-
-  for (const file of files) {
-    const normalizedPath = path.normalize(file.path);
-
-    if (normalizedPath.includes('..') ||
-        normalizedPath.startsWith('/') ||
-        normalizedPath.includes('\0') ||
-        /[<>:"|?*]/.test(normalizedPath)) {
-      invalidFiles.push(file.path);
-    }
-  }
-
-  return invalidFiles;
-}
-
-
-// --- CLI SETUP ---
-const program = new Command();
-
-program
-  .name('eck-snapshot')
-  .description('A CLI tool to create and restore single-file text snapshots of a Git repository.')
-  .version('2.1.0');
-
-// Snapshot command (existing)
-program
-  .command('snapshot', { isDefault: true })
-  .description('Create a snapshot of a Git repository (default command).')
-  .argument('[repoPath]', 'Path to the git repository to snapshot.', process.cwd())
-  .option('-o, --output <dir>', 'Output directory for the snapshot file.', path.join(process.cwd(), 'snapshots'))
-  .option('--no-tree', 'Do not include the directory tree in the snapshot.')
-  .option('-v, --verbose', 'Show detailed processing information, including skipped files.')
-  .option('--max-file-size <size>', 'Maximum file size to include (e.g., 10MB)', '10MB')
-  .option('--max-total-size <size>', 'Maximum total snapshot size (e.g., 100MB)', '100MB')
-  .option('--max-depth <number>', 'Maximum directory depth for tree generation', (val) => parseInt(val), 10)
-  .option('--config <path>', 'Path to configuration file')
-  .option('--compress', 'Compress output file with gzip')
-  .option('--include-hidden', 'Include hidden files (starting with .)')
-  .option('--format <type>', 'Output format: txt, json', 'txt')
-  .action((repoPath, options) => createRepoSnapshot(repoPath, options));
-
-program
-  .command('restore')
-  .description('Restore files and directories from a snapshot file')
-  .argument('<snapshot_file>', 'Path to the snapshot file (.txt, .json, or .gz)')
-  .argument('[target_directory]', 'Directory to restore the files into', process.cwd())
-  .option('-f, --force', 'Force overwrite of existing files without confirmation')
-  .option('-v, --verbose', 'Show detailed processing information')
-  .option('--dry-run', 'Show what would be restored without actually writing files')
-  .option('--include <patterns...>', 'Include only files matching these patterns (supports wildcards)')
-  .option('--exclude <patterns...>', 'Exclude files matching these patterns (supports wildcards)')
-  .option('--concurrency <number>', 'Number of concurrent file operations', (val) => parseInt(val), 10)
-  .action((snapshotFile, targetDir, options) => restoreSnapshot(snapshotFile, targetDir, options));
-
-program.parse(process.argv);
+#!/usr/bin/env node
+
+import dotenv from 'dotenv';
+import path from 'path';
+import { fileURLToPath } from 'url';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+const envPath = path.join(__dirname, '.env');
+dotenv.config({ path: envPath });
+
+const { run } = await import('./src/cli/cli.js');
+run();
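
The rewritten `index.js` is now only a thin entry point: it loads a package-local `.env` and then delegates all command handling to `src/cli/cli.js`, which this release adds (+427 lines) and which exports `run()`. That module is not shown in this diff; the sketch below is a hypothetical outline of the delegation. It assumes the 4.x CLI is still built on Commander, as the 2.x `index.js` was, and the command-module import paths and export names (`createSnapshot`, `restoreSnapshot`) are likewise assumptions drawn only from the file list above.

```js
// Hypothetical outline of src/cli/cli.js — not part of this diff.
// Assumes Commander (used by the 2.x entry point); handler names are illustrative.
import { Command } from 'commander';
import { createSnapshot } from './commands/createSnapshot.js';   // assumed export
import { restoreSnapshot } from './commands/restoreSnapshot.js'; // assumed export

export function run() {
  const program = new Command();

  program
    .name('eck-snapshot')
    .description('Create and restore single-file text snapshots of a Git repository.')
    .version('4.0.0');

  // Default command: snapshot the current (or given) repository.
  program
    .command('snapshot', { isDefault: true })
    .argument('[repoPath]', 'Path to the git repository to snapshot.', process.cwd())
    .action((repoPath, options) => createSnapshot(repoPath, options));

  // Restore files from a previously created snapshot.
  program
    .command('restore')
    .argument('<snapshot_file>', 'Path to the snapshot file')
    .argument('[target_directory]', 'Directory to restore the files into', process.cwd())
    .action((snapshotFile, targetDir, options) => restoreSnapshot(snapshotFile, targetDir, options));

  program.parse(process.argv);
}
```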