stegdoc 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,346 @@
1
+ const path = require('path');
2
+ const fs = require('fs');
3
+ const chalk = require('chalk');
4
+ const ora = require('ora');
5
+ const AdmZip = require('adm-zip');
6
+ const { createDocxWithBase64 } = require('../lib/docx-handler');
7
+ const { createXlsxWithBase64 } = require('../lib/xlsx-handler');
8
+ const { createMetadata, serializeMetadata } = require('../lib/metadata');
9
+ const { generateHash, generateContentHash, parseSizeToBytes, formatBytes, generateFilename } = require('../lib/utils');
10
+ const { encrypt, packEncryptionMeta } = require('../lib/crypto');
11
+ const { compress, isCompressedMime } = require('../lib/compression');
12
+ const { resetTimeWindow } = require('../lib/decoy-generator');
13
+ const { shouldRunInteractive, promptEncodeOptions } = require('../lib/interactive');
14
+
15
/**
 * Compress an entire folder into an in-memory ZIP archive.
 * @param {string} folderPath - Path to folder
 * @returns {Buffer} Zip buffer
 */
function zipFolder(folderPath) {
  const archive = new AdmZip();
  archive.addLocalFolder(folderPath);
  return archive.toBuffer();
}
25
+
26
/**
 * Encode a file (or folder, which is zipped first) into an XLSX/DOCX carrier
 * document, with optional AES-256-GCM encryption, optional compression, and
 * optional splitting into multiple part files.
 *
 * On any failure, every file created during this run is deleted and the
 * process exits with code 1.
 *
 * @param {string} inputFile - Path to input file (or directory)
 * @param {object} options - Command options
 * @param {string} options.outputDir - Output directory (defaults to cwd)
 * @param {string} options.chunkSize - Chunk size (e.g., "5MB", "3 parts", "max")
 * @param {string} options.format - Output format ('xlsx' or 'docx')
 * @param {string} options.password - Encryption password (optional)
 * @param {boolean} options.force - Overwrite existing files without asking
 * @param {boolean} options.quiet - Minimal output
 * @param {boolean} options.yes - Skip interactive prompts, use defaults
 */
async function encodeCommand(inputFile, options) {
  // Check if we should run interactive mode
  if (shouldRunInteractive(options, 'encode')) {
    const filename = path.basename(inputFile);
    // BUG FIX: was `$(unknown)` (shell-style, printed literally); interpolate
    // the filename that was just computed.
    console.log(chalk.bold(`\nEncoding: ${filename}`));

    const interactiveOptions = await promptEncodeOptions(filename);
    options = { ...options, ...interactiveOptions };
    console.log(); // Add spacing before starting
  }

  const quiet = options.quiet || false;
  // In quiet mode, substitute a no-op spinner so the rest of the code can call
  // spinner methods unconditionally.
  const spinner = quiet ? { start: () => {}, succeed: () => {}, fail: () => {}, info: () => {}, text: '' } : ora('Starting encoding process...').start();
  const createdFiles = []; // Track created files for cleanup on failure

  // Reset time window for fresh timestamps in this encode session
  resetTimeWindow();

  try {
    // Validate input exists
    if (!fs.existsSync(inputFile)) {
      throw new Error(`Path not found: ${inputFile}`);
    }

    // Check if input is a directory
    const isDirectory = fs.statSync(inputFile).isDirectory();

    // Determine format (default to xlsx)
    const format = (options.format || 'xlsx').toLowerCase();
    if (format !== 'xlsx' && format !== 'docx') {
      throw new Error('Invalid format. Use "xlsx" or "docx".');
    }

    const useEncryption = !!options.password;

    // Store chunk input for later processing (after we know content size)
    const chunkInput = (options.chunkSize || '').toString().trim();

    let fileBuffer;
    let filename;
    let extension;
    let size;

    if (isDirectory) {
      // Zip the folder so a directory can be carried as a single payload
      spinner.text = 'Zipping folder...';
      const folderName = path.basename(inputFile);
      fileBuffer = zipFolder(inputFile);
      filename = `${folderName}.zip`;
      extension = '.zip';
      size = fileBuffer.length;
      spinner.succeed && spinner.succeed(`Folder zipped: ${folderName}/ → ${formatBytes(size)}`);
    } else {
      // Read file as buffer
      spinner.text = 'Reading file...';
      fileBuffer = fs.readFileSync(inputFile);
      filename = path.basename(inputFile);
      extension = path.extname(inputFile);
      size = fileBuffer.length;
      // BUG FIX: was `$(unknown)` — interpolate the filename read above.
      spinner.succeed && spinner.succeed(`File read: ${filename} (${formatBytes(size)})`);
    }

    // Generate content hash for integrity verification
    const contentHash = generateContentHash(fileBuffer);

    // Detect if file is already compressed (skip recompression in that case)
    spinner.text = 'Checking file type...';
    let fileType = null;
    let useCompression = true;

    try {
      // file-type is ESM-only, need dynamic import
      const { fileTypeFromBuffer } = await import('file-type');
      fileType = await fileTypeFromBuffer(fileBuffer);

      if (fileType && isCompressedMime(fileType.mime)) {
        useCompression = false;
        spinner.info && spinner.info(`Skipping compression (${fileType.ext} is already compressed)`);
      }
    } catch {
      // If file-type detection fails, still try compression
    }

    // Compress if beneficial
    let processedBuffer = fileBuffer;
    if (useCompression) {
      spinner.text = 'Compressing...';
      const compressedBuffer = await compress(fileBuffer);

      // Only use compression if it actually reduces size
      if (compressedBuffer.length < fileBuffer.length) {
        const savedPercent = ((1 - compressedBuffer.length / fileBuffer.length) * 100).toFixed(1);
        processedBuffer = compressedBuffer;
        spinner.succeed && spinner.succeed(`Compressed: ${formatBytes(fileBuffer.length)} → ${formatBytes(compressedBuffer.length)} (${savedPercent}% saved)`);
      } else {
        useCompression = false;
        spinner.info && spinner.info('Compression skipped (no size benefit)');
      }
    }

    // Convert to base64 (the carrier formats store text, not raw bytes)
    const base64 = processedBuffer.toString('base64');

    let contentToStore;
    let encryptionMeta = null;

    if (useEncryption) {
      spinner.text = 'Encrypting content...';

      // Encrypt the base64 content; keep IV/salt/authTag packed for storage
      const { ciphertext, iv, salt, authTag } = encrypt(base64, options.password);
      encryptionMeta = packEncryptionMeta({ iv, salt, authTag });
      contentToStore = ciphertext;

      spinner.succeed && spinner.succeed('Content encrypted with AES-256-GCM');
    } else {
      contentToStore = base64;
      spinner.info && spinner.info('No password provided - content will NOT be encrypted');
    }

    spinner.text = 'Preparing output...';

    // Generate hash for this encoding session (links multi-part files together)
    const hash = generateHash();

    // Determine output directory
    const outputDir = options.outputDir || process.cwd();

    // Now parse chunk size (we need content size for "X parts" format)
    const contentSize = contentToStore.length;
    const defaultChunkSize = 5 * 1024 * 1024; // 5MB default
    let chunkSizeBytes;
    const chunkInputLower = chunkInput.toLowerCase();

    if (chunkInputLower === '0' || chunkInputLower === 'max' || chunkInputLower === 'single' || chunkInputLower === 'none' || chunkInputLower === '') {
      chunkSizeBytes = Infinity; // No splitting
    } else if (/^\d+\s*parts?$/i.test(chunkInput)) {
      // "X parts" format - divide content evenly
      const numParts = parseInt(chunkInput, 10);
      if (numParts < 1) {
        throw new Error('Number of parts must be at least 1');
      }
      chunkSizeBytes = Math.ceil(contentSize / numParts);
      spinner.info && spinner.info(`Splitting into ${numParts} parts (~${formatBytes(chunkSizeBytes)} content each)`);
    } else if (chunkInput) {
      chunkSizeBytes = parseSizeToBytes(chunkInput);
    } else {
      // NOTE(review): unreachable — an empty chunkInput already matched the
      // first branch above (-> Infinity), so the 5MB default never applies.
      // Kept for safety if the first condition is ever narrowed.
      chunkSizeBytes = defaultChunkSize;
    }

    // Check if we need to split
    const needsSplit = contentSize > chunkSizeBytes;

    // Helper to check file exists and handle overwrite
    const checkOverwrite = (filePath) => {
      if (fs.existsSync(filePath) && !options.force) {
        throw new Error(`File already exists: ${filePath}. Use --force to overwrite.`);
      }
    };

    if (needsSplit) {
      // Split into chunks
      const chunks = [];
      let offset = 0;
      while (offset < contentToStore.length) {
        chunks.push(contentToStore.slice(offset, offset + chunkSizeBytes));
        offset += chunkSizeBytes;
      }
      const totalParts = chunks.length;

      spinner.succeed && spinner.succeed(`File will be split into ${totalParts} parts`);

      // Create output file for each chunk
      for (let i = 0; i < chunks.length; i++) {
        const partNumber = i + 1;
        const partSpinner = quiet ? spinner : ora(`Creating part ${partNumber} of ${totalParts}...`).start();

        const metadata = createMetadata({
          originalFilename: filename,
          originalExtension: extension,
          hash,
          partNumber,
          totalParts,
          originalSize: size,
          format,
          encrypted: useEncryption,
          compressed: useCompression,
          contentHash,
        });

        const outputFilename = generateFilename(hash, partNumber, totalParts, format);
        const outputPath = path.join(outputDir, outputFilename);

        checkOverwrite(outputPath);

        if (format === 'xlsx') {
          await createXlsxWithBase64({
            base64Content: chunks[i],
            encryptionMeta: encryptionMeta || '',
            metadata: serializeMetadata(metadata),
            outputPath,
          });
        } else {
          // For DOCX, include encryption meta in the content if encrypted
          const docxContent = useEncryption
            ? `${encryptionMeta}|||${chunks[i]}`
            : chunks[i];
          await createDocxWithBase64({
            base64Content: docxContent,
            metadata,
            outputPath,
          });
        }

        createdFiles.push(outputPath);
        partSpinner.succeed && partSpinner.succeed(`Created: ${outputFilename} (${formatBytes(chunks[i].length)})`);
      }

      if (!quiet) {
        console.log();
        console.log(chalk.green.bold('✓ Encoding complete!'));
        console.log(chalk.cyan(`  Format: ${format.toUpperCase()}`));
        console.log(chalk.cyan(`  Hash: ${hash}`));
        console.log(chalk.cyan(`  Parts: ${totalParts}`));
        console.log(chalk.cyan(`  Encrypted: ${useEncryption ? 'Yes' : 'No'}`));
        console.log(chalk.cyan(`  Compressed: ${useCompression ? 'Yes' : 'No'}`));
        console.log(chalk.cyan(`  Location: ${outputDir}`));
        if (useEncryption) {
          console.log(chalk.yellow(`  Remember your password - it cannot be recovered!`));
        }
      }
    } else {
      // Single file
      spinner.text = `Creating ${format.toUpperCase()} file...`;

      const metadata = createMetadata({
        originalFilename: filename,
        originalExtension: extension,
        hash,
        partNumber: null,
        totalParts: null,
        originalSize: size,
        format,
        encrypted: useEncryption,
        compressed: useCompression,
        contentHash,
      });

      const outputFilename = generateFilename(hash, null, null, format);
      const outputPath = path.join(outputDir, outputFilename);

      checkOverwrite(outputPath);

      if (format === 'xlsx') {
        await createXlsxWithBase64({
          base64Content: contentToStore,
          encryptionMeta: encryptionMeta || '',
          metadata: serializeMetadata(metadata),
          outputPath,
        });
      } else {
        // For DOCX, include encryption meta in the content if encrypted
        const docxContent = useEncryption
          ? `${encryptionMeta}|||${contentToStore}`
          : contentToStore;
        await createDocxWithBase64({
          base64Content: docxContent,
          metadata,
          outputPath,
        });
      }

      createdFiles.push(outputPath);
      spinner.succeed && spinner.succeed('Encoding complete!');

      if (!quiet) {
        console.log();
        console.log(chalk.green.bold('✓ File encoded successfully!'));
        console.log(chalk.cyan(`  Format: ${format.toUpperCase()}`));
        console.log(chalk.cyan(`  Hash: ${hash}`));
        console.log(chalk.cyan(`  Output: ${outputFilename}`));
        console.log(chalk.cyan(`  Encrypted: ${useEncryption ? 'Yes' : 'No'}`));
        console.log(chalk.cyan(`  Compressed: ${useCompression ? 'Yes' : 'No'}`));
        console.log(chalk.cyan(`  Location: ${outputDir}`));
        if (useEncryption) {
          console.log(chalk.yellow(`  Remember your password - it cannot be recovered!`));
        }
      }
    }
  } catch (error) {
    spinner.fail && spinner.fail('Encoding failed');

    // Cleanup partially created files
    for (const file of createdFiles) {
      try {
        if (fs.existsSync(file)) {
          fs.unlinkSync(file);
        }
      } catch {
        // Ignore cleanup errors
      }
    }

    console.error(chalk.red(`Error: ${error.message}`));
    process.exit(1);
  }
}

module.exports = encodeCommand;
@@ -0,0 +1,113 @@
1
+ const path = require('path');
2
+ const chalk = require('chalk');
3
+ const ora = require('ora');
4
+ const { readDocxBase64 } = require('../lib/docx-handler');
5
+ const { readXlsxBase64 } = require('../lib/xlsx-handler');
6
+ const { validateMetadata, isMultiPart } = require('../lib/metadata');
7
+ const { detectFormat, formatBytes } = require('../lib/utils');
8
+ const { extractContent, findMultiPartFiles } = require('../lib/file-utils');
9
+
10
/**
 * Show information about an encoded file without decoding it.
 * Reads only the embedded metadata (and, for multi-part files, scans the
 * containing directory for sibling parts) — the payload is never extracted.
 * @param {string} inputFile - Path to input file
 * @param {object} options - Command options
 */
async function infoCommand(inputFile, options) {
  const spinner = ora('Reading file metadata...').start();

  try {
    // Detect format from extension
    const format = detectFormat(inputFile);
    if (!format) {
      throw new Error('Unknown file format. Supported formats: .xlsx, .docx');
    }

    // Read the carrier document with the matching handler
    const parsed = format === 'xlsx'
      ? await readXlsxBase64(inputFile)
      : await readDocxBase64(inputFile);

    const { encryptionMeta, metadata } = extractContent(parsed, format);

    // Validate metadata (throws on invalid/missing fields)
    validateMetadata(metadata);

    const isEncrypted = metadata.encrypted || (encryptionMeta && encryptionMeta.length > 0);
    const isCompressed = metadata.compressed || false;

    spinner.succeed('File metadata read successfully');
    console.log();

    // Display info
    console.log(chalk.bold.white('File Information:'));
    console.log(chalk.cyan(`  Format: ${format.toUpperCase()}`));
    console.log(chalk.cyan(`  Tool version: ${metadata.version || '1.x'}`));
    console.log();

    console.log(chalk.bold.white('Original File:'));
    console.log(chalk.cyan(`  Filename: ${metadata.originalFilename}`));
    console.log(chalk.cyan(`  Extension: ${metadata.originalExtension}`));
    console.log(chalk.cyan(`  Size: ${formatBytes(metadata.originalSize)}`));
    console.log();

    console.log(chalk.bold.white('Encoding Options:'));
    console.log(chalk.cyan(`  Encrypted: ${isEncrypted ? chalk.yellow('Yes') : 'No'}`));
    console.log(chalk.cyan(`  Compressed: ${isCompressed ? chalk.green('Yes') : 'No'}`));
    console.log(chalk.cyan(`  Encoded on: ${metadata.encodingDate || 'Unknown'}`));

    if (metadata.contentHash) {
      console.log(chalk.cyan(`  Content hash: ${metadata.contentHash.slice(0, 16)}...`));
    }
    console.log();

    if (!isMultiPart(metadata)) {
      // Single-file payload
      console.log(chalk.bold.white('Single File:'));
      console.log(chalk.cyan(`  Hash: ${metadata.hash}`));
      console.log();
      console.log(chalk.green.bold('Ready to decode'));
    } else {
      // Multi-part info
      console.log(chalk.bold.white('Multi-part File:'));
      console.log(chalk.cyan(`  This is part: ${metadata.partNumber} of ${metadata.totalParts}`));
      console.log(chalk.cyan(`  Hash: ${metadata.hash}`));

      // Scan the file's directory for sibling parts of the same session hash
      const containingDir = path.dirname(inputFile);
      const discovered = findMultiPartFiles(containingDir, metadata.hash, format);

      console.log();
      console.log(chalk.bold.white('Parts found in directory:'));

      for (let partNo = 1; partNo <= metadata.totalParts; partNo++) {
        const located = discovered.find((p) => p.partNumber === partNo);
        located
          ? console.log(chalk.green(`  ✓ Part ${partNo}: ${located.filename}`))
          : console.log(chalk.red(`  ✗ Part ${partNo}: MISSING`));
      }

      if (discovered.length === metadata.totalParts) {
        console.log();
        console.log(chalk.green.bold('All parts found - ready to decode'));
      } else {
        console.log();
        console.log(chalk.yellow.bold(`Missing ${metadata.totalParts - discovered.length} part(s)`));
      }
    }

    if (isEncrypted) {
      console.log(chalk.yellow('\nNote: Password required for decoding'));
    }

  } catch (error) {
    spinner.fail('Failed to read file info');
    console.error(chalk.red(`Error: ${error.message}`));
    process.exit(1);
  }
}

module.exports = infoCommand;
@@ -0,0 +1,169 @@
1
+ const path = require('path');
2
+ const chalk = require('chalk');
3
+ const ora = require('ora');
4
+ const { readDocxBase64 } = require('../lib/docx-handler');
5
+ const { readXlsxBase64 } = require('../lib/xlsx-handler');
6
+ const { validateMetadata, isMultiPart } = require('../lib/metadata');
7
+ const { detectFormat, formatBytes } = require('../lib/utils');
8
+ const { decrypt, unpackEncryptionMeta } = require('../lib/crypto');
9
+ const { extractContent, findMultiPartFiles } = require('../lib/file-utils');
10
+
11
/**
 * Verify that a file can be decoded without actually writing output.
 * Checks: carrier format, metadata validity, presence of all multi-part
 * siblings, and (when a password is supplied) that decryption succeeds.
 * Exits with code 1 on hard failures or blocking issues.
 * @param {string} inputFile - Path to input file
 * @param {object} options - Command options
 * @param {string} options.password - Decryption password (if encrypted)
 */
async function verifyCommand(inputFile, options) {
  const spinner = ora('Verifying file...').start();
  const issues = []; // Accumulates non-fatal findings for the summary

  try {
    // Detect format from extension
    const format = detectFormat(inputFile);
    if (!format) {
      throw new Error('Unknown file format. Supported formats: .xlsx, .docx');
    }

    spinner.text = `Reading ${format.toUpperCase()} file...`;

    // Read file with the matching handler
    let readResult;
    if (format === 'xlsx') {
      readResult = await readXlsxBase64(inputFile);
    } else {
      readResult = await readDocxBase64(inputFile);
    }

    const { encryptedContent, encryptionMeta, metadata } = extractContent(readResult, format);

    // Validate metadata — record the problem instead of aborting, so the
    // remaining checks still run
    try {
      validateMetadata(metadata);
      spinner.succeed('Metadata valid');
    } catch (e) {
      issues.push(`Metadata: ${e.message}`);
      spinner.warn('Metadata issues found');
    }

    const isEncrypted = metadata.encrypted || (encryptionMeta && encryptionMeta.length > 0);

    // Check multi-part completeness
    if (isMultiPart(metadata)) {
      spinner.text = 'Checking multi-part files...';

      const inputDir = path.dirname(inputFile);
      const allParts = findMultiPartFiles(inputDir, metadata.hash, format);

      if (allParts.length !== metadata.totalParts) {
        const missing = [];
        const foundParts = new Set(allParts.map(p => p.partNumber));

        for (let i = 1; i <= metadata.totalParts; i++) {
          if (!foundParts.has(i)) {
            missing.push(i);
          }
        }

        issues.push(`Missing parts: ${missing.join(', ')}`);
        spinner.warn(`Found ${allParts.length}/${metadata.totalParts} parts`);
      } else {
        spinner.succeed(`All ${metadata.totalParts} parts found`);
      }
    }

    // Check password for encrypted files
    if (isEncrypted) {
      if (!options.password) {
        issues.push('File is encrypted but no password provided');
        spinner.warn('Encryption check skipped (no password)');
      } else {
        spinner.text = 'Verifying decryption...';

        try {
          const { iv, salt, authTag } = unpackEncryptionMeta(encryptionMeta);

          // For multi-part files, we need to merge all parts before decryption
          // AES-GCM requires the complete ciphertext to verify the auth tag
          let fullContent = encryptedContent;

          if (isMultiPart(metadata)) {
            const inputDir = path.dirname(inputFile);
            const allParts = findMultiPartFiles(inputDir, metadata.hash, format);

            if (allParts.length === metadata.totalParts) {
              // BUG FIX: concatenate chunks in part-number order. Relying on
              // the directory-scan order could reassemble the ciphertext out
              // of order and fail the GCM auth-tag check despite a correct
              // password. (No-op if findMultiPartFiles already sorts.)
              const orderedParts = [...allParts].sort((a, b) => a.partNumber - b.partNumber);

              // Read and merge all parts
              const contentParts = [];
              for (const part of orderedParts) {
                let partResult;
                if (format === 'xlsx') {
                  partResult = await readXlsxBase64(part.path);
                } else {
                  partResult = await readDocxBase64(part.path);
                }
                const { encryptedContent: partContent } = extractContent(partResult, format);
                contentParts.push(partContent);
              }
              fullContent = contentParts.join('');
            }
          }

          // Decrypt the full content to verify password
          decrypt(fullContent, options.password, iv, salt, authTag);
          spinner.succeed('Decryption password valid');
        } catch (e) {
          issues.push('Decryption failed - wrong password or corrupted data');
          spinner.fail('Decryption check failed');
        }
      }
    }

    // Summary
    console.log();

    if (issues.length === 0) {
      console.log(chalk.green.bold('✓ File verification passed!'));
      console.log(chalk.cyan(`  Original file: ${metadata.originalFilename}`));
      console.log(chalk.cyan(`  Size: ${formatBytes(metadata.originalSize)}`));
      console.log(chalk.cyan(`  Encrypted: ${isEncrypted ? 'Yes' : 'No'}`));
      console.log(chalk.cyan(`  Compressed: ${metadata.compressed ? 'Yes' : 'No'}`));

      if (isMultiPart(metadata)) {
        console.log(chalk.cyan(`  Parts: ${metadata.totalParts}`));
      }

      console.log();
      console.log(chalk.green('File is ready to decode.'));
    } else {
      console.log(chalk.yellow.bold('⚠ Verification completed with issues:'));
      console.log();

      for (const issue of issues) {
        console.log(chalk.yellow(`  • ${issue}`));
      }

      console.log();

      // Determine if still decodable
      const hasBlockingIssue = issues.some(i =>
        i.includes('Missing parts') ||
        i.includes('Decryption failed') ||
        i.includes('Metadata')
      );

      if (hasBlockingIssue) {
        console.log(chalk.red('File cannot be decoded until issues are resolved.'));
        process.exit(1);
      } else {
        console.log(chalk.yellow('File may still be decodable. Run decode to attempt.'));
      }
    }

  } catch (error) {
    spinner.fail('Verification failed');
    console.error(chalk.red(`Error: ${error.message}`));
    process.exit(1);
  }
}

module.exports = verifyCommand;