stegdoc 3.0.2 → 5.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -2
- package/src/commands/decode.js +485 -215
- package/src/commands/encode.js +567 -346
- package/src/commands/info.js +118 -113
- package/src/commands/verify.js +207 -169
- package/src/index.js +89 -87
- package/src/lib/compression.js +177 -97
- package/src/lib/crypto.js +172 -118
- package/src/lib/decoy-generator.js +306 -306
- package/src/lib/docx-handler.js +587 -161
- package/src/lib/docx-templates.js +355 -0
- package/src/lib/file-handler.js +113 -113
- package/src/lib/file-utils.js +160 -150
- package/src/lib/interactive.js +190 -190
- package/src/lib/log-generator.js +764 -0
- package/src/lib/metadata.js +151 -111
- package/src/lib/streams.js +197 -0
- package/src/lib/utils.js +227 -227
- package/src/lib/xlsx-handler.js +597 -359
- package/src/lib/xml-utils.js +115 -115
package/src/commands/decode.js
CHANGED
|
@@ -1,215 +1,485 @@
|
|
|
1
|
-
const path = require('path');
|
|
2
|
-
const fs = require('fs');
|
|
3
|
-
const
|
|
4
|
-
const
|
|
5
|
-
const
|
|
6
|
-
const {
|
|
7
|
-
const {
|
|
8
|
-
const {
|
|
9
|
-
const {
|
|
10
|
-
const {
|
|
11
|
-
const {
|
|
12
|
-
const {
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
*
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
//
|
|
43
|
-
const
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
}
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
}
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
}
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
if (
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
1
|
+
const path = require('path');
|
|
2
|
+
const fs = require('fs');
|
|
3
|
+
const { finished } = require('stream/promises');
|
|
4
|
+
const chalk = require('chalk');
|
|
5
|
+
const ora = require('ora');
|
|
6
|
+
const { readDocxBase64, } = require('../lib/docx-handler');
|
|
7
|
+
const { readXlsxBase64, readXlsxV5 } = require('../lib/xlsx-handler');
|
|
8
|
+
const { validateMetadata, isMultiPart, isStreamingFormat, isLogEmbedFormat, parseMetadata } = require('../lib/metadata');
|
|
9
|
+
const { detectFormat, formatBytes, generateContentHash } = require('../lib/utils');
|
|
10
|
+
const { decrypt, unpackEncryptionMeta, createDecryptStream } = require('../lib/crypto');
|
|
11
|
+
const { decompress, createDecompressStream, decompressBrotli, createBrotliDecompressStream } = require('../lib/compression');
|
|
12
|
+
const { promptPassword, promptOverwrite } = require('../lib/interactive');
|
|
13
|
+
const { extractContent, findMultiPartFiles, mergeBase64Chunks } = require('../lib/file-utils');
|
|
14
|
+
const { HashPassthrough } = require('../lib/streams');
|
|
15
|
+
|
|
16
|
+
/**
 * Read a container file with the reader matching its format.
 *
 * @param {string} filePath - Path to the .xlsx/.docx container.
 * @param {string} format - 'xlsx' selects the XLSX reader; any other value
 *   falls back to the DOCX reader.
 * @returns {Promise<object>} Read result produced by the format-specific handler.
 */
async function readFile(filePath, format) {
  // Pick the handler once, then delegate.
  const reader = format === 'xlsx' ? readXlsxBase64 : readDocxBase64;
  return await reader(filePath);
}
|
|
26
|
+
|
|
27
|
+
/**
 * Decode a DOCX/XLSX file back to original format.
 *
 * CLI entry point: detects the container format from the filename, reads the
 * container, then routes to the matching pipeline — v5 log-embed
 * (`decodeV5`), v4 streaming for XLSX (`decodeStreaming`), or the legacy v3
 * path (`decodeLegacy`).
 *
 * @param {string} inputFile - Path to the encoded .xlsx/.docx container.
 * @param {object} options - CLI flags: output, password, force, quiet, yes.
 *   May be mutated: `options.password` is filled in from an interactive
 *   prompt when the payload is encrypted and no password was supplied.
 * Side effects: exits the process (code 1) on any error; code 0 when the
 * user declines an overwrite prompt.
 */
async function decodeCommand(inputFile, options) {
  const quiet = options.quiet || false;
  // In quiet mode substitute a no-op spinner stub so the rest of the code can
  // touch spinner methods/`.text` unconditionally.
  const spinner = quiet ? { start: () => {}, succeed: () => {}, fail: () => {}, info: () => {}, warn: () => {}, text: '' } : ora('Starting decoding process...').start();

  try {
    // Format is inferred from the file extension (see detectFormat).
    const format = detectFormat(inputFile);
    if (!format) {
      throw new Error('Unknown file format. Supported formats: .xlsx, .docx');
    }

    spinner.text = `Reading ${format.toUpperCase()} file...`;

    // Read the first file
    const readResult = await readFile(inputFile, format);

    // Route based on format version
    if (readResult.formatVersion === 'v5') {
      await decodeV5(inputFile, format, readResult, options, spinner, quiet);
    } else {
      // Legacy v3/v4 path
      const { encryptedContent, encryptionMeta, metadata } = extractContent(readResult, format);
      validateMetadata(metadata);

      // Encryption is signalled either by the metadata flag or by the
      // presence of non-empty encryption metadata.
      const isEncrypted = metadata.encrypted || (encryptionMeta && encryptionMeta.length > 0);
      const isCompressed = metadata.compressed || false;

      spinner.succeed && spinner.succeed(`${format.toUpperCase()} file read successfully`);

      if (!quiet) {
        console.log(chalk.cyan(` Original file: ${metadata.originalFilename}`));
        console.log(chalk.cyan(` Original size: ${formatBytes(metadata.originalSize)}`));
        console.log(chalk.cyan(` Encrypted: ${isEncrypted ? 'Yes' : 'No'}`));
        console.log(chalk.cyan(` Compressed: ${isCompressed ? 'Yes' : 'No'}`));
      }

      // Need a password: prompt interactively unless running non-interactively
      // (quiet or --yes), in which case fail fast.
      if (isEncrypted && !options.password) {
        if (quiet || options.yes) {
          throw new Error('Password is required for encrypted files. Use -p or --password to specify.');
        }
        options.password = await promptPassword();
      }

      let outputPath = resolveOutputPath(options, metadata);

      // Refuse to clobber an existing file unless --force; interactively
      // confirm when a terminal is available.
      if (fs.existsSync(outputPath) && !options.force) {
        if (quiet || options.yes) {
          throw new Error(`File already exists: ${outputPath}. Use --force to overwrite.`);
        }
        const shouldOverwrite = await promptOverwrite(outputPath);
        if (!shouldOverwrite) {
          console.log(chalk.yellow('Operation cancelled.'));
          process.exit(0);
        }
      }

      // v4 streaming is only produced for XLSX containers; everything else
      // (including streaming-flagged DOCX, if that can occur) goes legacy.
      if (isStreamingFormat(metadata) && format === 'xlsx') {
        await decodeStreaming(inputFile, format, metadata, encryptedContent, encryptionMeta, isEncrypted, isCompressed, options, outputPath, spinner, quiet);
      } else {
        await decodeLegacy(inputFile, format, metadata, encryptedContent, encryptionMeta, isEncrypted, isCompressed, options, outputPath, spinner, quiet);
      }
    }
  } catch (error) {
    spinner.fail && spinner.fail('Decoding failed');
    console.error(chalk.red(`Error: ${error.message}`));
    process.exit(1);
  }
}
|
|
97
|
+
|
|
98
|
+
/**
 * Resolve the output path for the decoded file.
 *
 * Precedence: explicit `options.output` — an existing directory is joined
 * with the embedded filename; an extensionless, non-existent path is created
 * as a directory and joined; anything else is used verbatim as the file
 * path. With no `options.output`, the file goes into the current working
 * directory under the embedded filename.
 *
 * Security fix: `metadata.originalFilename` is read out of the container
 * file and is therefore untrusted. It is reduced to its basename so a
 * crafted name such as `../../.bashrc` cannot escape the chosen output
 * directory (path traversal).
 *
 * @param {object} options - CLI options; `options.output` may name a file or directory.
 * @param {object} metadata - Embedded metadata; only `originalFilename` is read.
 * @returns {string} Path the decoded file should be written to.
 */
function resolveOutputPath(options, metadata) {
  // Untrusted input: strip any directory components from the embedded name.
  const safeName = path.basename(metadata.originalFilename);
  if (options.output) {
    if (fs.existsSync(options.output) && fs.statSync(options.output).isDirectory()) {
      return path.join(options.output, safeName);
    } else if (!path.extname(options.output) && !fs.existsSync(options.output)) {
      // Treat an extensionless, non-existent target as a directory to create.
      fs.mkdirSync(options.output, { recursive: true });
      return path.join(options.output, safeName);
    } else {
      return options.output;
    }
  }
  return path.join(process.cwd(), safeName);
}
|
|
114
|
+
|
|
115
|
+
// ─── v5 Log-Embed Decode ────────────────────────────────────────────────────
|
|
116
|
+
|
|
117
|
+
/**
 * Decode a v5 log-embed XLSX file.
 *
 * Builds a write pipeline ([decompress] → hash passthrough → output file),
 * then feeds it the decrypted (or raw) payload of each part, finally
 * verifying the SHA-256 content hash against the embedded metadata.
 *
 * @param {string} inputFile - Path to the container that was read first.
 * @param {string} format - 'xlsx' or 'docx'; used to read sibling parts.
 * @param {object} firstReadResult - Result from readFile(); supplies
 *   `metadata`, `payloadBuffer`, and `encryptionMeta`.
 * @param {object} options - CLI flags; `options.password` may be filled in here.
 * @param {object} spinner - ora spinner or the quiet no-op stub.
 * @param {boolean} quiet - Suppresses console output and prompts.
 * @throws On missing parts, bad password, or content-hash mismatch (the
 *   partially written output file is deleted on hash mismatch).
 */
async function decodeV5(inputFile, format, firstReadResult, options, spinner, quiet) {
  const metadata = firstReadResult.metadata;
  validateMetadata(metadata);

  const isEncrypted = metadata.encrypted || false;
  const isCompressed = metadata.compressed || false;
  // v5 defaults to brotli when the metadata does not name an algorithm.
  const compressionAlgo = metadata.compressionAlgo || 'brotli';

  spinner.succeed && spinner.succeed(`${format.toUpperCase()} file read (v5 log-embed format)`);

  if (!quiet) {
    console.log(chalk.cyan(` Original file: ${metadata.originalFilename}`));
    console.log(chalk.cyan(` Original size: ${formatBytes(metadata.originalSize)}`));
    console.log(chalk.cyan(` Encrypted: ${isEncrypted ? 'Yes' : 'No'}`));
    console.log(chalk.cyan(` Compressed: ${isCompressed ? `Yes (${compressionAlgo})` : 'No'}`));
  }

  // Need a password: prompt unless non-interactive, then fail fast instead.
  if (isEncrypted && !options.password) {
    if (quiet || options.yes) {
      throw new Error('Password is required for encrypted files. Use -p or --password to specify.');
    }
    options.password = await promptPassword();
  }

  let outputPath = resolveOutputPath(options, metadata);

  // Overwrite protection mirrors the legacy path in decodeCommand.
  if (fs.existsSync(outputPath) && !options.force) {
    if (quiet || options.yes) {
      throw new Error(`File already exists: ${outputPath}. Use --force to overwrite.`);
    }
    const shouldOverwrite = await promptOverwrite(outputPath);
    if (!shouldOverwrite) {
      console.log(chalk.yellow('Operation cancelled.'));
      process.exit(0);
    }
  }

  // Ensure output directory exists
  const outputDir = path.dirname(outputPath);
  if (!fs.existsSync(outputDir)) {
    fs.mkdirSync(outputDir, { recursive: true });
  }

  // Set up output pipeline: [decompress] → hash → file
  const hashStream = new HashPassthrough();
  const outputStream = fs.createWriteStream(outputPath);

  let decompressStream = null;
  if (isCompressed) {
    if (compressionAlgo === 'brotli') {
      decompressStream = createBrotliDecompressStream();
    } else {
      decompressStream = createDecompressStream();
    }
    decompressStream.pipe(hashStream).pipe(outputStream);
  } else {
    hashStream.pipe(outputStream);
  }

  // All payload writes go to the head of the pipeline.
  const writeTarget = isCompressed ? decompressStream : hashStream;

  // Check for multi-part
  // NOTE(review): `metadata.partNumber !== null` is also true when partNumber
  // is undefined — confirm parseMetadata always sets it explicitly to null
  // for single-part files, otherwise every file takes the multi-part branch.
  const hasMultipleParts = isMultiPart(metadata) || metadata.partNumber !== null;
  let totalPartsFound = 1;

  if (hasMultipleParts) {
    // Sibling parts are discovered in the same directory, matched by hash.
    const inputDir = path.dirname(inputFile);
    const allParts = findMultiPartFiles(inputDir, metadata.hash, format);
    totalPartsFound = allParts.length;

    if (metadata.totalParts !== null && totalPartsFound !== metadata.totalParts) {
      throw new Error(
        `Missing parts! Found ${totalPartsFound} of ${metadata.totalParts} parts. ` +
        `Make sure all parts are in the same directory.`
      );
    }

    spinner.text = `Multi-part file detected (${totalPartsFound} parts)`;
    spinner.succeed && spinner.succeed(`Found all ${totalPartsFound} parts`);

    for (let i = 0; i < allParts.length; i++) {
      const partSpinner = quiet ? spinner : ora(`Decoding part ${i + 1} of ${totalPartsFound}...`).start();

      const partResult = await readFile(allParts[i].path, format);

      let partPayload;
      let partEncMeta;

      if (partResult.formatVersion === 'v5') {
        partPayload = partResult.payloadBuffer;
        partEncMeta = partResult.encryptionMeta;
      } else {
        // Shouldn't happen for v5, but handle gracefully
        const extracted = extractContent(partResult, format);
        partPayload = Buffer.from(extracted.encryptedContent, 'base64');
        partEncMeta = extracted.encryptionMeta;
      }

      if (isEncrypted) {
        // Each part carries its own IV/salt/authTag; decrypt it whole in
        // memory before feeding the pipeline.
        const { iv, salt, authTag } = unpackEncryptionMeta(partEncMeta);
        const decipher = createDecryptStream(options.password, iv, salt, authTag);
        try {
          const decrypted = Buffer.concat([decipher.update(partPayload), decipher.final()]);
          // NOTE(review): write() return value ignored — no backpressure
          // handling; acceptable while parts are buffered in memory anyway.
          writeTarget.write(decrypted);
        } catch (error) {
          // GCM auth failure and wrong password are indistinguishable here.
          throw new Error('Decryption failed: Invalid password or corrupted data');
        }
      } else {
        writeTarget.write(partPayload);
      }

      partSpinner.succeed && partSpinner.succeed(`Part ${i + 1} decoded`);
    }
  } else {
    // Single file
    spinner.text = 'Decoding...';

    const payloadBuffer = firstReadResult.payloadBuffer;
    const encryptionMeta = firstReadResult.encryptionMeta;

    if (isEncrypted) {
      const { iv, salt, authTag } = unpackEncryptionMeta(encryptionMeta);
      const decipher = createDecryptStream(options.password, iv, salt, authTag);
      try {
        const decrypted = Buffer.concat([decipher.update(payloadBuffer), decipher.final()]);
        writeTarget.write(decrypted);
      } catch (error) {
        throw new Error('Decryption failed: Invalid password or corrupted data');
      }
    } else {
      writeTarget.write(payloadBuffer);
    }
  }

  // End pipeline and wait
  writeTarget.end();
  await finished(outputStream);

  // Verify integrity
  if (metadata.contentHash) {
    spinner.text = 'Verifying integrity...';
    // assumes HashPassthrough exposes the final digest after end — TODO confirm
    const actualHash = hashStream.digest;
    if (actualHash !== metadata.contentHash) {
      // Remove the bad output before failing; deletion errors are irrelevant.
      try { fs.unlinkSync(outputPath); } catch { /* ignore */ }
      throw new Error('Integrity check failed! The file may be corrupted or tampered with.');
    }
    spinner.succeed && spinner.succeed('Integrity verified (SHA-256 match)');
  }

  spinner.succeed && spinner.succeed('Decoding complete!');

  if (!quiet) {
    const outputSize = fs.statSync(outputPath).size;
    console.log();
    console.log(chalk.green.bold('✓ File decoded successfully!'));
    console.log(chalk.cyan(` Original: ${metadata.originalFilename}`));
    console.log(chalk.cyan(` Output: ${outputPath}`));
    console.log(chalk.cyan(` Size: ${formatBytes(outputSize)}`));
    if (hasMultipleParts) {
      console.log(chalk.cyan(` Parts merged: ${totalPartsFound}`));
    }
  }
}
|
|
283
|
+
|
|
284
|
+
// ─── v4 Streaming Decode ────────────────────────────────────────────────────
|
|
285
|
+
|
|
286
|
+
/**
 * Decode a v4 streaming-format XLSX container.
 *
 * Same pipeline shape as decodeV5 ([decompress] → hash → file), but payloads
 * are base64 strings from extractContent, and decompression is always gzip —
 * there is no algorithm branch here, unlike decodeV5.
 *
 * @param {string} inputFile - Path to the first container read.
 * @param {string} format - Container format (caller guarantees 'xlsx').
 * @param {object} metadata - Validated embedded metadata.
 * @param {string} encryptedContent - Base64 payload of the first part.
 * @param {string} encryptionMeta - Packed IV/salt/authTag (when encrypted).
 * @param {boolean} isEncrypted
 * @param {boolean} isCompressed
 * @param {object} options - CLI flags (password already resolved by caller).
 * @param {string} outputPath - Destination path (already overwrite-checked).
 * @param {object} spinner - ora spinner or quiet stub.
 * @param {boolean} quiet
 * @throws On missing parts, bad password, or content-hash mismatch.
 */
async function decodeStreaming(inputFile, format, metadata, encryptedContent, encryptionMeta, isEncrypted, isCompressed, options, outputPath, spinner, quiet) {
  const outputDir = path.dirname(outputPath);
  if (!fs.existsSync(outputDir)) {
    fs.mkdirSync(outputDir, { recursive: true });
  }

  // Pipeline: [gzip-decompress] → hash passthrough → output file.
  const hashStream = new HashPassthrough();
  const outputStream = fs.createWriteStream(outputPath);

  let decompressStream = null;
  if (isCompressed) {
    decompressStream = createDecompressStream();
    decompressStream.pipe(hashStream).pipe(outputStream);
  } else {
    hashStream.pipe(outputStream);
  }

  const writeTarget = isCompressed ? decompressStream : hashStream;

  // NOTE(review): `metadata.partNumber !== null` is also true for undefined —
  // confirm parseMetadata always sets partNumber explicitly.
  const hasMultipleParts = isMultiPart(metadata) || metadata.partNumber !== null;
  let totalPartsFound = 1;

  if (hasMultipleParts) {
    const inputDir = path.dirname(inputFile);
    const allParts = findMultiPartFiles(inputDir, metadata.hash, format);
    totalPartsFound = allParts.length;

    if (metadata.totalParts !== null && totalPartsFound !== metadata.totalParts) {
      throw new Error(
        `Missing parts! Found ${totalPartsFound} of ${metadata.totalParts} parts. ` +
        `Make sure all parts are in the same directory.`
      );
    }

    spinner.text = `Multi-part file detected (${totalPartsFound} parts)`;
    spinner.succeed && spinner.succeed(`Found all ${totalPartsFound} parts`);

    for (let i = 0; i < allParts.length; i++) {
      const partSpinner = quiet ? spinner : ora(`Decoding part ${i + 1} of ${totalPartsFound}...`).start();

      const partResult = await readFile(allParts[i].path, format);
      const { encryptedContent: partContent, encryptionMeta: partEncMeta } = extractContent(partResult, format);

      const binaryData = Buffer.from(partContent, 'base64');

      if (isEncrypted) {
        // Per-part IV/salt/authTag; part decrypted whole in memory.
        const { iv, salt, authTag } = unpackEncryptionMeta(partEncMeta);
        const decipher = createDecryptStream(options.password, iv, salt, authTag);
        try {
          const decrypted = Buffer.concat([decipher.update(binaryData), decipher.final()]);
          writeTarget.write(decrypted);
        } catch (error) {
          throw new Error('Decryption failed: Invalid password or corrupted data');
        }
      } else {
        writeTarget.write(binaryData);
      }

      partSpinner.succeed && partSpinner.succeed(`Part ${i + 1} decoded`);
    }
  } else {
    spinner.text = 'Decoding...';
    const binaryData = Buffer.from(encryptedContent, 'base64');

    if (isEncrypted) {
      const { iv, salt, authTag } = unpackEncryptionMeta(encryptionMeta);
      const decipher = createDecryptStream(options.password, iv, salt, authTag);
      try {
        const decrypted = Buffer.concat([decipher.update(binaryData), decipher.final()]);
        writeTarget.write(decrypted);
      } catch (error) {
        throw new Error('Decryption failed: Invalid password or corrupted data');
      }
    } else {
      writeTarget.write(binaryData);
    }
  }

  // Flush the pipeline and wait for the file to be fully written.
  writeTarget.end();
  await finished(outputStream);

  if (metadata.contentHash) {
    spinner.text = 'Verifying integrity...';
    // assumes HashPassthrough exposes the final digest after end — TODO confirm
    const actualHash = hashStream.digest;
    if (actualHash !== metadata.contentHash) {
      try { fs.unlinkSync(outputPath); } catch { /* ignore */ }
      throw new Error('Integrity check failed! The file may be corrupted or tampered with.');
    }
    spinner.succeed && spinner.succeed('Integrity verified (SHA-256 match)');
  }

  spinner.succeed && spinner.succeed('Decoding complete!');

  if (!quiet) {
    const outputSize = fs.statSync(outputPath).size;
    console.log();
    console.log(chalk.green.bold('✓ File decoded successfully!'));
    console.log(chalk.cyan(` Original: ${metadata.originalFilename}`));
    console.log(chalk.cyan(` Output: ${outputPath}`));
    console.log(chalk.cyan(` Size: ${formatBytes(outputSize)}`));
    if (hasMultipleParts) {
      console.log(chalk.cyan(` Parts merged: ${totalPartsFound}`));
    }
  }
}
|
|
391
|
+
|
|
392
|
+
// ─── v3 Legacy Decode ───────────────────────────────────────────────────────
|
|
393
|
+
|
|
394
|
+
/**
 * Decode a legacy v3 container (fully buffered, no streaming).
 *
 * Order of operations: merge parts (if any) → decrypt base64 → decompress →
 * verify SHA-256 → write output in one shot. The whole payload is held in
 * memory, unlike the v4/v5 streaming paths.
 *
 * @param {string} inputFile - Path to the first container read.
 * @param {string} format - 'xlsx' or 'docx'.
 * @param {object} metadata - Validated embedded metadata.
 * @param {string} encryptedContent - Base64 payload of the first part.
 * @param {string} encryptionMeta - Packed IV/salt/authTag (when encrypted).
 * @param {boolean} isEncrypted
 * @param {boolean} isCompressed
 * @param {object} options - CLI flags (password already resolved by caller).
 * @param {string} outputPath - Destination path (already overwrite-checked).
 * @param {object} spinner - ora spinner or quiet stub.
 * @param {boolean} quiet
 * @throws On missing parts, bad password, or content-hash mismatch.
 */
async function decodeLegacy(inputFile, format, metadata, encryptedContent, encryptionMeta, isEncrypted, isCompressed, options, outputPath, spinner, quiet) {
  let finalBase64;

  if (isMultiPart(metadata)) {
    spinner.start && (spinner.text = `Multi-part file detected (${metadata.totalParts} parts)`);

    const inputDir = path.dirname(inputFile);
    const allParts = findMultiPartFiles(inputDir, metadata.hash, format, metadata.totalParts);

    if (allParts.length !== metadata.totalParts) {
      throw new Error(
        `Missing parts! Found ${allParts.length} of ${metadata.totalParts} parts. ` +
        `Make sure all parts are in the same directory.`
      );
    }

    spinner.succeed && spinner.succeed(`Found all ${metadata.totalParts} parts`);

    // Collect every part's base64 payload in order, then merge.
    const chunks = [];
    for (let i = 0; i < allParts.length; i++) {
      const partSpinner = quiet ? spinner : ora(`Reading part ${i + 1} of ${metadata.totalParts}...`).start();
      const partResult = await readFile(allParts[i].path, format);
      const { encryptedContent: partContent } = extractContent(partResult, format);
      chunks.push(partContent);
      partSpinner.succeed && partSpinner.succeed(`Part ${i + 1} read`);
    }

    spinner.text = 'Merging parts...';
    const mergedContent = mergeBase64Chunks(chunks);
    spinner.succeed && spinner.succeed('Parts merged successfully');

    if (isEncrypted) {
      // v3 encrypts the merged payload as a single unit, with one
      // IV/salt/authTag taken from the first container.
      spinner.text = 'Decrypting content...';
      const { iv, salt, authTag } = unpackEncryptionMeta(encryptionMeta);
      finalBase64 = decrypt(mergedContent, options.password, iv, salt, authTag);
      spinner.succeed && spinner.succeed('Content decrypted');
    } else {
      finalBase64 = mergedContent;
    }
  } else {
    if (isEncrypted) {
      spinner.text = 'Decrypting content...';
      const { iv, salt, authTag } = unpackEncryptionMeta(encryptionMeta);
      finalBase64 = decrypt(encryptedContent, options.password, iv, salt, authTag);
      spinner.succeed && spinner.succeed('Content decrypted');
    } else {
      finalBase64 = encryptedContent;
    }
  }

  // Decode (and optionally decompress) the assembled base64 payload.
  let fileBuffer;
  if (isCompressed) {
    spinner.text = 'Decompressing...';
    const compressedBuffer = Buffer.from(finalBase64, 'base64');
    fileBuffer = await decompress(compressedBuffer);
    spinner.succeed && spinner.succeed(`Decompressed: ${formatBytes(compressedBuffer.length)} → ${formatBytes(fileBuffer.length)}`);
  } else {
    fileBuffer = Buffer.from(finalBase64, 'base64');
  }

  // Integrity check against the embedded SHA-256 of the original content.
  if (metadata.contentHash) {
    spinner.text = 'Verifying integrity...';
    const actualHash = generateContentHash(fileBuffer);
    if (actualHash !== metadata.contentHash) {
      throw new Error('Integrity check failed! The file may be corrupted or tampered with.');
    }
    spinner.succeed && spinner.succeed('Integrity verified (SHA-256 match)');
  }

  spinner.text = 'Writing output file...';
  const outputDir = path.dirname(outputPath);
  if (!fs.existsSync(outputDir)) {
    fs.mkdirSync(outputDir, { recursive: true });
  }

  fs.writeFileSync(outputPath, fileBuffer);

  spinner.succeed && spinner.succeed('Decoding complete!');

  if (!quiet) {
    console.log();
    console.log(chalk.green.bold('✓ File decoded successfully!'));
    console.log(chalk.cyan(` Original: ${metadata.originalFilename}`));
    console.log(chalk.cyan(` Output: ${outputPath}`));
    console.log(chalk.cyan(` Size: ${formatBytes(fileBuffer.length)}`));
    if (isMultiPart(metadata)) {
      console.log(chalk.cyan(` Parts merged: ${metadata.totalParts}`));
    }
  }
}
|
|
484
|
+
|
|
485
|
+
module.exports = decodeCommand;
|