roxify 1.1.6 → 1.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -84,7 +84,7 @@ if (result.files) {
84
84
 
85
85
  ## Example: Progress Logging
86
86
 
87
- ```js
87
+ ````js
88
88
  import { encodeBinaryToPng, decodePngToBinary } from 'roxify';
89
89
 
90
90
  const data = Buffer.from('Large data to encode...');
@@ -100,13 +100,37 @@ const png = await encodeBinaryToPng(data, {
100
100
  },
101
101
  });
102
102
 
103
+
104
+ Node.js (detailed chunk progress example)
105
+
106
+ ```js
107
+ import { encodeBinaryToPng } from 'roxify';
108
+ import { readFileSync, writeFileSync } from 'fs';
109
+
110
+ const buf = readFileSync('test-data/testVideo.mp4');
111
+ const png = await encodeBinaryToPng(buf, {
112
+ showProgress: false,
113
+ onProgress(info) {
114
+ if (info.phase === 'compress_progress' && info.loaded && info.total) {
115
+ const percent = Math.round((info.loaded / info.total) * 100);
116
+ console.log(`[progress] ${info.phase} ${percent}% (${info.loaded}/${info.total} chunks)`);
117
+ } else {
118
+ console.log(`[progress] ${info.phase} ${info.loaded || ''}/${info.total || ''}`);
119
+ }
120
+ },
121
+ });
122
+
123
+ writeFileSync('out.png', png);
124
+ ````
125
+
103
126
  // Decode with progress logging
104
127
  const { buf } = await decodePngToBinary(png, {
105
- onProgress: (info) => {
106
- console.log(`Decoding phase: ${info.phase}`);
107
- },
128
+ onProgress: (info) => {
129
+ console.log(`Decoding phase: ${info.phase}`);
130
+ },
108
131
  });
109
- ```
132
+
133
+ ````
110
134
 
111
135
  **API:**
112
136
 
@@ -160,7 +184,7 @@ const result = await decodePngToBinary(png, { files: ['myfolder/file.txt'] });
160
184
  if (result.files) {
161
185
  fs.writeFileSync('extracted.txt', result.files[0].buf);
162
186
  }
163
- ```
187
+ ````
164
188
 
165
189
  ## Requirements
166
190
 
package/dist/cli.js CHANGED
@@ -4,7 +4,7 @@ import { mkdirSync, readFileSync, statSync, writeFileSync } from 'fs';
4
4
  import { basename, dirname, join, resolve } from 'path';
5
5
  import { DataFormatError, decodePngToBinary, encodeBinaryToPng, IncorrectPassphraseError, listFilesInPng, PassphraseRequiredError, } from './index.js';
6
6
  import { packPaths, unpackBuffer } from './pack.js';
7
- const VERSION = '1.1.5';
7
+ const VERSION = '1.1.7';
8
8
  function showHelp() {
9
9
  console.log(`
10
10
  ROX CLI — Encode/decode binary in PNG
@@ -133,11 +133,14 @@ async function encodeCommand(args) {
133
133
  process.exit(1);
134
134
  }
135
135
  const resolvedInputs = inputPaths.map((p) => resolve(p));
136
- const resolvedOutput = parsed.output ||
137
- outputPath ||
138
- (inputPaths.length === 1
139
- ? firstInput.replace(/(\.[^.]+)?$/, '.png')
140
- : 'archive.png');
136
+ let outputName = inputPaths.length === 1 ? basename(firstInput) : 'archive';
137
+ if (inputPaths.length === 1 && !statSync(resolvedInputs[0]).isDirectory()) {
138
+ outputName = outputName.replace(/(\.[^.]+)?$/, '.png');
139
+ }
140
+ else {
141
+ outputName += '.png';
142
+ }
143
+ const resolvedOutput = parsed.output || outputPath || outputName;
141
144
  let options = {};
142
145
  try {
143
146
  let inputBuffer;
@@ -174,13 +177,10 @@ async function encodeCommand(args) {
174
177
  }
175
178
  else {
176
179
  const resolvedInput = resolvedInputs[0];
177
- console.log(' ');
178
- console.log(`Reading: ${resolvedInput}`);
179
- console.log(' ');
180
180
  const st = statSync(resolvedInput);
181
181
  if (st.isDirectory()) {
182
182
  console.log(`Packing directory...`);
183
- const packResult = packPaths([resolvedInput]);
183
+ const packResult = packPaths([resolvedInput], resolvedInput);
184
184
  inputBuffer = packResult.buf;
185
185
  console.log(`Packed ${packResult.list.length} files -> ${(inputBuffer.length /
186
186
  1024 /
@@ -219,30 +219,59 @@ async function encodeCommand(args) {
219
219
  elapsed: '0',
220
220
  });
221
221
  const startEncode = Date.now();
222
+ let currentEncodePct = 0;
223
+ let currentEncodeStep = 'Starting';
224
+ const encodeHeartbeat = setInterval(() => {
225
+ encodeBar.update(Math.floor(currentEncodePct), {
226
+ step: currentEncodeStep,
227
+ elapsed: String(Math.floor((Date.now() - startEncode) / 1000)),
228
+ });
229
+ }, 1000);
222
230
  options.onProgress = (info) => {
223
231
  let pct = 0;
224
- if (info.phase === 'compress_progress') {
225
- pct = (info.loaded / info.total) * 50;
232
+ let stepLabel = 'Processing';
233
+ if (info.phase === 'compress_start') {
234
+ pct = 5;
235
+ stepLabel = 'Compressing';
236
+ }
237
+ else if (info.phase === 'compress_progress') {
238
+ pct = 5 + Math.floor((info.loaded / info.total) * 45);
239
+ stepLabel = 'Compressing';
226
240
  }
227
241
  else if (info.phase === 'compress_done') {
228
242
  pct = 50;
243
+ stepLabel = 'Compressed';
244
+ }
245
+ else if (info.phase === 'encrypt_start') {
246
+ pct = 60;
247
+ stepLabel = 'Encrypting';
229
248
  }
230
249
  else if (info.phase === 'encrypt_done') {
250
+ pct = 75;
251
+ stepLabel = 'Encrypted';
252
+ }
253
+ else if (info.phase === 'meta_prep_done') {
231
254
  pct = 80;
255
+ stepLabel = 'Preparing';
232
256
  }
233
257
  else if (info.phase === 'png_gen') {
234
258
  pct = 90;
259
+ stepLabel = 'Generating PNG';
235
260
  }
236
261
  else if (info.phase === 'done') {
237
262
  pct = 100;
263
+ stepLabel = 'Done';
238
264
  }
265
+ currentEncodePct = pct;
266
+ currentEncodeStep = stepLabel;
239
267
  encodeBar.update(Math.floor(pct), {
240
- step: info.phase.replace('_', ' '),
268
+ step: stepLabel,
241
269
  elapsed: String(Math.floor((Date.now() - startEncode) / 1000)),
242
270
  });
243
271
  };
244
272
  const output = await encodeBinaryToPng(inputBuffer, options);
245
273
  const encodeTime = Date.now() - startEncode;
274
+ clearInterval(encodeHeartbeat);
246
275
  encodeBar.update(100, {
247
276
  step: 'done',
248
277
  elapsed: String(Math.floor(encodeTime / 1000)),
@@ -277,10 +306,8 @@ async function decodeCommand(args) {
277
306
  process.exit(1);
278
307
  }
279
308
  const resolvedInput = resolve(inputPath);
309
+ const resolvedOutput = parsed.output || outputPath || 'decoded.bin';
280
310
  try {
281
- const inputBuffer = readFileSync(resolvedInput);
282
- console.log(' ');
283
- console.log(`Reading: ${resolvedInput}`);
284
311
  const options = {};
285
312
  if (parsed.passphrase) {
286
313
  options.passphrase = parsed.passphrase;
@@ -292,7 +319,6 @@ async function decodeCommand(args) {
292
319
  options.files = parsed.files;
293
320
  }
294
321
  console.log(' ');
295
- console.log(' ');
296
322
  console.log(`Decoding...`);
297
323
  console.log(' ');
298
324
  const decodeBar = new cliProgress.SingleBar({
@@ -303,17 +329,42 @@ async function decodeCommand(args) {
303
329
  elapsed: '0',
304
330
  });
305
331
  const startDecode = Date.now();
332
+ let currentPct = 50;
333
+ let currentStep = 'Decoding';
334
+ const heartbeat = setInterval(() => {
335
+ decodeBar.update(Math.floor(currentPct), {
336
+ step: currentStep,
337
+ elapsed: String(Math.floor((Date.now() - startDecode) / 1000)),
338
+ });
339
+ }, 1000);
306
340
  options.onProgress = (info) => {
307
- let pct = 50;
308
- if (info.phase === 'done')
309
- pct = 100;
310
- decodeBar.update(pct, {
311
- step: 'Decoding',
341
+ if (info.phase === 'decompress_start') {
342
+ currentPct = 50;
343
+ currentStep = 'Decompressing';
344
+ }
345
+ else if (info.phase === 'decompress_progress' &&
346
+ info.loaded &&
347
+ info.total) {
348
+ currentPct = 50 + Math.floor((info.loaded / info.total) * 40);
349
+ currentStep = `Decompressing (${info.loaded}/${info.total})`;
350
+ }
351
+ else if (info.phase === 'decompress_done') {
352
+ currentPct = 90;
353
+ currentStep = 'Decompressed';
354
+ }
355
+ else if (info.phase === 'done') {
356
+ currentPct = 100;
357
+ currentStep = 'Done';
358
+ }
359
+ decodeBar.update(Math.floor(currentPct), {
360
+ step: currentStep,
312
361
  elapsed: String(Math.floor((Date.now() - startDecode) / 1000)),
313
362
  });
314
363
  };
364
+ const inputBuffer = readFileSync(resolvedInput);
315
365
  const result = await decodePngToBinary(inputBuffer, options);
316
366
  const decodeTime = Date.now() - startDecode;
367
+ clearInterval(heartbeat);
317
368
  decodeBar.update(100, {
318
369
  step: 'done',
319
370
  elapsed: String(Math.floor(decodeTime / 1000)),
@@ -328,33 +379,43 @@ async function decodeCommand(args) {
328
379
  writeFileSync(fullPath, file.buf);
329
380
  }
330
381
  console.log(`\nSuccess!`);
331
- console.log(`Extracted ${result.files.length} files to ${baseDir === '.' ? 'current directory' : baseDir}`);
382
+ console.log(`Unpacked ${result.files.length} files to directory : ${resolve(baseDir)}`);
332
383
  console.log(`Time: ${decodeTime}ms`);
333
384
  }
334
385
  else if (result.buf) {
335
386
  const unpacked = unpackBuffer(result.buf);
336
387
  if (unpacked) {
337
- const baseDir = parsed.output || outputPath || result.meta?.name || '.';
388
+ const baseDir = parsed.output || outputPath || '.';
338
389
  for (const file of unpacked.files) {
339
390
  const fullPath = join(baseDir, file.path);
340
391
  const dir = dirname(fullPath);
341
392
  mkdirSync(dir, { recursive: true });
342
393
  writeFileSync(fullPath, file.buf);
343
394
  }
344
- console.log(`Unpacked ${unpacked.files.length} files to ${baseDir === '.' ? 'current directory' : baseDir}`);
395
+ console.log(`\nSuccess!`);
396
+ console.log(`Time: ${decodeTime}ms`);
397
+ console.log(`Unpacked ${unpacked.files.length} files to current directory`);
345
398
  }
346
399
  else {
347
- const resolvedOutput = parsed.output || outputPath || result.meta?.name || 'decoded.bin';
348
- writeFileSync(resolvedOutput, result.buf);
400
+ let finalOutput = resolvedOutput;
401
+ if (!parsed.output && !outputPath && result.meta?.name) {
402
+ finalOutput = result.meta.name;
403
+ }
404
+ writeFileSync(finalOutput, result.buf);
349
405
  console.log(`\nSuccess!`);
350
406
  if (result.meta?.name) {
351
407
  console.log(` Original name: ${result.meta.name}`);
352
408
  }
353
- console.log(` Output size: ${(result.buf.length / 1024 / 1024).toFixed(2)} MB`);
409
+ const outputSize = (result.buf.length / 1024 / 1024).toFixed(2);
410
+ console.log(` Output size: ${outputSize} MB`);
354
411
  console.log(` Time: ${decodeTime}ms`);
355
- console.log(` Saved: ${resolvedOutput}`);
412
+ console.log(` Saved: ${finalOutput}`);
356
413
  }
357
414
  }
415
+ else {
416
+ console.log(`\nSuccess!`);
417
+ console.log(`Time: ${decodeTime}ms`);
418
+ }
358
419
  console.log(' ');
359
420
  }
360
421
  catch (err) {
package/dist/index.d.ts CHANGED
@@ -127,6 +127,10 @@ export interface DecodeOptions {
127
127
  * Directory to save debug images (doubled.png, reconstructed.png).
128
128
  */
129
129
  debugDir?: string;
130
+ /**
131
+ * Path to write decoded output directly to disk (streamed) to avoid high memory usage.
132
+ */
133
+ outPath?: string;
130
134
  /**
131
135
  * List of files to extract selectively from archives.
132
136
  */
package/dist/index.js CHANGED
@@ -1,6 +1,8 @@
1
1
  import { compress as zstdCompress, decompress as zstdDecompress, } from '@mongodb-js/zstd';
2
2
  import cliProgress from 'cli-progress';
3
3
  import { createCipheriv, createDecipheriv, pbkdf2Sync, randomBytes, } from 'crypto';
4
+ import { createReadStream, createWriteStream, readFileSync, unlinkSync, } from 'fs';
5
+ import { tmpdir } from 'os';
4
6
  import { join } from 'path';
5
7
  import encode from 'png-chunks-encode';
6
8
  import extract from 'png-chunks-extract';
@@ -42,6 +44,15 @@ const MARKER_COLORS = [
42
44
  ];
43
45
  const MARKER_START = MARKER_COLORS;
44
46
  const MARKER_END = [...MARKER_COLORS].reverse();
47
+ const CHUNK_SIZE = 8 * 1024;
48
+ async function writeInChunks(ws, buf, chunkSize = CHUNK_SIZE) {
49
+ for (let i = 0; i < buf.length; i += chunkSize) {
50
+ const part = buf.slice(i, i + chunkSize);
51
+ if (!ws.write(part))
52
+ await new Promise((resolve) => ws.once('drain', resolve));
53
+ await new Promise((resolve) => setTimeout(resolve, 50));
54
+ }
55
+ }
45
56
  const COMPRESSION_MARKERS = {
46
57
  zstd: [{ r: 0, g: 255, b: 0 }],
47
58
  };
@@ -54,6 +65,144 @@ function colorsToBytes(colors) {
54
65
  }
55
66
  return buf;
56
67
  }
68
+ function deltaEncode(data) {
69
+ if (data.length === 0)
70
+ return data;
71
+ const out = Buffer.alloc(data.length);
72
+ out[0] = data[0];
73
+ for (let i = 1; i < data.length; i++) {
74
+ out[i] = (data[i] - data[i - 1] + 256) & 0xff;
75
+ }
76
+ return out;
77
+ }
78
+ function deltaDecode(data) {
79
+ if (data.length === 0)
80
+ return data;
81
+ const out = Buffer.alloc(data.length);
82
+ out[0] = data[0];
83
+ for (let i = 1; i < data.length; i++) {
84
+ out[i] = (out[i - 1] + data[i]) & 0xff;
85
+ }
86
+ return out;
87
+ }
88
+ async function parallelZstdCompress(payload, level = 22, onProgress) {
89
+ const chunkSize = 1024 * 1024 * 1024;
90
+ if (payload.length <= chunkSize) {
91
+ return Buffer.from(await zstdCompress(payload, level));
92
+ }
93
+ const chunks = [];
94
+ const promises = [];
95
+ const totalChunks = Math.ceil(payload.length / chunkSize);
96
+ let completedChunks = 0;
97
+ for (let i = 0; i < payload.length; i += chunkSize) {
98
+ const chunk = payload.slice(i, Math.min(i + chunkSize, payload.length));
99
+ promises.push(zstdCompress(chunk, level).then((compressed) => {
100
+ completedChunks++;
101
+ if (onProgress) {
102
+ onProgress(completedChunks, totalChunks);
103
+ }
104
+ return Buffer.from(compressed);
105
+ }));
106
+ }
107
+ const compressedChunks = await Promise.all(promises);
108
+ const chunkSizes = Buffer.alloc(compressedChunks.length * 4);
109
+ for (let i = 0; i < compressedChunks.length; i++) {
110
+ chunkSizes.writeUInt32BE(compressedChunks[i].length, i * 4);
111
+ }
112
+ const header = Buffer.alloc(8);
113
+ header.writeUInt32BE(0x5a535444, 0);
114
+ header.writeUInt32BE(compressedChunks.length, 4);
115
+ return Buffer.concat([header, chunkSizes, ...compressedChunks]);
116
+ }
117
+ async function parallelZstdDecompress(payload, onProgress, onChunk, outPath) {
118
+ if (payload.length < 8) {
119
+ onProgress?.({ phase: 'decompress_start', total: 1 });
120
+ const d = Buffer.from(await zstdDecompress(payload));
121
+ if (onChunk)
122
+ await onChunk(d, 0, 1);
123
+ onProgress?.({ phase: 'decompress_progress', loaded: 1, total: 1 });
124
+ onProgress?.({ phase: 'decompress_done', loaded: 1, total: 1 });
125
+ if (outPath) {
126
+ const ws = createWriteStream(outPath);
127
+ await writeInChunks(ws, d);
128
+ ws.end();
129
+ }
130
+ return onChunk ? Buffer.alloc(0) : d;
131
+ }
132
+ const magic = payload.readUInt32BE(0);
133
+ if (magic !== 0x5a535444) {
134
+ onProgress?.({ phase: 'decompress_start', total: 1 });
135
+ const d = Buffer.from(await zstdDecompress(payload));
136
+ onProgress?.({ phase: 'decompress_progress', loaded: 1, total: 1 });
137
+ onProgress?.({ phase: 'decompress_done', loaded: 1, total: 1 });
138
+ if (outPath) {
139
+ const ws = createWriteStream(outPath);
140
+ await writeInChunks(ws, d);
141
+ ws.end();
142
+ }
143
+ return d;
144
+ }
145
+ const numChunks = payload.readUInt32BE(4);
146
+ const chunkSizes = [];
147
+ let offset = 8;
148
+ for (let i = 0; i < numChunks; i++) {
149
+ chunkSizes.push(payload.readUInt32BE(offset));
150
+ offset += 4;
151
+ }
152
+ onProgress?.({ phase: 'decompress_start', total: numChunks });
153
+ const tempFiles = [];
154
+ for (let i = 0; i < numChunks; i++) {
155
+ const size = chunkSizes[i];
156
+ const chunk = payload.slice(offset, offset + size);
157
+ offset += size;
158
+ const tempFile = join(tmpdir(), `rox_chunk_${Date.now()}_${i}.tmp`);
159
+ const wsChunk = createWriteStream(tempFile);
160
+ const dec = Buffer.from(await zstdDecompress(chunk));
161
+ if (onChunk) {
162
+ await onChunk(dec, i + 1, numChunks);
163
+ unlinkSync(tempFile);
164
+ }
165
+ else {
166
+ await writeInChunks(wsChunk, dec);
167
+ await new Promise((res) => wsChunk.end(() => res()));
168
+ tempFiles.push(tempFile);
169
+ }
170
+ onProgress?.({
171
+ phase: 'decompress_progress',
172
+ loaded: i + 1,
173
+ total: numChunks,
174
+ });
175
+ }
176
+ onProgress?.({
177
+ phase: 'decompress_done',
178
+ loaded: numChunks,
179
+ total: numChunks,
180
+ });
181
+ if (onChunk) {
182
+ return Buffer.alloc(0);
183
+ }
184
+ if (outPath || tempFiles.length > 0) {
185
+ const finalPath = outPath || join(tmpdir(), `rox_final_${Date.now()}.tmp`);
186
+ const ws = createWriteStream(finalPath);
187
+ for (const tempFile of tempFiles) {
188
+ const rs = createReadStream(tempFile);
189
+ await new Promise((resolve, reject) => {
190
+ rs.on('data', (chunk) => ws.write(chunk));
191
+ rs.on('end', resolve);
192
+ rs.on('error', reject);
193
+ });
194
+ unlinkSync(tempFile);
195
+ }
196
+ await new Promise((res) => ws.end(() => res()));
197
+ if (!outPath) {
198
+ const finalBuf = readFileSync(finalPath);
199
+ unlinkSync(finalPath);
200
+ return finalBuf;
201
+ }
202
+ return Buffer.alloc(0);
203
+ }
204
+ return Buffer.alloc(0);
205
+ }
57
206
  function applyXor(buf, passphrase) {
58
207
  const key = Buffer.from(passphrase, 'utf8');
59
208
  const out = Buffer.alloc(buf.length);
@@ -65,14 +214,8 @@ function applyXor(buf, passphrase) {
65
214
  function tryBrotliDecompress(payload) {
66
215
  return Buffer.from(zlib.brotliDecompressSync(payload));
67
216
  }
68
- async function tryZstdDecompress(payload) {
69
- try {
70
- const result = await zstdDecompress(payload);
71
- return Buffer.from(result);
72
- }
73
- catch {
74
- return payload;
75
- }
217
+ async function tryZstdDecompress(payload, onProgress, onChunk, outPath) {
218
+ return await parallelZstdDecompress(payload, onProgress, onChunk, outPath);
76
219
  }
77
220
  function tryDecryptIfNeeded(buf, passphrase) {
78
221
  if (!buf || buf.length === 0)
@@ -434,20 +577,17 @@ export async function encodeBinaryToPng(input, opts = {}) {
434
577
  const compression = opts.compression || 'zstd';
435
578
  if (opts.onProgress)
436
579
  opts.onProgress({ phase: 'compress_start', total: payload.length });
437
- const compressedChunks = [];
438
- const chunkSize = 1024 * 1024;
439
- for (let i = 0; i < payload.length; i += chunkSize) {
440
- const chunk = payload.slice(i, Math.min(i + chunkSize, payload.length));
441
- const compressedChunk = Buffer.from(await zstdCompress(chunk, 22));
442
- compressedChunks.push(compressedChunk);
443
- if (opts.onProgress)
580
+ const useDelta = mode !== 'screenshot';
581
+ const deltaEncoded = useDelta ? deltaEncode(payload) : payload;
582
+ payload = await parallelZstdCompress(deltaEncoded, 1, (loaded, total) => {
583
+ if (opts.onProgress) {
444
584
  opts.onProgress({
445
585
  phase: 'compress_progress',
446
- loaded: i + chunk.length,
447
- total: payload.length,
586
+ loaded,
587
+ total,
448
588
  });
449
- }
450
- payload = Buffer.concat(compressedChunks);
589
+ }
590
+ });
451
591
  if (opts.onProgress)
452
592
  opts.onProgress({ phase: 'compress_done', loaded: payload.length });
453
593
  if (opts.passphrase && !opts.encrypt) {
@@ -480,6 +620,8 @@ export async function encodeBinaryToPng(input, opts = {}) {
480
620
  const enc = Buffer.concat([cipher.update(payload), cipher.final()]);
481
621
  const tag = cipher.getAuthTag();
482
622
  payload = Buffer.concat([Buffer.from([ENC_AES]), salt, iv, tag, enc]);
623
+ if (opts.onProgress)
624
+ opts.onProgress({ phase: 'encrypt_done' });
483
625
  }
484
626
  else if (encChoice === 'xor') {
485
627
  const xored = applyXor(payload, opts.passphrase);
@@ -520,7 +662,7 @@ export async function encodeBinaryToPng(input, opts = {}) {
520
662
  const nameLen = nameBuf.length;
521
663
  const payloadLenBuf = Buffer.alloc(4);
522
664
  payloadLenBuf.writeUInt32BE(payload.length, 0);
523
- const version = 2;
665
+ const version = 1;
524
666
  const metaPixel = Buffer.concat([
525
667
  Buffer.from([version]),
526
668
  Buffer.from([nameLen]),
@@ -542,15 +684,21 @@ export async function encodeBinaryToPng(input, opts = {}) {
542
684
  ]);
543
685
  const bytesPerPixel = 3;
544
686
  const dataPixels = Math.ceil(dataWithMarkers.length / 3);
545
- let logicalWidth = Math.ceil(Math.sqrt(dataPixels));
546
- if (logicalWidth < MARKER_END.length) {
547
- logicalWidth = MARKER_END.length;
548
- }
549
- const dataRows = Math.ceil(dataPixels / logicalWidth);
550
- const pixelsInLastRow = dataPixels % logicalWidth;
551
- const spaceInLastRow = pixelsInLastRow === 0 ? logicalWidth : logicalWidth - pixelsInLastRow;
552
- const needsExtraRow = spaceInLastRow < MARKER_END.length;
553
- const logicalHeight = needsExtraRow ? dataRows + 1 : dataRows;
687
+ const totalPixels = dataPixels + MARKER_END.length;
688
+ const maxWidth = 16384;
689
+ let side = Math.ceil(Math.sqrt(totalPixels));
690
+ if (side < MARKER_END.length)
691
+ side = MARKER_END.length;
692
+ let logicalWidth;
693
+ let logicalHeight;
694
+ if (side <= maxWidth) {
695
+ logicalWidth = side;
696
+ logicalHeight = side;
697
+ }
698
+ else {
699
+ logicalWidth = Math.min(maxWidth, totalPixels);
700
+ logicalHeight = Math.ceil(totalPixels / logicalWidth);
701
+ }
554
702
  const scale = 1;
555
703
  const width = logicalWidth * scale;
556
704
  const height = logicalHeight * scale;
@@ -566,8 +714,7 @@ export async function encodeBinaryToPng(input, opts = {}) {
566
714
  g = MARKER_END[markerIdx].g;
567
715
  b = MARKER_END[markerIdx].b;
568
716
  }
569
- else if (ly < dataRows ||
570
- (ly === dataRows && linearIdx < dataPixels)) {
717
+ else if (linearIdx < dataPixels) {
571
718
  const srcIdx = linearIdx * 3;
572
719
  r = srcIdx < dataWithMarkers.length ? dataWithMarkers[srcIdx] : 0;
573
720
  g =
@@ -593,7 +740,9 @@ export async function encodeBinaryToPng(input, opts = {}) {
593
740
  }
594
741
  if (opts.onProgress)
595
742
  opts.onProgress({ phase: 'png_gen' });
596
- let bufScr = await sharp(raw, { raw: { width, height, channels: 3 } })
743
+ let bufScr = await sharp(raw, {
744
+ raw: { width, height, channels: 3 },
745
+ })
597
746
  .png({
598
747
  compressionLevel: 9,
599
748
  palette: false,
@@ -633,9 +782,10 @@ export async function encodeBinaryToPng(input, opts = {}) {
633
782
  const full = Buffer.concat([PIXEL_MAGIC, metaPixel]);
634
783
  const bytesPerPixel = 3;
635
784
  const nPixels = Math.ceil((full.length + 8) / 3);
636
- const side = Math.ceil(Math.sqrt(nPixels));
637
- const width = Math.max(1, Math.min(side, 65535));
638
- const height = Math.ceil(nPixels / width);
785
+ const desiredSide = Math.ceil(Math.sqrt(nPixels));
786
+ const sideClamped = Math.max(1, Math.min(desiredSide, 65535));
787
+ const width = sideClamped;
788
+ const height = sideClamped === desiredSide ? sideClamped : Math.ceil(nPixels / width);
639
789
  const dimHeader = Buffer.alloc(8);
640
790
  dimHeader.writeUInt32BE(width, 0);
641
791
  dimHeader.writeUInt32BE(height, 4);
@@ -771,15 +921,28 @@ export async function decodePngToBinary(pngBuf, opts = {}) {
771
921
  try {
772
922
  const info = await sharp(pngBuf).metadata();
773
923
  if (info.width && info.height) {
774
- const doubledBuffer = await sharp(pngBuf)
775
- .resize({
776
- width: info.width * 2,
777
- height: info.height * 2,
778
- kernel: 'nearest',
779
- })
780
- .png()
781
- .toBuffer();
782
- processedBuf = await cropAndReconstitute(doubledBuffer, opts.debugDir);
924
+ const MAX_RAW_BYTES = 150 * 1024 * 1024;
925
+ const rawBytesEstimate = info.width * info.height * 4;
926
+ if (rawBytesEstimate > MAX_RAW_BYTES) {
927
+ throw new DataFormatError(`Image too large to decode in-process (${Math.round(rawBytesEstimate / 1024 / 1024)} MB). Increase Node heap or use a smaller image/compact mode.`);
928
+ }
929
+ const MAX_DOUBLE_BYTES = 200 * 1024 * 1024;
930
+ const doubledPixels = info.width * 2 * (info.height * 2);
931
+ const doubledBytesEstimate = doubledPixels * 4;
932
+ if (false) {
933
+ const doubledBuffer = await sharp(pngBuf)
934
+ .resize({
935
+ width: info.width * 2,
936
+ height: info.height * 2,
937
+ kernel: 'nearest',
938
+ })
939
+ .png()
940
+ .toBuffer();
941
+ processedBuf = await cropAndReconstitute(doubledBuffer, opts.debugDir);
942
+ }
943
+ else {
944
+ processedBuf = pngBuf;
945
+ }
783
946
  }
784
947
  }
785
948
  catch (e) { }
@@ -796,16 +959,54 @@ export async function decodePngToBinary(pngBuf, opts = {}) {
796
959
  }
797
960
  const rawPayload = d.slice(idx);
798
961
  let payload = tryDecryptIfNeeded(rawPayload, opts.passphrase);
962
+ if (opts.outPath) {
963
+ const ws = createWriteStream(opts.outPath, { highWaterMark: 64 * 1024 });
964
+ let headerBuf = Buffer.alloc(0);
965
+ let headerSkipped = false;
966
+ await tryZstdDecompress(payload, (info) => {
967
+ if (opts.onProgress)
968
+ opts.onProgress(info);
969
+ }, async (decChunk) => {
970
+ if (!headerSkipped) {
971
+ if (decChunk.length < MAGIC.length) {
972
+ headerBuf = Buffer.concat([headerBuf, decChunk]);
973
+ return;
974
+ }
975
+ const mag = decChunk.slice(0, MAGIC.length);
976
+ if (!mag.equals(MAGIC)) {
977
+ ws.close();
978
+ throw new Error('Invalid ROX format (ROX direct: missing ROX1 magic after decompression)');
979
+ }
980
+ const toWriteBuf = decChunk.slice(MAGIC.length);
981
+ if (toWriteBuf.length > 0) {
982
+ await writeInChunks(ws, toWriteBuf, 16 * 1024);
983
+ }
984
+ headerBuf = Buffer.alloc(0);
985
+ headerSkipped = true;
986
+ }
987
+ else {
988
+ await writeInChunks(ws, decChunk, 64 * 1024);
989
+ }
990
+ });
991
+ await new Promise((res) => ws.end(() => res()));
992
+ if (opts.onProgress)
993
+ opts.onProgress({ phase: 'done' });
994
+ progressBar?.stop();
995
+ return { meta: { name } };
996
+ }
799
997
  if (opts.onProgress)
800
- opts.onProgress({ phase: 'decompress' });
998
+ opts.onProgress({ phase: 'decompress_start' });
801
999
  try {
802
- payload = await tryZstdDecompress(payload);
1000
+ payload = await tryZstdDecompress(payload, (info) => {
1001
+ if (opts.onProgress)
1002
+ opts.onProgress(info);
1003
+ });
803
1004
  }
804
1005
  catch (e) {
805
1006
  const errMsg = e instanceof Error ? e.message : String(e);
806
1007
  if (opts.passphrase)
807
- throw new Error('Incorrect passphrase (ROX format, zstd failed: ' + errMsg + ')');
808
- throw new Error('ROX format zstd decompression failed: ' + errMsg);
1008
+ throw new IncorrectPassphraseError('Incorrect passphrase (compact mode, zstd failed: ' + errMsg + ')');
1009
+ throw new DataFormatError('Compact mode zstd decompression failed: ' + errMsg);
809
1010
  }
810
1011
  if (!payload.slice(0, MAGIC.length).equals(MAGIC)) {
811
1012
  throw new Error('Invalid ROX format (ROX direct: missing ROX1 magic after decompression)');
@@ -855,10 +1056,48 @@ export async function decodePngToBinary(pngBuf, opts = {}) {
855
1056
  if (rawPayload.length === 0)
856
1057
  throw new DataFormatError('Compact mode payload empty');
857
1058
  let payload = tryDecryptIfNeeded(rawPayload, opts.passphrase);
1059
+ if (opts.outPath) {
1060
+ const ws = createWriteStream(opts.outPath, { highWaterMark: 64 * 1024 });
1061
+ let headerBuf = Buffer.alloc(0);
1062
+ let headerSkipped = false;
1063
+ await tryZstdDecompress(payload, (info) => {
1064
+ if (opts.onProgress)
1065
+ opts.onProgress(info);
1066
+ }, async (decChunk) => {
1067
+ if (!headerSkipped) {
1068
+ if (decChunk.length < MAGIC.length) {
1069
+ headerBuf = Buffer.concat([headerBuf, decChunk]);
1070
+ return;
1071
+ }
1072
+ const mag = decChunk.slice(0, MAGIC.length);
1073
+ if (!mag.equals(MAGIC)) {
1074
+ ws.close();
1075
+ throw new DataFormatError('Invalid ROX format (compact mode: missing ROX1 magic after decompression)');
1076
+ }
1077
+ const toWriteBuf = decChunk.slice(MAGIC.length);
1078
+ if (toWriteBuf.length > 0) {
1079
+ await writeInChunks(ws, toWriteBuf);
1080
+ }
1081
+ headerBuf = Buffer.alloc(0);
1082
+ headerSkipped = true;
1083
+ }
1084
+ else {
1085
+ await writeInChunks(ws, decChunk, 64 * 1024);
1086
+ }
1087
+ });
1088
+ await new Promise((res) => ws.end(() => res()));
1089
+ if (opts.onProgress)
1090
+ opts.onProgress({ phase: 'done' });
1091
+ progressBar?.stop();
1092
+ return { meta: { name } };
1093
+ }
858
1094
  if (opts.onProgress)
859
- opts.onProgress({ phase: 'decompress' });
1095
+ opts.onProgress({ phase: 'decompress_start' });
860
1096
  try {
861
- payload = await tryZstdDecompress(payload);
1097
+ payload = await tryZstdDecompress(payload, (info) => {
1098
+ if (opts.onProgress)
1099
+ opts.onProgress(info);
1100
+ });
862
1101
  }
863
1102
  catch (e) {
864
1103
  const errMsg = e instanceof Error ? e.message : String(e);
@@ -936,7 +1175,7 @@ export async function decodePngToBinary(pngBuf, opts = {}) {
936
1175
  logicalData = rawRGB;
937
1176
  }
938
1177
  else {
939
- const reconstructed = await cropAndReconstitute(data, opts.debugDir);
1178
+ const reconstructed = await cropAndReconstitute(processedBuf, opts.debugDir);
940
1179
  const { data: rdata, info: rinfo } = await sharp(reconstructed)
941
1180
  .ensureAlpha()
942
1181
  .raw()
@@ -982,7 +1221,26 @@ export async function decodePngToBinary(pngBuf, opts = {}) {
982
1221
  const rawPayload = logicalData.slice(idx, idx + payloadLen);
983
1222
  let payload = tryDecryptIfNeeded(rawPayload, opts.passphrase);
984
1223
  try {
985
- payload = await tryZstdDecompress(payload);
1224
+ if (opts.outPath) {
1225
+ await tryZstdDecompress(payload, (info) => {
1226
+ if (opts.onProgress)
1227
+ opts.onProgress(info);
1228
+ }, undefined, opts.outPath);
1229
+ }
1230
+ else {
1231
+ payload = await tryZstdDecompress(payload, (info) => {
1232
+ if (opts.onProgress)
1233
+ opts.onProgress(info);
1234
+ });
1235
+ }
1236
+ if (version === 3) {
1237
+ if (opts.outPath) {
1238
+ throw new Error('outPath not supported with delta encoding yet');
1239
+ }
1240
+ else {
1241
+ payload = deltaDecode(payload);
1242
+ }
1243
+ }
986
1244
  }
987
1245
  catch (e) { }
988
1246
  if (!payload.slice(0, MAGIC.length).equals(MAGIC)) {
@@ -1211,7 +1469,70 @@ export async function decodePngToBinary(pngBuf, opts = {}) {
1211
1469
  const rawPayload = pixelBytes.slice(idx, idx + payloadLen);
1212
1470
  let payload = tryDecryptIfNeeded(rawPayload, opts.passphrase);
1213
1471
  try {
1214
- payload = await tryZstdDecompress(payload);
1472
+ if (opts.outPath) {
1473
+ const ws = createWriteStream(opts.outPath, {
1474
+ highWaterMark: 64 * 1024,
1475
+ });
1476
+ let headerBuf = Buffer.alloc(0);
1477
+ let headerSkipped = false;
1478
+ let lastOutByte = null;
1479
+ await tryZstdDecompress(payload, (info) => {
1480
+ if (opts.onProgress)
1481
+ opts.onProgress(info);
1482
+ }, async (decChunk, idxChunk, totalChunks) => {
1483
+ let outChunk = decChunk;
1484
+ if (version === 3) {
1485
+ const out = Buffer.alloc(decChunk.length);
1486
+ for (let i = 0; i < decChunk.length; i++) {
1487
+ if (i === 0) {
1488
+ out[0] =
1489
+ typeof lastOutByte === 'number'
1490
+ ? (lastOutByte + decChunk[0]) & 0xff
1491
+ : decChunk[0];
1492
+ }
1493
+ else {
1494
+ out[i] = (out[i - 1] + decChunk[i]) & 0xff;
1495
+ }
1496
+ }
1497
+ lastOutByte = out[out.length - 1];
1498
+ outChunk = out;
1499
+ }
1500
+ if (!headerSkipped) {
1501
+ if (outChunk.length < MAGIC.length) {
1502
+ headerBuf = Buffer.concat([headerBuf, outChunk]);
1503
+ return;
1504
+ }
1505
+ const mag = outChunk.slice(0, MAGIC.length);
1506
+ if (!mag.equals(MAGIC)) {
1507
+ ws.close();
1508
+ throw new DataFormatError('Invalid ROX format (pixel mode: missing ROX1 magic after decompression)');
1509
+ }
1510
+ const toWriteBuf = outChunk.slice(MAGIC.length);
1511
+ if (toWriteBuf.length > 0) {
1512
+ await writeInChunks(ws, toWriteBuf, 64 * 1024);
1513
+ }
1514
+ headerBuf = Buffer.alloc(0);
1515
+ headerSkipped = true;
1516
+ }
1517
+ else {
1518
+ await writeInChunks(ws, outChunk, 64 * 1024);
1519
+ }
1520
+ });
1521
+ await new Promise((res, rej) => ws.end(() => res()));
1522
+ if (opts.onProgress)
1523
+ opts.onProgress({ phase: 'done' });
1524
+ progressBar?.stop();
1525
+ return { meta: { name } };
1526
+ }
1527
+ else {
1528
+ payload = await tryZstdDecompress(payload, (info) => {
1529
+ if (opts.onProgress)
1530
+ opts.onProgress(info);
1531
+ });
1532
+ if (version === 3) {
1533
+ payload = deltaDecode(payload);
1534
+ }
1535
+ }
1215
1536
  }
1216
1537
  catch (e) {
1217
1538
  const errMsg = e instanceof Error ? e.message : String(e);
package/dist/pack.d.ts CHANGED
@@ -14,3 +14,13 @@ export declare function unpackBuffer(buf: Buffer, fileList?: string[]): {
14
14
  buf: Buffer;
15
15
  }[];
16
16
  } | null;
17
+ /**
18
+ * Stream-unpack a packed buffer file on disk into a directory without
19
+ * loading the whole archive into memory.
20
+ */
21
+ export declare function unpackFileToDir(filePath: string, baseDir: string, fileList?: string[]): {
22
+ files: {
23
+ path: string;
24
+ size: number;
25
+ }[];
26
+ };
package/dist/pack.js CHANGED
@@ -1,5 +1,5 @@
1
- import { readFileSync, readdirSync, statSync } from 'fs';
2
- import { join, relative, resolve, sep } from 'path';
1
+ import { closeSync, createWriteStream, mkdirSync, openSync, readFileSync, readSync, readdirSync, statSync, writeSync, } from 'fs';
2
+ import { dirname, join, relative, resolve, sep } from 'path';
3
3
  function collectFiles(paths) {
4
4
  const files = [];
5
5
  for (const p of paths) {
@@ -83,3 +83,60 @@ export function unpackBuffer(buf, fileList) {
83
83
  }
84
84
  return { files };
85
85
  }
86
/**
 * Stream-unpack a packed archive file on disk into a directory without
 * loading the whole archive into memory.
 *
 * Archive layout: [u32BE magic 'ROXP'][u32BE fileCount], then per entry:
 * [u16BE nameLen][name utf8][u64BE size][size bytes of content].
 *
 * @param {string} filePath - Path to the packed archive on disk.
 * @param {string} baseDir - Directory to extract into (created as needed).
 * @param {string[]} [fileList] - Optional whitelist of entry names; entries
 *   not listed are skipped without being written.
 * @returns {{ files: { path: string, size: number }[] }} The extracted entries.
 * @throws {Error} If the archive is truncated/invalid, or an entry's path
 *   would escape baseDir (path traversal).
 */
export function unpackFileToDir(filePath, baseDir, fileList) {
    const fd = openSync(filePath, 'r');
    try {
        const header = Buffer.alloc(8);
        if (readSync(fd, header, 0, 8, 0) < 8)
            throw new Error('Invalid archive');
        if (header.readUInt32BE(0) !== 0x524f5850) // 'ROXP'
            throw new Error('Not a pack archive');
        const fileCount = header.readUInt32BE(4);
        const rootDir = resolve(baseDir);
        let offset = 8;
        const files = [];
        // One reusable I/O buffer; safe only because all writes below are
        // synchronous (an async stream write would keep a view of this
        // buffer alive while the next read overwrites it).
        const io = Buffer.alloc(64 * 1024);
        for (let i = 0; i < fileCount; i++) {
            if (readSync(fd, io, 0, 2, offset) < 2)
                throw new Error('Invalid archive (truncated entry header)');
            const nameLen = io.readUInt16BE(0);
            offset += 2;
            const nameBuf = Buffer.alloc(nameLen);
            if (readSync(fd, nameBuf, 0, nameLen, offset) < nameLen)
                throw new Error('Invalid archive (truncated entry name)');
            const name = nameBuf.toString('utf8');
            offset += nameLen;
            if (readSync(fd, io, 0, 8, offset) < 8)
                throw new Error('Invalid archive (truncated entry size)');
            const size = Number(io.readBigUInt64BE(0));
            offset += 8;
            if (fileList && !fileList.includes(name)) {
                // Not requested: skip the payload without reading it.
                offset += size;
                continue;
            }
            // Guard against path traversal (e.g. '../../etc/passwd' entries
            // in an untrusted archive).
            const outPath = resolve(rootDir, name);
            if (outPath !== rootDir && !outPath.startsWith(rootDir + sep))
                throw new Error(`Unsafe entry path in archive: ${name}`);
            mkdirSync(dirname(outPath), { recursive: true });
            // Synchronous writes guarantee the file is fully on disk before
            // this (synchronous) function returns, and avoid the corruption
            // that reusing `io` under a pending async stream write causes.
            const outFd = openSync(outPath, 'w');
            try {
                let remaining = size;
                while (remaining > 0) {
                    const want = Math.min(remaining, io.length);
                    const got = readSync(fd, io, 0, want, offset);
                    if (got <= 0)
                        throw new Error('Invalid archive (truncated file data)');
                    writeSync(outFd, io, 0, got);
                    offset += got;
                    remaining -= got;
                }
            }
            finally {
                closeSync(outFd);
            }
            files.push({ path: name, size });
        }
        return { files };
    }
    finally {
        closeSync(fd);
    }
}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "roxify",
3
- "version": "1.1.6",
3
+ "version": "1.1.7",
4
4
  "description": "Encode binary data into PNG images with Zstd compression and decode them back. Supports CLI and programmatic API (Node.js ESM).",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",
@@ -16,7 +16,7 @@
16
16
  "build": "tsc",
17
17
  "check-publish": "node ../scripts/check-publish.js roxify",
18
18
  "cli": "node dist/cli.js",
19
- "test": "node test/pack.test.js"
19
+ "test": "npm run build && node test/pack.test.js && node test/screenshot.test.js"
20
20
  },
21
21
  "keywords": [
22
22
  "steganography",