roxify 1.1.6 → 1.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -28,8 +28,8 @@ npm install roxify
28
28
 
29
29
  ```bash
30
30
  npx rox encode <inputName>.ext (<outputName>.png)
31
-
32
31
  npx rox decode <inputName>.png (<outputName>.ext)
32
+ npx rox list <inputName>.png
33
33
  ```
34
34
 
35
35
  If no output name is provided:
@@ -37,129 +37,70 @@ If no output name is provided:
37
37
  - Encoding: output defaults to `<inputName>.png`.
38
38
  - Decoding: if the image contains the original filename it will be restored; otherwise the output will be `decoded.bin`.
39
39
 
40
- **Commands:**
41
-
42
- - `encode <input>... [output]` — Encode file(s)/directory to PNG
43
- - `decode <input> [output]` — Decode PNG to file(s)
44
- - `list <input>` — List files in archive without decoding
45
-
46
40
  **Options:**
47
41
 
48
42
  - `-p, --passphrase <pass>` — Encrypt with AES-256-GCM
49
- - `-m, --mode <mode>` — Encoding mode: `screenshot` (default), `pixel`, `compact`, `chunk`
50
- - `-q, --quality <0-22>` — Roxify compression level (default: 22)
51
- - `--no-compress` — Disable compression
52
- - `--files <list>` — Extract only specified files (comma-separated, for archives)
53
43
  - `-v, --verbose` — Show detailed errors
54
44
 
55
45
  Run `npx rox help` for full options.
56
46
 
57
47
  ## API Usage
58
48
 
49
+ ### Basic Encoding and Decoding
50
+
59
51
  ```js
60
- import { encodeBinaryToPng, decodePngToBinary, listFilesInPng } from 'roxify';
52
+ import { readFileSync, writeFileSync } from 'fs';
53
+ import { encodeBinaryToPng } from 'roxify';
61
54
 
62
- // Encode a file
63
- const data = Buffer.from('Hello world');
64
- const png = await encodeBinaryToPng(data, {
65
- mode: 'screenshot',
66
- name: 'message.txt',
55
+ const fileName = 'input.bin';
56
+ const inputBuffer = readFileSync(fileName);
57
+ const pngBuffer = await encodeBinaryToPng(inputBuffer, {
58
+ name: fileName,
67
59
  });
60
+ writeFileSync('output.png', pngBuffer);
61
+ ```
68
62
 
69
- // Decode
70
- const { buf, meta } = await decodePngToBinary(png);
71
- console.log(buf.toString('utf8'));
72
- console.log(meta?.name);
73
-
74
- // List files in archive
75
- const files = listFilesInPng(png);
76
- console.log(files);
63
+ ```js
64
+ import { readFileSync, writeFileSync } from 'fs';
65
+ import { decodePngToBinary } from 'roxify';
77
66
 
78
- // Selective extraction
79
- const result = await decodePngToBinary(png, { files: ['file1.txt'] });
80
- if (result.files) {
81
- // result.files contains only the selected files
82
- }
67
+ const pngFromDisk = readFileSync('output.png');
68
+ const { buf, meta } = await decodePngToBinary(pngFromDisk);
69
+ writeFileSync(meta?.name ?? 'decoded.txt', buf);
83
70
  ```
84
71
 
85
- ## Example: Progress Logging
72
+ ### With Passphrase
86
73
 
87
74
  ```js
88
- import { encodeBinaryToPng, decodePngToBinary } from 'roxify';
89
-
90
- const data = Buffer.from('Large data to encode...');
75
+ const pngBuffer = await encodeBinaryToPng(inputBuffer, {
76
+ name: fileName,
77
+ passphrase: 'mysecret',
78
+ });
79
+ ```
91
80
 
92
- // Encode with progress logging
93
- const png = await encodeBinaryToPng(data, {
94
- onProgress: (info) => {
95
- console.log(`Encoding phase: ${info.phase}`);
96
- if (info.loaded && info.total) {
97
- const percent = Math.round((info.loaded / info.total) * 100);
98
- console.log(`Progress: ${percent}% (${info.loaded}/${info.total} bytes)`);
99
- }
100
- },
81
+ ```js
82
+ const { buf, meta } = await decodePngToBinary(pngFromDisk, {
83
+ passphrase: 'mysecret',
101
84
  });
85
+ ```
86
+
87
+ ### With Progress Logging
102
88
 
103
- // Decode with progress logging
104
- const { buf } = await decodePngToBinary(png, {
89
+ ```js
90
+ const pngBuffer = await encodeBinaryToPng(inputBuffer, {
91
+ name: fileName,
105
92
  onProgress: (info) => {
106
- console.log(`Decoding phase: ${info.phase}`);
93
+ console.log(`Phase: ${info.phase}, Loaded: ${info.loaded}/${info.total}`);
107
94
  },
108
95
  });
109
96
  ```
110
97
 
111
- **API:**
112
-
113
- - `encodeBinaryToPng(input: Buffer, opts?: EncodeOptions): Promise<Buffer>`
114
- - `decodePngToBinary(pngBuf: Buffer, opts?: DecodeOptions): Promise<DecodeResult>`
115
- - `listFilesInPng(pngBuf: Buffer): string[] | null`
116
-
117
- **EncodeOptions:**
118
-
119
- - `mode` — `'screenshot'` | `'pixel'` | `'compact'` | `'chunk'` (default: `'screenshot'`)
120
- - `name` — Original filename (embedded as metadata)
121
- - `passphrase` — Encryption passphrase (uses AES-256-GCM)
122
- - `compression` — `'Roxify'` | `'none'` (default: `'Roxify'`)
123
- - `brQuality` — Roxify compression level 0-22 (default: 22)
124
- - `showProgress` — Display progress bar (default: `false`)
125
- - `onProgress` — Callback for progress updates: `(info: { phase: string; loaded?: number; total?: number }) => void`
126
- - `includeFileList` — Include file list for archives (default: `true` for directories)
127
-
128
- **DecodeOptions:**
129
-
130
- - `passphrase` — Decryption passphrase
131
- - `files` — List of files to extract selectively (for archives)
132
- - `showProgress` — Display progress bar (default: `false`)
133
- - `onProgress` — Callback for progress updates: `(info: { phase: string; loaded?: number; total?: number }) => void`
134
-
135
- **DecodeResult:**
136
-
137
- - `buf?: Buffer` — Decoded data (if not selective extraction)
138
- - `files?: PackedFile[]` — Extracted files (if selective extraction)
139
- - `meta?: { name?: string }` — Metadata
140
-
141
- ## Example: Archive with Selective Extraction
142
-
143
98
  ```js
144
- import { encodeBinaryToPng, decodePngToBinary, listFilesInPng } from 'roxify';
145
-
146
- // Pack a directory
147
- const fs = require('fs');
148
- const dirData = packPaths(['myfolder']); // From pack.js
149
- const png = await encodeBinaryToPng(dirData.buf, {
150
- includeFileList: true,
151
- fileList: dirData.list,
99
+ const { buf, meta } = await decodePngToBinary(pngFromDisk, {
100
+ onProgress: (info) => {
101
+ console.log(`Phase: ${info.phase}, Loaded: ${info.loaded}/${info.total}`);
102
+ },
152
103
  });
153
-
154
- // List files without decoding
155
- const files = listFilesInPng(png);
156
- console.log('Files:', files);
157
-
158
- // Extract only one file
159
- const result = await decodePngToBinary(png, { files: ['myfolder/file.txt'] });
160
- if (result.files) {
161
- fs.writeFileSync('extracted.txt', result.files[0].buf);
162
- }
163
104
  ```
164
105
 
165
106
  ## Requirements
package/dist/cli.js CHANGED
@@ -4,7 +4,7 @@ import { mkdirSync, readFileSync, statSync, writeFileSync } from 'fs';
4
4
  import { basename, dirname, join, resolve } from 'path';
5
5
  import { DataFormatError, decodePngToBinary, encodeBinaryToPng, IncorrectPassphraseError, listFilesInPng, PassphraseRequiredError, } from './index.js';
6
6
  import { packPaths, unpackBuffer } from './pack.js';
7
- const VERSION = '1.1.5';
7
+ const VERSION = '1.1.7';
8
8
  function showHelp() {
9
9
  console.log(`
10
10
  ROX CLI — Encode/decode binary in PNG
@@ -133,11 +133,14 @@ async function encodeCommand(args) {
133
133
  process.exit(1);
134
134
  }
135
135
  const resolvedInputs = inputPaths.map((p) => resolve(p));
136
- const resolvedOutput = parsed.output ||
137
- outputPath ||
138
- (inputPaths.length === 1
139
- ? firstInput.replace(/(\.[^.]+)?$/, '.png')
140
- : 'archive.png');
136
+ let outputName = inputPaths.length === 1 ? basename(firstInput) : 'archive';
137
+ if (inputPaths.length === 1 && !statSync(resolvedInputs[0]).isDirectory()) {
138
+ outputName = outputName.replace(/(\.[^.]+)?$/, '.png');
139
+ }
140
+ else {
141
+ outputName += '.png';
142
+ }
143
+ const resolvedOutput = parsed.output || outputPath || outputName;
141
144
  let options = {};
142
145
  try {
143
146
  let inputBuffer;
@@ -174,13 +177,10 @@ async function encodeCommand(args) {
174
177
  }
175
178
  else {
176
179
  const resolvedInput = resolvedInputs[0];
177
- console.log(' ');
178
- console.log(`Reading: ${resolvedInput}`);
179
- console.log(' ');
180
180
  const st = statSync(resolvedInput);
181
181
  if (st.isDirectory()) {
182
182
  console.log(`Packing directory...`);
183
- const packResult = packPaths([resolvedInput]);
183
+ const packResult = packPaths([resolvedInput], resolvedInput);
184
184
  inputBuffer = packResult.buf;
185
185
  console.log(`Packed ${packResult.list.length} files -> ${(inputBuffer.length /
186
186
  1024 /
@@ -219,30 +219,59 @@ async function encodeCommand(args) {
219
219
  elapsed: '0',
220
220
  });
221
221
  const startEncode = Date.now();
222
+ let currentEncodePct = 0;
223
+ let currentEncodeStep = 'Starting';
224
+ const encodeHeartbeat = setInterval(() => {
225
+ encodeBar.update(Math.floor(currentEncodePct), {
226
+ step: currentEncodeStep,
227
+ elapsed: String(Math.floor((Date.now() - startEncode) / 1000)),
228
+ });
229
+ }, 1000);
222
230
  options.onProgress = (info) => {
223
231
  let pct = 0;
224
- if (info.phase === 'compress_progress') {
225
- pct = (info.loaded / info.total) * 50;
232
+ let stepLabel = 'Processing';
233
+ if (info.phase === 'compress_start') {
234
+ pct = 5;
235
+ stepLabel = 'Compressing';
236
+ }
237
+ else if (info.phase === 'compress_progress') {
238
+ pct = 5 + Math.floor((info.loaded / info.total) * 45);
239
+ stepLabel = 'Compressing';
226
240
  }
227
241
  else if (info.phase === 'compress_done') {
228
242
  pct = 50;
243
+ stepLabel = 'Compressed';
244
+ }
245
+ else if (info.phase === 'encrypt_start') {
246
+ pct = 60;
247
+ stepLabel = 'Encrypting';
229
248
  }
230
249
  else if (info.phase === 'encrypt_done') {
250
+ pct = 75;
251
+ stepLabel = 'Encrypted';
252
+ }
253
+ else if (info.phase === 'meta_prep_done') {
231
254
  pct = 80;
255
+ stepLabel = 'Preparing';
232
256
  }
233
257
  else if (info.phase === 'png_gen') {
234
258
  pct = 90;
259
+ stepLabel = 'Generating PNG';
235
260
  }
236
261
  else if (info.phase === 'done') {
237
262
  pct = 100;
263
+ stepLabel = 'Done';
238
264
  }
265
+ currentEncodePct = pct;
266
+ currentEncodeStep = stepLabel;
239
267
  encodeBar.update(Math.floor(pct), {
240
- step: info.phase.replace('_', ' '),
268
+ step: stepLabel,
241
269
  elapsed: String(Math.floor((Date.now() - startEncode) / 1000)),
242
270
  });
243
271
  };
244
272
  const output = await encodeBinaryToPng(inputBuffer, options);
245
273
  const encodeTime = Date.now() - startEncode;
274
+ clearInterval(encodeHeartbeat);
246
275
  encodeBar.update(100, {
247
276
  step: 'done',
248
277
  elapsed: String(Math.floor(encodeTime / 1000)),
@@ -277,10 +306,8 @@ async function decodeCommand(args) {
277
306
  process.exit(1);
278
307
  }
279
308
  const resolvedInput = resolve(inputPath);
309
+ const resolvedOutput = parsed.output || outputPath || 'decoded.bin';
280
310
  try {
281
- const inputBuffer = readFileSync(resolvedInput);
282
- console.log(' ');
283
- console.log(`Reading: ${resolvedInput}`);
284
311
  const options = {};
285
312
  if (parsed.passphrase) {
286
313
  options.passphrase = parsed.passphrase;
@@ -292,7 +319,6 @@ async function decodeCommand(args) {
292
319
  options.files = parsed.files;
293
320
  }
294
321
  console.log(' ');
295
- console.log(' ');
296
322
  console.log(`Decoding...`);
297
323
  console.log(' ');
298
324
  const decodeBar = new cliProgress.SingleBar({
@@ -303,17 +329,42 @@ async function decodeCommand(args) {
303
329
  elapsed: '0',
304
330
  });
305
331
  const startDecode = Date.now();
332
+ let currentPct = 50;
333
+ let currentStep = 'Decoding';
334
+ const heartbeat = setInterval(() => {
335
+ decodeBar.update(Math.floor(currentPct), {
336
+ step: currentStep,
337
+ elapsed: String(Math.floor((Date.now() - startDecode) / 1000)),
338
+ });
339
+ }, 1000);
306
340
  options.onProgress = (info) => {
307
- let pct = 50;
308
- if (info.phase === 'done')
309
- pct = 100;
310
- decodeBar.update(pct, {
311
- step: 'Decoding',
341
+ if (info.phase === 'decompress_start') {
342
+ currentPct = 50;
343
+ currentStep = 'Decompressing';
344
+ }
345
+ else if (info.phase === 'decompress_progress' &&
346
+ info.loaded &&
347
+ info.total) {
348
+ currentPct = 50 + Math.floor((info.loaded / info.total) * 40);
349
+ currentStep = `Decompressing (${info.loaded}/${info.total})`;
350
+ }
351
+ else if (info.phase === 'decompress_done') {
352
+ currentPct = 90;
353
+ currentStep = 'Decompressed';
354
+ }
355
+ else if (info.phase === 'done') {
356
+ currentPct = 100;
357
+ currentStep = 'Done';
358
+ }
359
+ decodeBar.update(Math.floor(currentPct), {
360
+ step: currentStep,
312
361
  elapsed: String(Math.floor((Date.now() - startDecode) / 1000)),
313
362
  });
314
363
  };
364
+ const inputBuffer = readFileSync(resolvedInput);
315
365
  const result = await decodePngToBinary(inputBuffer, options);
316
366
  const decodeTime = Date.now() - startDecode;
367
+ clearInterval(heartbeat);
317
368
  decodeBar.update(100, {
318
369
  step: 'done',
319
370
  elapsed: String(Math.floor(decodeTime / 1000)),
@@ -328,33 +379,43 @@ async function decodeCommand(args) {
328
379
  writeFileSync(fullPath, file.buf);
329
380
  }
330
381
  console.log(`\nSuccess!`);
331
- console.log(`Extracted ${result.files.length} files to ${baseDir === '.' ? 'current directory' : baseDir}`);
382
+ console.log(`Unpacked ${result.files.length} files to directory : ${resolve(baseDir)}`);
332
383
  console.log(`Time: ${decodeTime}ms`);
333
384
  }
334
385
  else if (result.buf) {
335
386
  const unpacked = unpackBuffer(result.buf);
336
387
  if (unpacked) {
337
- const baseDir = parsed.output || outputPath || result.meta?.name || '.';
388
+ const baseDir = parsed.output || outputPath || '.';
338
389
  for (const file of unpacked.files) {
339
390
  const fullPath = join(baseDir, file.path);
340
391
  const dir = dirname(fullPath);
341
392
  mkdirSync(dir, { recursive: true });
342
393
  writeFileSync(fullPath, file.buf);
343
394
  }
344
- console.log(`Unpacked ${unpacked.files.length} files to ${baseDir === '.' ? 'current directory' : baseDir}`);
395
+ console.log(`\nSuccess!`);
396
+ console.log(`Time: ${decodeTime}ms`);
397
+ console.log(`Unpacked ${unpacked.files.length} files to current directory`);
345
398
  }
346
399
  else {
347
- const resolvedOutput = parsed.output || outputPath || result.meta?.name || 'decoded.bin';
348
- writeFileSync(resolvedOutput, result.buf);
400
+ let finalOutput = resolvedOutput;
401
+ if (!parsed.output && !outputPath && result.meta?.name) {
402
+ finalOutput = result.meta.name;
403
+ }
404
+ writeFileSync(finalOutput, result.buf);
349
405
  console.log(`\nSuccess!`);
350
406
  if (result.meta?.name) {
351
407
  console.log(` Original name: ${result.meta.name}`);
352
408
  }
353
- console.log(` Output size: ${(result.buf.length / 1024 / 1024).toFixed(2)} MB`);
409
+ const outputSize = (result.buf.length / 1024 / 1024).toFixed(2);
410
+ console.log(` Output size: ${outputSize} MB`);
354
411
  console.log(` Time: ${decodeTime}ms`);
355
- console.log(` Saved: ${resolvedOutput}`);
412
+ console.log(` Saved: ${finalOutput}`);
356
413
  }
357
414
  }
415
+ else {
416
+ console.log(`\nSuccess!`);
417
+ console.log(`Time: ${decodeTime}ms`);
418
+ }
358
419
  console.log(' ');
359
420
  }
360
421
  catch (err) {
package/dist/index.d.ts CHANGED
@@ -127,6 +127,10 @@ export interface DecodeOptions {
127
127
  * Directory to save debug images (doubled.png, reconstructed.png).
128
128
  */
129
129
  debugDir?: string;
130
+ /**
131
+ * Path to write decoded output directly to disk (streamed) to avoid high memory usage.
132
+ */
133
+ outPath?: string;
130
134
  /**
131
135
  * List of files to extract selectively from archives.
132
136
  */
@@ -153,6 +157,19 @@ export declare function cropAndReconstitute(input: Buffer, debugDir?: string): P
153
157
  * @param input - Data to encode
154
158
  * @param opts - Encoding options
155
159
  * @public
160
+ * @example
161
+ * ```typescript
162
+ * import { readFileSync, writeFileSync } from 'fs';
163
+ * import { encodeBinaryToPng } from 'roxify';
164
+ *
165
+ * const fileName = 'input.bin'; //Path of your input file here
166
+ * const inputBuffer = readFileSync(fileName);
167
+ * const pngBuffer = await encodeBinaryToPng(inputBuffer, {
168
+ * name: fileName,
169
+ * });
170
+ * writeFileSync('output.png', pngBuffer);
171
+
172
+ * ```
156
173
  */
157
174
  export declare function encodeBinaryToPng(input: Buffer, opts?: EncodeOptions): Promise<Buffer>;
158
175
  /**
@@ -162,6 +179,13 @@ export declare function encodeBinaryToPng(input: Buffer, opts?: EncodeOptions):
162
179
  * @param pngBuf - PNG data
163
180
  * @param opts - Options (passphrase for encrypted inputs)
164
181
  * @public
182
+ * @example
183
+ * import { readFileSync, writeFileSync } from 'fs';
184
+ * import { decodePngToBinary } from 'roxify';
185
+ *
186
+ * const pngFromDisk = readFileSync('output.png'); //Path of the encoded PNG here
187
+ * const { buf, meta } = await decodePngToBinary(pngFromDisk);
188
+ * writeFileSync(meta?.name ?? 'decoded.txt', buf);
165
189
  */
166
190
  export declare function decodePngToBinary(pngBuf: Buffer, opts?: DecodeOptions): Promise<DecodeResult>;
167
191
  export { packPaths, unpackBuffer } from './pack.js';
package/dist/index.js CHANGED
@@ -1,6 +1,8 @@
1
1
  import { compress as zstdCompress, decompress as zstdDecompress, } from '@mongodb-js/zstd';
2
2
  import cliProgress from 'cli-progress';
3
3
  import { createCipheriv, createDecipheriv, pbkdf2Sync, randomBytes, } from 'crypto';
4
+ import { createReadStream, createWriteStream, readFileSync, unlinkSync, } from 'fs';
5
+ import { tmpdir } from 'os';
4
6
  import { join } from 'path';
5
7
  import encode from 'png-chunks-encode';
6
8
  import extract from 'png-chunks-extract';
@@ -42,6 +44,15 @@ const MARKER_COLORS = [
42
44
  ];
43
45
  const MARKER_START = MARKER_COLORS;
44
46
  const MARKER_END = [...MARKER_COLORS].reverse();
47
+ const CHUNK_SIZE = 8 * 1024;
48
+ async function writeInChunks(ws, buf, chunkSize = CHUNK_SIZE) {
49
+ for (let i = 0; i < buf.length; i += chunkSize) {
50
+ const part = buf.slice(i, i + chunkSize);
51
+ if (!ws.write(part))
52
+ await new Promise((resolve) => ws.once('drain', resolve));
53
+ await new Promise((resolve) => setTimeout(resolve, 50));
54
+ }
55
+ }
45
56
  const COMPRESSION_MARKERS = {
46
57
  zstd: [{ r: 0, g: 255, b: 0 }],
47
58
  };
@@ -54,6 +65,144 @@ function colorsToBytes(colors) {
54
65
  }
55
66
  return buf;
56
67
  }
68
+ function deltaEncode(data) {
69
+ if (data.length === 0)
70
+ return data;
71
+ const out = Buffer.alloc(data.length);
72
+ out[0] = data[0];
73
+ for (let i = 1; i < data.length; i++) {
74
+ out[i] = (data[i] - data[i - 1] + 256) & 0xff;
75
+ }
76
+ return out;
77
+ }
78
+ function deltaDecode(data) {
79
+ if (data.length === 0)
80
+ return data;
81
+ const out = Buffer.alloc(data.length);
82
+ out[0] = data[0];
83
+ for (let i = 1; i < data.length; i++) {
84
+ out[i] = (out[i - 1] + data[i]) & 0xff;
85
+ }
86
+ return out;
87
+ }
88
+ async function parallelZstdCompress(payload, level = 22, onProgress) {
89
+ const chunkSize = 1024 * 1024 * 1024;
90
+ if (payload.length <= chunkSize) {
91
+ return Buffer.from(await zstdCompress(payload, level));
92
+ }
93
+ const chunks = [];
94
+ const promises = [];
95
+ const totalChunks = Math.ceil(payload.length / chunkSize);
96
+ let completedChunks = 0;
97
+ for (let i = 0; i < payload.length; i += chunkSize) {
98
+ const chunk = payload.slice(i, Math.min(i + chunkSize, payload.length));
99
+ promises.push(zstdCompress(chunk, level).then((compressed) => {
100
+ completedChunks++;
101
+ if (onProgress) {
102
+ onProgress(completedChunks, totalChunks);
103
+ }
104
+ return Buffer.from(compressed);
105
+ }));
106
+ }
107
+ const compressedChunks = await Promise.all(promises);
108
+ const chunkSizes = Buffer.alloc(compressedChunks.length * 4);
109
+ for (let i = 0; i < compressedChunks.length; i++) {
110
+ chunkSizes.writeUInt32BE(compressedChunks[i].length, i * 4);
111
+ }
112
+ const header = Buffer.alloc(8);
113
+ header.writeUInt32BE(0x5a535444, 0);
114
+ header.writeUInt32BE(compressedChunks.length, 4);
115
+ return Buffer.concat([header, chunkSizes, ...compressedChunks]);
116
+ }
117
+ async function parallelZstdDecompress(payload, onProgress, onChunk, outPath) {
118
+ if (payload.length < 8) {
119
+ onProgress?.({ phase: 'decompress_start', total: 1 });
120
+ const d = Buffer.from(await zstdDecompress(payload));
121
+ if (onChunk)
122
+ await onChunk(d, 0, 1);
123
+ onProgress?.({ phase: 'decompress_progress', loaded: 1, total: 1 });
124
+ onProgress?.({ phase: 'decompress_done', loaded: 1, total: 1 });
125
+ if (outPath) {
126
+ const ws = createWriteStream(outPath);
127
+ await writeInChunks(ws, d);
128
+ ws.end();
129
+ }
130
+ return onChunk ? Buffer.alloc(0) : d;
131
+ }
132
+ const magic = payload.readUInt32BE(0);
133
+ if (magic !== 0x5a535444) {
134
+ onProgress?.({ phase: 'decompress_start', total: 1 });
135
+ const d = Buffer.from(await zstdDecompress(payload));
136
+ onProgress?.({ phase: 'decompress_progress', loaded: 1, total: 1 });
137
+ onProgress?.({ phase: 'decompress_done', loaded: 1, total: 1 });
138
+ if (outPath) {
139
+ const ws = createWriteStream(outPath);
140
+ await writeInChunks(ws, d);
141
+ ws.end();
142
+ }
143
+ return d;
144
+ }
145
+ const numChunks = payload.readUInt32BE(4);
146
+ const chunkSizes = [];
147
+ let offset = 8;
148
+ for (let i = 0; i < numChunks; i++) {
149
+ chunkSizes.push(payload.readUInt32BE(offset));
150
+ offset += 4;
151
+ }
152
+ onProgress?.({ phase: 'decompress_start', total: numChunks });
153
+ const tempFiles = [];
154
+ for (let i = 0; i < numChunks; i++) {
155
+ const size = chunkSizes[i];
156
+ const chunk = payload.slice(offset, offset + size);
157
+ offset += size;
158
+ const tempFile = join(tmpdir(), `rox_chunk_${Date.now()}_${i}.tmp`);
159
+ const wsChunk = createWriteStream(tempFile);
160
+ const dec = Buffer.from(await zstdDecompress(chunk));
161
+ if (onChunk) {
162
+ await onChunk(dec, i + 1, numChunks);
163
+ unlinkSync(tempFile);
164
+ }
165
+ else {
166
+ await writeInChunks(wsChunk, dec);
167
+ await new Promise((res) => wsChunk.end(() => res()));
168
+ tempFiles.push(tempFile);
169
+ }
170
+ onProgress?.({
171
+ phase: 'decompress_progress',
172
+ loaded: i + 1,
173
+ total: numChunks,
174
+ });
175
+ }
176
+ onProgress?.({
177
+ phase: 'decompress_done',
178
+ loaded: numChunks,
179
+ total: numChunks,
180
+ });
181
+ if (onChunk) {
182
+ return Buffer.alloc(0);
183
+ }
184
+ if (outPath || tempFiles.length > 0) {
185
+ const finalPath = outPath || join(tmpdir(), `rox_final_${Date.now()}.tmp`);
186
+ const ws = createWriteStream(finalPath);
187
+ for (const tempFile of tempFiles) {
188
+ const rs = createReadStream(tempFile);
189
+ await new Promise((resolve, reject) => {
190
+ rs.on('data', (chunk) => ws.write(chunk));
191
+ rs.on('end', resolve);
192
+ rs.on('error', reject);
193
+ });
194
+ unlinkSync(tempFile);
195
+ }
196
+ await new Promise((res) => ws.end(() => res()));
197
+ if (!outPath) {
198
+ const finalBuf = readFileSync(finalPath);
199
+ unlinkSync(finalPath);
200
+ return finalBuf;
201
+ }
202
+ return Buffer.alloc(0);
203
+ }
204
+ return Buffer.alloc(0);
205
+ }
57
206
  function applyXor(buf, passphrase) {
58
207
  const key = Buffer.from(passphrase, 'utf8');
59
208
  const out = Buffer.alloc(buf.length);
@@ -65,14 +214,8 @@ function applyXor(buf, passphrase) {
65
214
  function tryBrotliDecompress(payload) {
66
215
  return Buffer.from(zlib.brotliDecompressSync(payload));
67
216
  }
68
- async function tryZstdDecompress(payload) {
69
- try {
70
- const result = await zstdDecompress(payload);
71
- return Buffer.from(result);
72
- }
73
- catch {
74
- return payload;
75
- }
217
+ async function tryZstdDecompress(payload, onProgress, onChunk, outPath) {
218
+ return await parallelZstdDecompress(payload, onProgress, onChunk, outPath);
76
219
  }
77
220
  function tryDecryptIfNeeded(buf, passphrase) {
78
221
  if (!buf || buf.length === 0)
@@ -394,6 +537,19 @@ export async function cropAndReconstitute(input, debugDir) {
394
537
  * @param input - Data to encode
395
538
  * @param opts - Encoding options
396
539
  * @public
540
+ * @example
541
+ * ```typescript
542
+ * import { readFileSync, writeFileSync } from 'fs';
543
+ * import { encodeBinaryToPng } from 'roxify';
544
+ *
545
+ * const fileName = 'input.bin'; //Path of your input file here
546
+ * const inputBuffer = readFileSync(fileName);
547
+ * const pngBuffer = await encodeBinaryToPng(inputBuffer, {
548
+ * name: fileName,
549
+ * });
550
+ * writeFileSync('output.png', pngBuffer);
551
+
552
+ * ```
397
553
  */
398
554
  export async function encodeBinaryToPng(input, opts = {}) {
399
555
  let progressBar = null;
@@ -434,20 +590,17 @@ export async function encodeBinaryToPng(input, opts = {}) {
434
590
  const compression = opts.compression || 'zstd';
435
591
  if (opts.onProgress)
436
592
  opts.onProgress({ phase: 'compress_start', total: payload.length });
437
- const compressedChunks = [];
438
- const chunkSize = 1024 * 1024;
439
- for (let i = 0; i < payload.length; i += chunkSize) {
440
- const chunk = payload.slice(i, Math.min(i + chunkSize, payload.length));
441
- const compressedChunk = Buffer.from(await zstdCompress(chunk, 22));
442
- compressedChunks.push(compressedChunk);
443
- if (opts.onProgress)
593
+ const useDelta = mode !== 'screenshot';
594
+ const deltaEncoded = useDelta ? deltaEncode(payload) : payload;
595
+ payload = await parallelZstdCompress(deltaEncoded, 1, (loaded, total) => {
596
+ if (opts.onProgress) {
444
597
  opts.onProgress({
445
598
  phase: 'compress_progress',
446
- loaded: i + chunk.length,
447
- total: payload.length,
599
+ loaded,
600
+ total,
448
601
  });
449
- }
450
- payload = Buffer.concat(compressedChunks);
602
+ }
603
+ });
451
604
  if (opts.onProgress)
452
605
  opts.onProgress({ phase: 'compress_done', loaded: payload.length });
453
606
  if (opts.passphrase && !opts.encrypt) {
@@ -480,6 +633,8 @@ export async function encodeBinaryToPng(input, opts = {}) {
480
633
  const enc = Buffer.concat([cipher.update(payload), cipher.final()]);
481
634
  const tag = cipher.getAuthTag();
482
635
  payload = Buffer.concat([Buffer.from([ENC_AES]), salt, iv, tag, enc]);
636
+ if (opts.onProgress)
637
+ opts.onProgress({ phase: 'encrypt_done' });
483
638
  }
484
639
  else if (encChoice === 'xor') {
485
640
  const xored = applyXor(payload, opts.passphrase);
@@ -520,7 +675,7 @@ export async function encodeBinaryToPng(input, opts = {}) {
520
675
  const nameLen = nameBuf.length;
521
676
  const payloadLenBuf = Buffer.alloc(4);
522
677
  payloadLenBuf.writeUInt32BE(payload.length, 0);
523
- const version = 2;
678
+ const version = 1;
524
679
  const metaPixel = Buffer.concat([
525
680
  Buffer.from([version]),
526
681
  Buffer.from([nameLen]),
@@ -542,15 +697,21 @@ export async function encodeBinaryToPng(input, opts = {}) {
542
697
  ]);
543
698
  const bytesPerPixel = 3;
544
699
  const dataPixels = Math.ceil(dataWithMarkers.length / 3);
545
- let logicalWidth = Math.ceil(Math.sqrt(dataPixels));
546
- if (logicalWidth < MARKER_END.length) {
547
- logicalWidth = MARKER_END.length;
548
- }
549
- const dataRows = Math.ceil(dataPixels / logicalWidth);
550
- const pixelsInLastRow = dataPixels % logicalWidth;
551
- const spaceInLastRow = pixelsInLastRow === 0 ? logicalWidth : logicalWidth - pixelsInLastRow;
552
- const needsExtraRow = spaceInLastRow < MARKER_END.length;
553
- const logicalHeight = needsExtraRow ? dataRows + 1 : dataRows;
700
+ const totalPixels = dataPixels + MARKER_END.length;
701
+ const maxWidth = 16384;
702
+ let side = Math.ceil(Math.sqrt(totalPixels));
703
+ if (side < MARKER_END.length)
704
+ side = MARKER_END.length;
705
+ let logicalWidth;
706
+ let logicalHeight;
707
+ if (side <= maxWidth) {
708
+ logicalWidth = side;
709
+ logicalHeight = side;
710
+ }
711
+ else {
712
+ logicalWidth = Math.min(maxWidth, totalPixels);
713
+ logicalHeight = Math.ceil(totalPixels / logicalWidth);
714
+ }
554
715
  const scale = 1;
555
716
  const width = logicalWidth * scale;
556
717
  const height = logicalHeight * scale;
@@ -566,8 +727,7 @@ export async function encodeBinaryToPng(input, opts = {}) {
566
727
  g = MARKER_END[markerIdx].g;
567
728
  b = MARKER_END[markerIdx].b;
568
729
  }
569
- else if (ly < dataRows ||
570
- (ly === dataRows && linearIdx < dataPixels)) {
730
+ else if (linearIdx < dataPixels) {
571
731
  const srcIdx = linearIdx * 3;
572
732
  r = srcIdx < dataWithMarkers.length ? dataWithMarkers[srcIdx] : 0;
573
733
  g =
@@ -593,7 +753,9 @@ export async function encodeBinaryToPng(input, opts = {}) {
593
753
  }
594
754
  if (opts.onProgress)
595
755
  opts.onProgress({ phase: 'png_gen' });
596
- let bufScr = await sharp(raw, { raw: { width, height, channels: 3 } })
756
+ let bufScr = await sharp(raw, {
757
+ raw: { width, height, channels: 3 },
758
+ })
597
759
  .png({
598
760
  compressionLevel: 9,
599
761
  palette: false,
@@ -633,9 +795,10 @@ export async function encodeBinaryToPng(input, opts = {}) {
633
795
  const full = Buffer.concat([PIXEL_MAGIC, metaPixel]);
634
796
  const bytesPerPixel = 3;
635
797
  const nPixels = Math.ceil((full.length + 8) / 3);
636
- const side = Math.ceil(Math.sqrt(nPixels));
637
- const width = Math.max(1, Math.min(side, 65535));
638
- const height = Math.ceil(nPixels / width);
798
+ const desiredSide = Math.ceil(Math.sqrt(nPixels));
799
+ const sideClamped = Math.max(1, Math.min(desiredSide, 65535));
800
+ const width = sideClamped;
801
+ const height = sideClamped === desiredSide ? sideClamped : Math.ceil(nPixels / width);
639
802
  const dimHeader = Buffer.alloc(8);
640
803
  dimHeader.writeUInt32BE(width, 0);
641
804
  dimHeader.writeUInt32BE(height, 4);
@@ -737,6 +900,13 @@ export async function encodeBinaryToPng(input, opts = {}) {
737
900
  * @param pngBuf - PNG data
738
901
  * @param opts - Options (passphrase for encrypted inputs)
739
902
  * @public
903
+ * @example
904
+ * import { readFileSync, writeFileSync } from 'fs';
905
+ * import { decodePngToBinary } from 'roxify';
906
+ *
907
+ * const pngFromDisk = readFileSync('output.png'); //Path of the encoded PNG here
908
+ * const { buf, meta } = await decodePngToBinary(pngFromDisk);
909
+ * writeFileSync(meta?.name ?? 'decoded.txt', buf);
740
910
  */
741
911
  export async function decodePngToBinary(pngBuf, opts = {}) {
742
912
  let progressBar = null;
@@ -771,15 +941,28 @@ export async function decodePngToBinary(pngBuf, opts = {}) {
771
941
  try {
772
942
  const info = await sharp(pngBuf).metadata();
773
943
  if (info.width && info.height) {
774
- const doubledBuffer = await sharp(pngBuf)
775
- .resize({
776
- width: info.width * 2,
777
- height: info.height * 2,
778
- kernel: 'nearest',
779
- })
780
- .png()
781
- .toBuffer();
782
- processedBuf = await cropAndReconstitute(doubledBuffer, opts.debugDir);
944
+ const MAX_RAW_BYTES = 150 * 1024 * 1024;
945
+ const rawBytesEstimate = info.width * info.height * 4;
946
+ if (rawBytesEstimate > MAX_RAW_BYTES) {
947
+ throw new DataFormatError(`Image too large to decode in-process (${Math.round(rawBytesEstimate / 1024 / 1024)} MB). Increase Node heap or use a smaller image/compact mode.`);
948
+ }
949
+ const MAX_DOUBLE_BYTES = 200 * 1024 * 1024;
950
+ const doubledPixels = info.width * 2 * (info.height * 2);
951
+ const doubledBytesEstimate = doubledPixels * 4;
952
+ if (false) {
953
+ const doubledBuffer = await sharp(pngBuf)
954
+ .resize({
955
+ width: info.width * 2,
956
+ height: info.height * 2,
957
+ kernel: 'nearest',
958
+ })
959
+ .png()
960
+ .toBuffer();
961
+ processedBuf = await cropAndReconstitute(doubledBuffer, opts.debugDir);
962
+ }
963
+ else {
964
+ processedBuf = pngBuf;
965
+ }
783
966
  }
784
967
  }
785
968
  catch (e) { }
@@ -796,16 +979,54 @@ export async function decodePngToBinary(pngBuf, opts = {}) {
796
979
  }
797
980
  const rawPayload = d.slice(idx);
798
981
  let payload = tryDecryptIfNeeded(rawPayload, opts.passphrase);
982
+ if (opts.outPath) {
983
+ const ws = createWriteStream(opts.outPath, { highWaterMark: 64 * 1024 });
984
+ let headerBuf = Buffer.alloc(0);
985
+ let headerSkipped = false;
986
+ await tryZstdDecompress(payload, (info) => {
987
+ if (opts.onProgress)
988
+ opts.onProgress(info);
989
+ }, async (decChunk) => {
990
+ if (!headerSkipped) {
991
+ if (decChunk.length < MAGIC.length) {
992
+ headerBuf = Buffer.concat([headerBuf, decChunk]);
993
+ return;
994
+ }
995
+ const mag = decChunk.slice(0, MAGIC.length);
996
+ if (!mag.equals(MAGIC)) {
997
+ ws.close();
998
+ throw new Error('Invalid ROX format (ROX direct: missing ROX1 magic after decompression)');
999
+ }
1000
+ const toWriteBuf = decChunk.slice(MAGIC.length);
1001
+ if (toWriteBuf.length > 0) {
1002
+ await writeInChunks(ws, toWriteBuf, 16 * 1024);
1003
+ }
1004
+ headerBuf = Buffer.alloc(0);
1005
+ headerSkipped = true;
1006
+ }
1007
+ else {
1008
+ await writeInChunks(ws, decChunk, 64 * 1024);
1009
+ }
1010
+ });
1011
+ await new Promise((res) => ws.end(() => res()));
1012
+ if (opts.onProgress)
1013
+ opts.onProgress({ phase: 'done' });
1014
+ progressBar?.stop();
1015
+ return { meta: { name } };
1016
+ }
799
1017
  if (opts.onProgress)
800
- opts.onProgress({ phase: 'decompress' });
1018
+ opts.onProgress({ phase: 'decompress_start' });
801
1019
  try {
802
- payload = await tryZstdDecompress(payload);
1020
+ payload = await tryZstdDecompress(payload, (info) => {
1021
+ if (opts.onProgress)
1022
+ opts.onProgress(info);
1023
+ });
803
1024
  }
804
1025
  catch (e) {
805
1026
  const errMsg = e instanceof Error ? e.message : String(e);
806
1027
  if (opts.passphrase)
807
- throw new Error('Incorrect passphrase (ROX format, zstd failed: ' + errMsg + ')');
808
- throw new Error('ROX format zstd decompression failed: ' + errMsg);
1028
+ throw new IncorrectPassphraseError('Incorrect passphrase (compact mode, zstd failed: ' + errMsg + ')');
1029
+ throw new DataFormatError('Compact mode zstd decompression failed: ' + errMsg);
809
1030
  }
810
1031
  if (!payload.slice(0, MAGIC.length).equals(MAGIC)) {
811
1032
  throw new Error('Invalid ROX format (ROX direct: missing ROX1 magic after decompression)');
@@ -855,10 +1076,48 @@ export async function decodePngToBinary(pngBuf, opts = {}) {
855
1076
  if (rawPayload.length === 0)
856
1077
  throw new DataFormatError('Compact mode payload empty');
857
1078
  let payload = tryDecryptIfNeeded(rawPayload, opts.passphrase);
1079
+ if (opts.outPath) {
1080
+ const ws = createWriteStream(opts.outPath, { highWaterMark: 64 * 1024 });
1081
+ let headerBuf = Buffer.alloc(0);
1082
+ let headerSkipped = false;
1083
+ await tryZstdDecompress(payload, (info) => {
1084
+ if (opts.onProgress)
1085
+ opts.onProgress(info);
1086
+ }, async (decChunk) => {
1087
+ if (!headerSkipped) {
1088
+ if (decChunk.length < MAGIC.length) {
1089
+ headerBuf = Buffer.concat([headerBuf, decChunk]);
1090
+ return;
1091
+ }
1092
+ const mag = decChunk.slice(0, MAGIC.length);
1093
+ if (!mag.equals(MAGIC)) {
1094
+ ws.close();
1095
+ throw new DataFormatError('Invalid ROX format (compact mode: missing ROX1 magic after decompression)');
1096
+ }
1097
+ const toWriteBuf = decChunk.slice(MAGIC.length);
1098
+ if (toWriteBuf.length > 0) {
1099
+ await writeInChunks(ws, toWriteBuf);
1100
+ }
1101
+ headerBuf = Buffer.alloc(0);
1102
+ headerSkipped = true;
1103
+ }
1104
+ else {
1105
+ await writeInChunks(ws, decChunk, 64 * 1024);
1106
+ }
1107
+ });
1108
+ await new Promise((res) => ws.end(() => res()));
1109
+ if (opts.onProgress)
1110
+ opts.onProgress({ phase: 'done' });
1111
+ progressBar?.stop();
1112
+ return { meta: { name } };
1113
+ }
858
1114
  if (opts.onProgress)
859
- opts.onProgress({ phase: 'decompress' });
1115
+ opts.onProgress({ phase: 'decompress_start' });
860
1116
  try {
861
- payload = await tryZstdDecompress(payload);
1117
+ payload = await tryZstdDecompress(payload, (info) => {
1118
+ if (opts.onProgress)
1119
+ opts.onProgress(info);
1120
+ });
862
1121
  }
863
1122
  catch (e) {
864
1123
  const errMsg = e instanceof Error ? e.message : String(e);
@@ -936,7 +1195,7 @@ export async function decodePngToBinary(pngBuf, opts = {}) {
936
1195
  logicalData = rawRGB;
937
1196
  }
938
1197
  else {
939
- const reconstructed = await cropAndReconstitute(data, opts.debugDir);
1198
+ const reconstructed = await cropAndReconstitute(processedBuf, opts.debugDir);
940
1199
  const { data: rdata, info: rinfo } = await sharp(reconstructed)
941
1200
  .ensureAlpha()
942
1201
  .raw()
@@ -982,7 +1241,26 @@ export async function decodePngToBinary(pngBuf, opts = {}) {
982
1241
  const rawPayload = logicalData.slice(idx, idx + payloadLen);
983
1242
  let payload = tryDecryptIfNeeded(rawPayload, opts.passphrase);
984
1243
  try {
985
- payload = await tryZstdDecompress(payload);
1244
+ if (opts.outPath) {
1245
+ await tryZstdDecompress(payload, (info) => {
1246
+ if (opts.onProgress)
1247
+ opts.onProgress(info);
1248
+ }, undefined, opts.outPath);
1249
+ }
1250
+ else {
1251
+ payload = await tryZstdDecompress(payload, (info) => {
1252
+ if (opts.onProgress)
1253
+ opts.onProgress(info);
1254
+ });
1255
+ }
1256
+ if (version === 3) {
1257
+ if (opts.outPath) {
1258
+ throw new Error('outPath not supported with delta encoding yet');
1259
+ }
1260
+ else {
1261
+ payload = deltaDecode(payload);
1262
+ }
1263
+ }
986
1264
  }
987
1265
  catch (e) { }
988
1266
  if (!payload.slice(0, MAGIC.length).equals(MAGIC)) {
@@ -1211,7 +1489,70 @@ export async function decodePngToBinary(pngBuf, opts = {}) {
1211
1489
  const rawPayload = pixelBytes.slice(idx, idx + payloadLen);
1212
1490
  let payload = tryDecryptIfNeeded(rawPayload, opts.passphrase);
1213
1491
  try {
1214
- payload = await tryZstdDecompress(payload);
1492
+ if (opts.outPath) {
1493
+ const ws = createWriteStream(opts.outPath, {
1494
+ highWaterMark: 64 * 1024,
1495
+ });
1496
+ let headerBuf = Buffer.alloc(0);
1497
+ let headerSkipped = false;
1498
+ let lastOutByte = null;
1499
+ await tryZstdDecompress(payload, (info) => {
1500
+ if (opts.onProgress)
1501
+ opts.onProgress(info);
1502
+ }, async (decChunk, idxChunk, totalChunks) => {
1503
+ let outChunk = decChunk;
1504
+ if (version === 3) {
1505
+ const out = Buffer.alloc(decChunk.length);
1506
+ for (let i = 0; i < decChunk.length; i++) {
1507
+ if (i === 0) {
1508
+ out[0] =
1509
+ typeof lastOutByte === 'number'
1510
+ ? (lastOutByte + decChunk[0]) & 0xff
1511
+ : decChunk[0];
1512
+ }
1513
+ else {
1514
+ out[i] = (out[i - 1] + decChunk[i]) & 0xff;
1515
+ }
1516
+ }
1517
+ lastOutByte = out[out.length - 1];
1518
+ outChunk = out;
1519
+ }
1520
+ if (!headerSkipped) {
1521
+ if (outChunk.length < MAGIC.length) {
1522
+ headerBuf = Buffer.concat([headerBuf, outChunk]);
1523
+ return;
1524
+ }
1525
+ const mag = outChunk.slice(0, MAGIC.length);
1526
+ if (!mag.equals(MAGIC)) {
1527
+ ws.close();
1528
+ throw new DataFormatError('Invalid ROX format (pixel mode: missing ROX1 magic after decompression)');
1529
+ }
1530
+ const toWriteBuf = outChunk.slice(MAGIC.length);
1531
+ if (toWriteBuf.length > 0) {
1532
+ await writeInChunks(ws, toWriteBuf, 64 * 1024);
1533
+ }
1534
+ headerBuf = Buffer.alloc(0);
1535
+ headerSkipped = true;
1536
+ }
1537
+ else {
1538
+ await writeInChunks(ws, outChunk, 64 * 1024);
1539
+ }
1540
+ });
1541
+ await new Promise((res, rej) => ws.end(() => res()));
1542
+ if (opts.onProgress)
1543
+ opts.onProgress({ phase: 'done' });
1544
+ progressBar?.stop();
1545
+ return { meta: { name } };
1546
+ }
1547
+ else {
1548
+ payload = await tryZstdDecompress(payload, (info) => {
1549
+ if (opts.onProgress)
1550
+ opts.onProgress(info);
1551
+ });
1552
+ if (version === 3) {
1553
+ payload = deltaDecode(payload);
1554
+ }
1555
+ }
1215
1556
  }
1216
1557
  catch (e) {
1217
1558
  const errMsg = e instanceof Error ? e.message : String(e);
package/dist/pack.d.ts CHANGED
@@ -14,3 +14,13 @@ export declare function unpackBuffer(buf: Buffer, fileList?: string[]): {
14
14
  buf: Buffer;
15
15
  }[];
16
16
  } | null;
17
+ /**
18
+ * Stream-unpack a packed buffer file on disk into a directory without
19
+ * loading the whole archive into memory.
+ *
+ * @param filePath - Path of the packed archive file on disk
+ * @param baseDir - Directory to extract files into
+ * @param fileList - Optional allow-list of archive paths to extract; when
+ * given, entries not in the list are skipped
+ * @returns The extracted files with their archive-relative paths and byte sizes
20
+ */
21
+ export declare function unpackFileToDir(filePath: string, baseDir: string, fileList?: string[]): {
22
+ files: {
23
+ path: string;
24
+ size: number;
25
+ }[];
26
+ };
package/dist/pack.js CHANGED
@@ -1,5 +1,5 @@
1
- import { readFileSync, readdirSync, statSync } from 'fs';
2
- import { join, relative, resolve, sep } from 'path';
1
+ import { closeSync, createWriteStream, mkdirSync, openSync, readFileSync, readSync, readdirSync, statSync, writeSync, } from 'fs';
2
+ import { dirname, join, relative, resolve, sep } from 'path';
3
3
  function collectFiles(paths) {
4
4
  const files = [];
5
5
  for (const p of paths) {
@@ -83,3 +83,60 @@ export function unpackBuffer(buf, fileList) {
83
83
  }
84
84
  return { files };
85
85
  }
86
/**
 * Stream-unpack a packed ROXP archive file on disk into a directory without
 * loading the whole archive into memory.
 *
 * Archive layout (big-endian): u32 magic 0x524f5850 ('ROXP'), u32 file count,
 * then per entry: u16 name length, UTF-8 name, u64 payload size, payload bytes.
 *
 * @param filePath - Path of the packed archive file on disk
 * @param baseDir - Directory to extract files into (created as needed)
 * @param fileList - Optional allow-list of archive paths; entries not listed
 *   are skipped without reading their payload
 * @returns The extracted files with their archive-relative paths and byte sizes
 * @throws Error on a malformed or truncated archive, or on an entry whose
 *   name would escape baseDir (zip-slip path traversal)
 */
export function unpackFileToDir(filePath, baseDir, fileList) {
    const fd = openSync(filePath, 'r');
    try {
        const headerBuf = Buffer.alloc(8);
        if (readSync(fd, headerBuf, 0, 8, 0) < 8)
            throw new Error('Invalid archive');
        if (headerBuf.readUInt32BE(0) !== 0x524f5850)
            throw new Error('Not a pack archive');
        const fileCount = headerBuf.readUInt32BE(4);
        let offset = 8;
        const files = [];
        const resolvedBase = resolve(baseDir);
        for (let i = 0; i < fileCount; i++) {
            const nameLenBuf = Buffer.alloc(2);
            if (readSync(fd, nameLenBuf, 0, 2, offset) < 2)
                throw new Error('Truncated archive (entry header)');
            const nameLen = nameLenBuf.readUInt16BE(0);
            offset += 2;
            const nameBuf = Buffer.alloc(nameLen);
            if (readSync(fd, nameBuf, 0, nameLen, offset) < nameLen)
                throw new Error('Truncated archive (entry name)');
            const name = nameBuf.toString('utf8');
            offset += nameLen;
            const sizeBuf = Buffer.alloc(8);
            if (readSync(fd, sizeBuf, 0, 8, offset) < 8)
                throw new Error('Truncated archive (entry size)');
            const size = Number(sizeBuf.readBigUInt64BE(0));
            offset += 8;
            if (fileList && !fileList.includes(name)) {
                // Not requested: skip the payload without reading it.
                offset += size;
                continue;
            }
            // Reject entries that would escape baseDir (zip-slip traversal,
            // e.g. '../evil' or an absolute path embedded in the archive).
            const outPath = resolve(resolvedBase, name);
            const rel = relative(resolvedBase, outPath);
            if (rel === '..' || rel.startsWith('..' + sep))
                throw new Error('Unsafe entry path in archive: ' + name);
            mkdirSync(dirname(outPath), { recursive: true });
            // Write synchronously so the file is fully flushed before this
            // (synchronous) function returns. The previous createWriteStream
            // version never awaited ws.end() and ignored write backpressure,
            // so callers could observe empty or truncated output files.
            const outFd = openSync(outPath, 'w');
            try {
                let remaining = size;
                const chunk = Buffer.alloc(64 * 1024);
                while (remaining > 0) {
                    const toRead = Math.min(remaining, chunk.length);
                    const bytesRead = readSync(fd, chunk, 0, toRead, offset);
                    if (bytesRead <= 0)
                        // Previously a silent break that left a short file.
                        throw new Error('Truncated archive (entry payload)');
                    writeSync(outFd, chunk, 0, bytesRead);
                    offset += bytesRead;
                    remaining -= bytesRead;
                }
            }
            finally {
                closeSync(outFd);
            }
            files.push({ path: name, size });
        }
        return { files };
    }
    finally {
        closeSync(fd);
    }
}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "roxify",
3
- "version": "1.1.6",
3
+ "version": "1.1.8",
4
4
  "description": "Encode binary data into PNG images with Zstd compression and decode them back. Supports CLI and programmatic API (Node.js ESM).",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",
@@ -16,21 +16,24 @@
16
16
  "build": "tsc",
17
17
  "check-publish": "node ../scripts/check-publish.js roxify",
18
18
  "cli": "node dist/cli.js",
19
- "test": "node test/pack.test.js"
19
+ "test": "npm run build && node test/pack.test.js && node test/screenshot.test.js"
20
20
  },
21
21
  "keywords": [
22
22
  "steganography",
23
23
  "png",
24
- "brotli",
25
- "rox",
26
- "compress",
27
- "decompress",
24
+ "zstd",
25
+ "compression",
26
+ "encryption",
28
27
  "encode",
29
28
  "decode",
30
29
  "cli",
31
30
  "nodejs",
32
31
  "esm",
33
- "qrcode"
32
+ "data-embedding",
33
+ "file-archive",
34
+ "lossless",
35
+ "aes-gcm",
36
+ "binary-data"
34
37
  ],
35
38
  "author": "RoxCompressor",
36
39
  "license": "UNLICENSED",