roxify 1.2.9 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -1
- package/dist/cli.js +26 -2
- package/dist/hybrid-compression.d.ts +27 -0
- package/dist/hybrid-compression.js +90 -0
- package/dist/roxify-cli +0 -0
- package/dist/utils/encoder.js +1 -1
- package/libroxify_native.node +0 -0
- package/package.json +2 -2
- package/CHANGELOG.md +0 -74
package/README.md
CHANGED

@@ -13,10 +13,31 @@
 - **Multiple modes**: Compact, chunk, pixel, and screenshot modes
 - **CLI & API**: Use as command-line tool or JavaScript library
 - **Lossless**: Perfect roundtrip encoding/decoding
-- **Efficient**: Typically 0.01-0.05% of original size with Zstd compression
 - **Full TSDoc**: Complete TypeScript documentation
 - **Rust Powered**: Optional native module for extreme performance (falls back to pure JS)
 
+## Real-world benchmarks
+
+**Highlights**
+
+- Practical benchmarks on large codebase datasets showing significant compression and high throughput while handling many small files efficiently.
+
+**Results**
+
+| Dataset  |   Files | Original | Compressed |     Ratio |   Time | Throughput | Notes                                        |
+| -------- | ------: | -------: | ---------: | --------: | -----: | ---------: | -------------------------------------------- |
+| 4,000 MB | 731,340 |  3.93 GB |  111.42 MB |  **2.8%** | 26.9 s | 149.4 MB/s | gzip: 2.26 GB (57.5%); 7z: 1.87 GB (47.6%)   |
+| 1,000 MB | 141,522 |  1.03 GB |     205 MB | **19.4%** | ~6.2 s |  ≈170 MB/s | shows benefits for many-small-file datasets  |
+
+### Methodology
+
+- Compression: multithreaded Zstd (level 19) and Brotli (configurable).
+- Setup: parallel I/O and multithreaded compression on modern SSD-backed systems.
+- Measurements: wall-clock time; throughput = original size / time; comparisons against gzip and 7z with typical defaults.
+- Reproducibility: full benchmark details, commands, and raw data are available in `docs/BENCHMARK_FINAL_REPORT.md`.
+
+These results demonstrate Roxify's strength for packaging large codebases and many-small-file archives where speed and a good compression/throughput trade-off matter.
+
 ## Documentation
 
 - **[CLI Documentation](./CLI.md)** - Complete command-line usage guide
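The added README section defines throughput as original size divided by wall-clock time. As an editorial sanity check (not part of the package), the TypeScript sketch below recomputes the ratio and throughput of the first table row from the numbers quoted above:

```ts
// Sanity check of the first benchmark row (values copied from the README table above).
const originalMB = 3.93 * 1024;   // 3.93 GB expressed in MB (~4024 MB)
const compressedMB = 111.42;      // compressed output in MB
const seconds = 26.9;             // reported wall-clock time

const ratio = (compressedMB / originalMB) * 100;  // ≈ 2.77%, rounds to the reported 2.8%
const throughput = originalMB / seconds;          // ≈ 149.6 MB/s, matching the reported 149.4 MB/s within rounding

console.log(`ratio ≈ ${ratio.toFixed(2)}%, throughput ≈ ${throughput.toFixed(1)} MB/s`);
```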
package/dist/cli.js
CHANGED

@@ -1,10 +1,34 @@
 #!/usr/bin/env node
 import cliProgress from 'cli-progress';
 import { mkdirSync, readFileSync, statSync, writeFileSync } from 'fs';
+import { open } from 'fs/promises';
 import { basename, dirname, join, resolve } from 'path';
 import { DataFormatError, decodePngToBinary, encodeBinaryToPng, hasPassphraseInPng, IncorrectPassphraseError, listFilesInPng, PassphraseRequiredError, } from './index.js';
 import { packPathsGenerator, unpackBuffer } from './pack.js';
 const VERSION = '1.2.9';
+async function readLargeFile(filePath) {
+    const st = statSync(filePath);
+    if (st.size <= 2 * 1024 * 1024 * 1024) {
+        return readFileSync(filePath);
+    }
+    const chunkSize = 64 * 1024 * 1024;
+    const chunks = [];
+    let position = 0;
+    const fd = await open(filePath, 'r');
+    try {
+        while (position < st.size) {
+            const currentChunkSize = Math.min(chunkSize, st.size - position);
+            const buffer = Buffer.alloc(currentChunkSize);
+            const { bytesRead } = await fd.read(buffer, 0, currentChunkSize, position);
+            chunks.push(buffer.slice(0, bytesRead));
+            position += bytesRead;
+        }
+    }
+    finally {
+        await fd.close();
+    }
+    return Buffer.concat(chunks);
+}
 function showHelp() {
     console.log(`
 ROX CLI - Encode/decode binary in PNG
@@ -239,7 +263,7 @@ async function encodeCommand(args) {
         }));
     }
     else {
-        inputData =
+        inputData = await readLargeFile(resolvedInput);
         inputSizeVal = inputData.length;
         displayName = basename(resolvedInput);
         options.includeFileList = true;
@@ -410,7 +434,7 @@ async function decodeCommand(args) {
             currentStep = 'Done';
         }
     };
-    const inputBuffer =
+    const inputBuffer = await readLargeFile(resolvedInput);
     const result = await decodePngToBinary(inputBuffer, options);
     const decodeTime = Date.now() - startDecode;
     clearInterval(heartbeat);
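The new `readLargeFile` helper matters because Node's `fs.readFileSync` rejects files larger than 2 GiB (`ERR_FS_FILE_TOO_LARGE`); above that threshold the CLI now reads the input in 64 MiB chunks through a promise-based file handle and concatenates them. A self-contained sketch of the same pattern, with a hypothetical file path and not taken from the package:

```ts
import { open } from 'fs/promises';
import { statSync } from 'fs';

// Chunked whole-file read, mirroring the pattern added in dist/cli.js.
// Useful above the ~2 GiB readFileSync limit, as long as the result
// still fits in a single Buffer.
async function readInChunks(filePath: string, chunkSize = 64 * 1024 * 1024): Promise<Buffer> {
    const { size } = statSync(filePath);
    const chunks: Buffer[] = [];
    const fd = await open(filePath, 'r');
    try {
        let position = 0;
        while (position < size) {
            const buffer = Buffer.alloc(Math.min(chunkSize, size - position));
            const { bytesRead } = await fd.read(buffer, 0, buffer.length, position);
            chunks.push(buffer.subarray(0, bytesRead));
            position += bytesRead;
        }
    } finally {
        await fd.close();
    }
    return Buffer.concat(chunks);
}

// Usage (hypothetical path):
// const data = await readInChunks('/tmp/large-input.bin');
```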
package/dist/hybrid-compression.d.ts
ADDED

@@ -0,0 +1,27 @@
+/// <reference types="node" />
+/// <reference types="node" />
+export interface CompressionStats {
+    originalSize: number;
+    compressedSize: number;
+    ratio: number;
+    entropyBits: number;
+    blocksCount: number;
+    estimatedThroughput: number;
+}
+export interface GpuInfo {
+    available: boolean;
+    adapterInfo?: string;
+}
+export declare class HybridCompressor {
+    private gpuAvailable;
+    constructor();
+    compress(data: Buffer): Promise<Buffer>;
+    decompress(data: Buffer): Promise<Buffer>;
+    getStats(data: Buffer): CompressionStats;
+    getEntropy(data: Buffer): number;
+    getGpuStatus(): GpuInfo;
+    isGpuAvailable(): boolean;
+}
+export declare function compressBuffer(data: Buffer): Promise<Buffer>;
+export declare function decompressBuffer(data: Buffer): Promise<Buffer>;
+export declare function analyzeCompression(data: Buffer): void;
package/dist/hybrid-compression.js
ADDED

@@ -0,0 +1,90 @@
+let check_gpu_status;
+let entropy_estimate;
+let get_compression_stats;
+let hybrid_compress;
+let hybrid_decompress;
+try {
+    const native = require('../libroxify_native.node');
+    check_gpu_status = native.check_gpu_status;
+    entropy_estimate = native.entropy_estimate;
+    get_compression_stats = native.get_compression_stats;
+    hybrid_compress = native.hybrid_compress;
+    hybrid_decompress = native.hybrid_decompress;
+}
+catch (e) {
+    console.warn('Warning: Native module not loaded, using stubs');
+}
+export class HybridCompressor {
+    constructor() {
+        const status = check_gpu_status();
+        this.gpuAvailable = status.available;
+        if (this.gpuAvailable) {
+            console.log(`[HybridCompressor] GPU available`);
+        }
+        else {
+            console.log(`[HybridCompressor] GPU unavailable, falling back to CPU`);
+        }
+    }
+    async compress(data) {
+        const start = performance.now();
+        const compressed = hybrid_compress(data);
+        const elapsed = (performance.now() - start) / 1000;
+        const throughput = data.length / elapsed / 1e6;
+        console.log(`[Compression] ${data.length} bytes → ${compressed.length} bytes ` +
+            `(${((compressed.length / data.length) * 100).toFixed(2)}%) ` +
+            `in ${elapsed.toFixed(3)}s (${throughput.toFixed(0)} MB/s)`);
+        return compressed;
+    }
+    async decompress(data) {
+        const start = performance.now();
+        const decompressed = hybrid_decompress(data);
+        const elapsed = (performance.now() - start) / 1000;
+        console.log(`[Decompression] ${data.length} bytes → ${decompressed.length} bytes ` +
+            `in ${elapsed.toFixed(3)}s`);
+        return decompressed;
+    }
+    getStats(data) {
+        const start = performance.now();
+        const stats = get_compression_stats(data);
+        const elapsed = (performance.now() - start) / 1000;
+        const throughput = data.length / elapsed / 1e6;
+        return {
+            originalSize: stats.original_size,
+            compressedSize: stats.compressed_size,
+            ratio: stats.ratio,
+            entropyBits: stats.entropy_bits,
+            blocksCount: stats.blocks_count,
+            estimatedThroughput: throughput,
+        };
+    }
+    getEntropy(data) {
+        return entropy_estimate(data);
+    }
+    getGpuStatus() {
+        return check_gpu_status();
+    }
+    isGpuAvailable() {
+        return this.gpuAvailable;
+    }
+}
+export async function compressBuffer(data) {
+    const compressor = new HybridCompressor();
+    return compressor.compress(data);
+}
+export async function decompressBuffer(data) {
+    const compressor = new HybridCompressor();
+    return compressor.decompress(data);
+}
+export function analyzeCompression(data) {
+    const compressor = new HybridCompressor();
+    console.log('\n=== Compression Analysis ===');
+    console.log(`Original size: ${(data.length / 1e6).toFixed(2)} MB`);
+    console.log(`Entropy: ${compressor.getEntropy(data).toFixed(2)} bits`);
+    const stats = compressor.getStats(data);
+    console.log(`\nCompression stats:`);
+    console.log(`  Blocks: ${stats.blocksCount}`);
+    console.log(`  Ratio: ${(stats.ratio * 100).toFixed(2)}%`);
+    console.log(`  Entropy bits: ${stats.entropyBits.toFixed(2)}`);
+    console.log(`  Estimated throughput: ${stats.estimatedThroughput.toFixed(0)} MB/s`);
+    console.log(`\nGPU status: ${compressor.isGpuAvailable() ? 'Available' : 'Unavailable'}`);
+}
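For context, the declarations in `hybrid-compression.d.ts` above suggest the following usage. This is an editorial sketch based only on the exported API shown in this diff; it assumes the module can be deep-imported from the published `dist/` directory (the diff does not show whether the main entry re-exports it) and that the native module loads, since the stub path leaves the internal functions undefined:

```ts
// Illustrative only: exercises the API surface declared in hybrid-compression.d.ts.
import {
  HybridCompressor,
  compressBuffer,
  analyzeCompression,
} from 'roxify/dist/hybrid-compression.js'; // assumed deep-import path

const input = Buffer.from('some data to compress '.repeat(1000));

const compressor = new HybridCompressor();
console.log('GPU available:', compressor.isGpuAvailable());

const compressed = await compressor.compress(input);
const restored = await compressor.decompress(compressed);
console.log('roundtrip ok:', restored.equals(input));

// Convenience wrapper and the stats/entropy helper:
const alsoCompressed = await compressBuffer(input);
console.log('wrapper output size:', alsoCompressed.length);
analyzeCompression(input);
```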
package/dist/roxify-cli
CHANGED

Binary file
package/dist/utils/encoder.js
CHANGED

@@ -53,7 +53,7 @@ export async function encodeBinaryToPng(input, opts = {}) {
     }
     if (opts.onProgress)
         opts.onProgress({ phase: 'compress_start', total: totalLen });
-    const compressionLevel = opts.compressionLevel ??
+    const compressionLevel = opts.compressionLevel ?? 7;
     let payload = await parallelZstdCompress(payloadInput, compressionLevel, (loaded, total) => {
         if (opts.onProgress) {
             opts.onProgress({
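The hunk above sets the fallback Zstd level to 7 when the caller passes none (the removed line is truncated in this diff view; the deleted CHANGELOG lists 3 as the earlier default). Callers can still pick their own level through the options object. A minimal sketch, assuming only the `encodeBinaryToPng(input, opts)` signature and the `compressionLevel` / `onProgress` options visible elsewhere in this diff; the exact return shape is not shown, so it is left untyped:

```ts
import { encodeBinaryToPng } from 'roxify'; // assumed main-entry export, per dist/cli.js imports

// Override the Zstd level explicitly instead of relying on the new fallback of 7.
const data = Buffer.from('payload to embed in a PNG');
const result = await encodeBinaryToPng(data, {
    compressionLevel: 19,
    onProgress: (p: { phase: string }) => console.log('progress:', p.phase),
});
console.log('encode finished', result);
```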
package/libroxify_native.node
CHANGED

Binary file
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "roxify",
-  "version": "1.
+  "version": "1.3.0",
   "description": "Encode binary data into PNG images and decode them back. CLI and programmatic API with native Rust acceleration.",
   "type": "module",
   "main": "dist/index.js",
@@ -22,7 +22,7 @@
     "build:cli": "cargo build --release --bin roxify_native && cp target/release/roxify_native dist/roxify-cli",
     "build:all": "npm run build:native && npm run build && npm run build:cli",
     "prepublishOnly": "npm run build:all",
-    "test": "node test/
+    "test": "node test/test-*.js",
     "cli": "node dist/cli.js"
   },
   "keywords": [
package/CHANGELOG.md
DELETED

@@ -1,74 +0,0 @@
-# Changelog
-
-## [1.2.10] - 2026-01-09
-
-### Performance
-
-- **MASSIVE file packing speedup**: 18,750 files (660MB) now in **7 seconds** (was 18s)
-- Parallelized file reading with `fs.promises.readFile()` and `Promise.all()` batching
-- Batch size optimized to 1000 files per parallel read
-- Improved buffer concatenation strategy (array accumulation + single concat)
-- Added error handling for unreadable files during parallel reads
-
-### Benchmarks
-
-- Single file 1GB: 389ms (2.63 GB/s)
-- Directory 18,750 files (660MB): **6.8s** (97 MB/s including I/O overhead)
-
-## [1.2.9] - 2026-01-09
-
-### Performance
-
-- **EXTREME SPEED**: 1GB encode in 0.39s (**2.6 GB/s throughput**)
-- Optimized PNG pixel copying from byte-by-byte loops to bulk Buffer.copy() operations
-- Reduced PNG deflate overhead by using zlib level 0 (data already compressed with Zstd)
-- Lowered large image threshold from 50M to 10M pixels for faster manual PNG generation
-- Default Zstd compression level changed from 15 to 3 (much faster, still excellent ratio)
-
-### Changed
-
-- Added `compressionLevel` option to `EncodeOptions` (default: 3)
-- Added `skipOptimization` option to disable zopfli PNG optimization
-- CLI now disables PNG optimization by default for maximum speed
-
-### Benchmarks
-
-- 1KB: 14.77ms
-- 100MB: 63.74ms (1.57 GB/s)
-- 500MB: 203ms (2.46 GB/s)
-- 1GB: 389ms (2.63 GB/s)
-
-## [1.2.8] - 2026-01-09
-
-### Added
-
-- **Native Rust acceleration** via N-API for extreme performance
-- Delta encoding/decoding with Rayon parallelization
-- Multi-threaded Zstd compression (level 19) with `zstdmt` feature
-- Fast CRC32 and Adler32 checksums
-- Parallel pixel scanning for ROX1 magic and markers
-- **Performance improvements**: Up to 1GB/s throughput on modern hardware
-- 1GB encode: ~1.2s (863 MB/s)
-- 1GB decode: ~1.0s (1031 MB/s)
-- **Automatic fallback**: Pure TypeScript implementation when native module unavailable
-- **Unified repository**: Rust and TypeScript code in single npm package
-
-### Changed
-
-- Switched from `@mongodb-js/zstd` to native Rust zstd for better performance
-- Updated package description to highlight native acceleration
-- Compression ratio improved to 0.01-0.05% with Zstd level 19
-
-### Technical
-
-- Added `build:native` and `build:all` npm scripts
-- Native module compiled to `libroxify_native.node` (1.8MB)
-- Cargo workspace configured with `native/` directory
-- Updated dependencies: Rust crates (napi, rayon, zstd, crc32fast, adler)
-
-## [1.0.4] - Previous release
-
-- Initial TypeScript implementation
-- Brotli compression
-- Multiple encoding modes (compact, chunk, pixel, screenshot)
-- AES-256-GCM encryption support