roxify 1.4.1 → 1.5.0

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
package/dist/cli.js CHANGED
@@ -1,10 +1,10 @@
  #!/usr/bin/env node
- import cliProgress from 'cli-progress';
  import { mkdirSync, readFileSync, statSync, writeFileSync } from 'fs';
  import { open } from 'fs/promises';
  import { basename, dirname, join, resolve } from 'path';
  import { DataFormatError, decodePngToBinary, encodeBinaryToPng, hasPassphraseInPng, IncorrectPassphraseError, listFilesInPng, PassphraseRequiredError, } from './index.js';
  import { packPathsGenerator, unpackBuffer } from './pack.js';
+ import * as cliProgress from './stub-progress.js';
  import { encodeWithRustCLI, isRustBinaryAvailable, } from './utils/rust-cli-wrapper.js';
  const VERSION = '1.4.0';
  async function readLargeFile(filePath) {
@@ -1,5 +1,3 @@
- /// <reference types="node" />
- /// <reference types="node" />
  export interface CompressionStats {
      originalSize: number;
      compressedSize: number;
@@ -1,10 +1,10 @@
+ import { native } from './utils/native.js';
  let check_gpu_status;
  let entropy_estimate;
  let get_compression_stats;
  let hybrid_compress;
  let hybrid_decompress;
  try {
-     const native = require('../libroxify_native.node');
      check_gpu_status = native.check_gpu_status;
      entropy_estimate = native.entropy_estimate;
      get_compression_stats = native.get_compression_stats;
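Several rewritten modules now import a shared `native` object (`./utils/native.js` from the dist root, `./native.js` inside `dist/utils/`) instead of calling `require('../libroxify_native.node')` in each file. That loader module is not included in this diff; the sketch below is only a guess at its shape, based on the addon path removed above and the optional-chaining feature checks (`native?.sharpMetadata`, `native?.nativeCrc32`) used by the callers.

```js
// Hypothetical dist/utils/native.js (not shown in this diff) — assumed shape only.
import { createRequire } from 'module';

const require = createRequire(import.meta.url);

let native = {};
try {
    // Same addon the 1.4.x code loaded per file, here resolved from dist/utils/.
    native = require('../../libroxify_native.node');
}
catch (e) {
    // Leave `native` as an empty object so callers can feature-detect
    // with optional chaining (native?.sharpMetadata, native?.nativeCrc32, ...).
}

export { native };
```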
package/dist/minpng.d.ts CHANGED
@@ -1,5 +1,3 @@
- /// <reference types="node" />
- /// <reference types="node" />
  export declare function encodeMinPng(rgb: Buffer, width: number, height: number): Promise<Buffer>;
  export declare function decodeMinPng(pngBuf: Buffer): Promise<{
      buf: Buffer;
package/dist/minpng.js CHANGED
@@ -1,6 +1,20 @@
- import { compress as zstdCompress, decompress as zstdDecompress, } from '@mongodb-js/zstd';
- import encode from 'png-chunks-encode';
  import { deflateSync } from 'zlib';
+ import { native } from './utils/native.js';
+ let nativeZstdCompress = null;
+ let nativeZstdDecompress = null;
+ let nativeEncodePngChunks = null;
+ try {
+     if (native?.nativeZstdCompress) {
+         nativeZstdCompress = native.nativeZstdCompress;
+     }
+     if (native?.nativeZstdDecompress) {
+         nativeZstdDecompress = native.nativeZstdDecompress;
+     }
+     if (native?.encodePngChunks) {
+         nativeEncodePngChunks = native.encodePngChunks;
+     }
+ }
+ catch (e) { }
  const PIXEL_MAGIC = Buffer.from('MNPG');
  const MARKER_START = [
      { r: 255, g: 0, b: 0 },
@@ -82,7 +96,10 @@ export async function encodeMinPng(rgb, width, height) {
          transformed[tIdx++] = b;
      }
      const transformedBuf = Buffer.from(transformed);
-     const compressed = Buffer.from(await zstdCompress(transformedBuf, 19));
+     if (!nativeZstdCompress) {
+         throw new Error('Native zstd compression not available');
+     }
+     const compressed = Buffer.from(nativeZstdCompress(transformedBuf, 19));
      const header = Buffer.alloc(4 + 1 + 4 + 4);
      PIXEL_MAGIC.copy(header, 0);
      header[4] = 1;
@@ -142,16 +159,29 @@ export async function encodeMinPng(rgb, width, height) {
          { name: 'IDAT', data: idat },
          { name: 'IEND', data: Buffer.alloc(0) },
      ];
-     return Buffer.from(encode(chunks));
+     if (nativeEncodePngChunks) {
+         return Buffer.from(nativeEncodePngChunks(chunks));
+     }
+     const PNG_SIG = Buffer.from([137, 80, 78, 71, 13, 10, 26, 10]);
+     const output = [PNG_SIG];
+     for (const chunk of chunks) {
+         const type = Buffer.from(chunk.name, 'ascii');
+         const length = Buffer.alloc(4);
+         length.writeUInt32BE(chunk.data.length, 0);
+         const crcData = Buffer.concat([type, chunk.data]);
+         const crc = Buffer.alloc(4);
+         const crc32fast = native?.nativeCrc32;
+         const crcVal = crc32fast ? crc32fast(crcData) : 0;
+         crc.writeUInt32BE(crcVal, 0);
+         output.push(length, type, chunk.data, crc);
+     }
+     return Buffer.concat(output);
  }
  export async function decodeMinPng(pngBuf) {
-     const sharp = await import('sharp');
-     const { data, info } = await sharp
-         .default(pngBuf)
-         .raw()
-         .toBuffer({ resolveWithObject: true });
-     const currentWidth = info.width;
-     const currentHeight = info.height;
+     const rawData = native.sharpToRaw(pngBuf);
+     const data = rawData.pixels;
+     const currentWidth = rawData.width;
+     const currentHeight = rawData.height;
      const rawRGB = Buffer.alloc(currentWidth * currentHeight * 3);
      for (let i = 0; i < currentWidth * currentHeight; i++) {
          rawRGB[i * 3] = data[i * 3];
@@ -190,7 +220,10 @@ export async function decodeMinPng(pngBuf) {
      if (compStart + compressedLen > rawRGB.length)
          return null;
      const compressed = rawRGB.subarray(compStart, compStart + compressedLen);
-     const decompressed = Buffer.from(await zstdDecompress(compressed));
+     if (!nativeZstdDecompress) {
+         throw new Error('Native zstd decompression not available');
+     }
+     const decompressed = Buffer.from(nativeZstdDecompress(compressed));
      const indices = zigzagOrderIndices(origW, origH);
      const residualR = new Uint8Array(origW * origH);
      const residualG = new Uint8Array(origW * origH);
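The new fallback in encodeMinPng assembles the PNG container by hand: the 8-byte signature, then for each chunk a 4-byte big-endian length, the 4-byte ASCII type, the payload, and a CRC-32 over type + payload. Note that when `native?.nativeCrc32` is unavailable the fallback writes a zero CRC, which strict PNG readers may reject. Below is a self-contained sketch of the same layout with the CRC actually computed in JavaScript; the table-based `crc32` helper is illustrative and not part of the package.

```js
import { Buffer } from 'buffer';

// Standard CRC-32 (IEEE), table-driven; used only for this illustration.
const CRC_TABLE = new Uint32Array(256).map((_, n) => {
    let c = n;
    for (let k = 0; k < 8; k++) {
        c = c & 1 ? 0xedb88320 ^ (c >>> 1) : c >>> 1;
    }
    return c >>> 0;
});

function crc32(buf) {
    let c = 0xffffffff;
    for (let i = 0; i < buf.length; i++) {
        c = CRC_TABLE[(c ^ buf[i]) & 0xff] ^ (c >>> 8);
    }
    return (c ^ 0xffffffff) >>> 0;
}

// Assemble a PNG byte stream from { name, data } chunks, as the fallback does.
function encodePngChunks(chunks) {
    const PNG_SIG = Buffer.from([137, 80, 78, 71, 13, 10, 26, 10]);
    const output = [PNG_SIG];
    for (const chunk of chunks) {
        const type = Buffer.from(chunk.name, 'ascii');
        const length = Buffer.alloc(4);
        length.writeUInt32BE(chunk.data.length, 0);
        const crc = Buffer.alloc(4);
        crc.writeUInt32BE(crc32(Buffer.concat([type, chunk.data])), 0);
        output.push(length, type, chunk.data, crc);
    }
    return Buffer.concat(output);
}
```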
package/dist/pack.d.ts CHANGED
@@ -1,5 +1,3 @@
- /// <reference types="node" />
- /// <reference types="node" />
  export interface PackedFile {
      path: string;
      buf: Buffer;
package/dist/roxify-cli CHANGED
Binary file
package/dist/stub-progress.d.ts ADDED
@@ -0,0 +1,9 @@
+ export declare class SingleBar {
+     constructor(...args: any[]);
+     start(...args: any[]): void;
+     update(...args: any[]): void;
+     stop(...args: any[]): void;
+ }
+ export declare const Presets: {
+     shades_classic: {};
+ };
package/dist/stub-progress.js ADDED
@@ -0,0 +1,9 @@
+ export class SingleBar {
+     constructor(...args) { }
+     start(...args) { }
+     update(...args) { }
+     stop(...args) { }
+ }
+ export const Presets = {
+     shades_classic: {},
+ };
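For reference, the stub above keeps the call shape of the `cli-progress` `SingleBar` API that 1.4.x used, so existing call sites still type-check but render nothing. A usage sketch (the format string and start arguments are copied from the call sites removed elsewhere in this diff; the update values are illustrative):

```js
import * as cliProgress from './stub-progress.js';

// Same construction and calls the old cli-progress code made — now all no-ops.
const bar = new cliProgress.SingleBar({
    format: ' {bar} {percentage}% | {step} | {elapsed}s',
}, cliProgress.Presets.shades_classic);

bar.start(100, 0, { step: 'Starting', elapsed: '0' });
bar.update(50, { step: 'extract pixels', elapsed: '3' }); // discarded
bar.stop(); // nothing was ever rendered
```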
@@ -1,14 +1,12 @@
- /// <reference types="node" />
- /// <reference types="node" />
  export declare const CHUNK_TYPE = "rXDT";
- export declare const MAGIC: Buffer;
- export declare const PIXEL_MAGIC: Buffer;
- export declare const PIXEL_MAGIC_BLOCK: Buffer;
+ export declare const MAGIC: Buffer<ArrayBuffer>;
+ export declare const PIXEL_MAGIC: Buffer<ArrayBuffer>;
+ export declare const PIXEL_MAGIC_BLOCK: Buffer<ArrayBuffer>;
  export declare const ENC_NONE = 0;
  export declare const ENC_AES = 1;
  export declare const ENC_XOR = 2;
- export declare const FILTER_ZERO: Buffer;
- export declare const PNG_HEADER: Buffer;
+ export declare const FILTER_ZERO: Buffer<ArrayBuffer>;
+ export declare const PNG_HEADER: Buffer<ArrayBuffer>;
  export declare const PNG_HEADER_HEX: string;
  export declare const MARKER_COLORS: {
      r: number;
@@ -1,4 +1,2 @@
- /// <reference types="node" />
- /// <reference types="node" />
  export declare function crc32(buf: Buffer, previous?: number): number;
  export declare function adler32(buf: Buffer, prev?: number): number;
@@ -1,4 +1,2 @@
- /// <reference types="node" />
- /// <reference types="node" />
  import { DecodeOptions, DecodeResult } from './types.js';
  export declare function decodePngToBinary(input: Buffer | string, opts?: DecodeOptions): Promise<DecodeResult>;
@@ -1,14 +1,12 @@
  import { execFileSync } from 'child_process';
- import cliProgress from 'cli-progress';
  import { mkdtempSync, readFileSync, rmSync, writeFileSync } from 'fs';
  import { tmpdir } from 'os';
  import { join } from 'path';
- import extract from 'png-chunks-extract';
- import sharp from 'sharp';
  import { unpackBuffer } from '../pack.js';
  import { CHUNK_TYPE, MAGIC, MARKER_END, MARKER_START, PIXEL_MAGIC, PIXEL_MAGIC_BLOCK, PNG_HEADER, } from './constants.js';
  import { DataFormatError, IncorrectPassphraseError, PassphraseRequiredError, } from './errors.js';
  import { colorsToBytes, deltaDecode, tryDecryptIfNeeded } from './helpers.js';
+ import { native } from './native.js';
  import { cropAndReconstitute } from './reconstitution.js';
  import { parallelZstdDecompress, tryZstdDecompress } from './zstd.js';
  async function tryDecompress(payload, onProgress) {
@@ -90,11 +88,17 @@ export async function decodePngToBinary(input, opts = {}) {
      }
      else {
          try {
-             const metadata = await sharp(input).metadata();
-             const rawBytesEstimate = (metadata.width || 0) * (metadata.height || 0) * 4;
-             const MAX_RAW_BYTES = 200 * 1024 * 1024;
-             if (rawBytesEstimate > MAX_RAW_BYTES) {
-                 pngBuf = readFileSync(input);
+             if (native?.sharpMetadata) {
+                 const inputBuf = readFileSync(input);
+                 const metadata = native.sharpMetadata(inputBuf);
+                 const rawBytesEstimate = metadata.width * metadata.height * 4;
+                 const MAX_RAW_BYTES = 200 * 1024 * 1024;
+                 if (rawBytesEstimate > MAX_RAW_BYTES) {
+                     pngBuf = inputBuf;
+                 }
+                 else {
+                     pngBuf = inputBuf;
+                 }
              }
              else {
                  pngBuf = readFileSync(input);
@@ -111,10 +115,11 @@ export async function decodePngToBinary(input, opts = {}) {
      }
      let progressBar = null;
      if (opts.showProgress) {
-         progressBar = new cliProgress.SingleBar({
-             format: ' {bar} {percentage}% | {step} | {elapsed}s',
-         }, cliProgress.Presets.shades_classic);
-         progressBar.start(100, 0, { step: 'Starting', elapsed: '0' });
+         progressBar = {
+             start: () => { },
+             update: () => { },
+             stop: () => { },
+         };
          const startTime = Date.now();
          if (!opts.onProgress) {
              opts.onProgress = (info) => {
@@ -128,10 +133,6 @@ export async function decodePngToBinary(input, opts = {}) {
                  else if (info.phase === 'done') {
                      pct = 100;
                  }
-                 progressBar.update(Math.floor(pct), {
-                     step: info.phase.replace('_', ' '),
-                     elapsed: String(Math.floor((Date.now() - startTime) / 1000)),
-                 });
              };
          }
      }
@@ -139,28 +140,17 @@ export async function decodePngToBinary(input, opts = {}) {
      opts.onProgress({ phase: 'start' });
      let processedBuf = pngBuf;
      try {
-         const info = await sharp(pngBuf).metadata();
-         if (info.width && info.height) {
-             const MAX_RAW_BYTES = 1200 * 1024 * 1024;
-             const rawBytesEstimate = info.width * info.height * 4;
-             if (rawBytesEstimate > MAX_RAW_BYTES) {
-                 throw new DataFormatError(`Image too large to decode in-process (${Math.round(rawBytesEstimate / 1024 / 1024)} MB). Increase Node heap or use a smaller image/compact mode.`);
-             }
-             if (false) {
-                 const doubledBuffer = await sharp(pngBuf)
-                     .resize({
-                         width: info.width * 2,
-                         height: info.height * 2,
-                         kernel: 'nearest',
-                     })
-                     .png()
-                     .toBuffer();
-                 processedBuf = await cropAndReconstitute(doubledBuffer, opts.debugDir);
-             }
-             else {
-                 processedBuf = pngBuf;
+         if (native?.sharpMetadata) {
+             const info = native.sharpMetadata(pngBuf);
+             if (info.width && info.height) {
+                 const MAX_RAW_BYTES = 1200 * 1024 * 1024;
+                 const rawBytesEstimate = info.width * info.height * 4;
+                 if (rawBytesEstimate > MAX_RAW_BYTES) {
+                     throw new DataFormatError(`Image too large to decode in-process (${Math.round(rawBytesEstimate / 1024 / 1024)} MB). Increase Node heap or use a smaller image/compact mode.`);
+                 }
              }
          }
+         processedBuf = pngBuf;
      }
      catch (e) {
          if (e instanceof DataFormatError)
@@ -204,24 +194,30 @@ export async function decodePngToBinary(input, opts = {}) {
      }
      let chunks = [];
      try {
-         const chunksRaw = extract(processedBuf);
-         chunks = chunksRaw.map((c) => ({
-             name: c.name,
-             data: Buffer.isBuffer(c.data)
-                 ? c.data
-                 : Buffer.from(c.data),
-         }));
+         if (native?.extractPngChunks) {
+             const chunksRaw = native.extractPngChunks(processedBuf);
+             chunks = chunksRaw.map((c) => ({
+                 name: c.name,
+                 data: Buffer.from(c.data),
+             }));
+         }
+         else {
+             throw new Error('Native PNG chunk extraction not available');
+         }
      }
      catch (e) {
          try {
              const withHeader = Buffer.concat([PNG_HEADER, pngBuf]);
-             const chunksRaw = extract(withHeader);
-             chunks = chunksRaw.map((c) => ({
-                 name: c.name,
-                 data: Buffer.isBuffer(c.data)
-                     ? c.data
-                     : Buffer.from(c.data),
-             }));
+             if (native?.extractPngChunks) {
+                 const chunksRaw = native.extractPngChunks(withHeader);
+                 chunks = chunksRaw.map((c) => ({
+                     name: c.name,
+                     data: Buffer.from(c.data),
+                 }));
+             }
+             else {
+                 throw new Error('Native PNG chunk extraction not available');
+             }
          }
          catch (e2) {
              chunks = [];
@@ -274,28 +270,21 @@ export async function decodePngToBinary(input, opts = {}) {
          return { buf: payload, meta: { name } };
      }
      try {
-         const metadata = await sharp(processedBuf).metadata();
+         const metadata = native.sharpMetadata(processedBuf);
          const currentWidth = metadata.width;
          const currentHeight = metadata.height;
          let rawRGB = Buffer.alloc(0);
          let isBlockEncoded = false;
          if (currentWidth % 2 === 0 && currentHeight % 2 === 0) {
-             const { data: testData } = await sharp(processedBuf)
-                 .extract({
-                     left: 0,
-                     top: 0,
-                     width: Math.min(4, currentWidth),
-                     height: Math.min(4, currentHeight),
-                 })
-                 .raw()
-                 .toBuffer({ resolveWithObject: true });
+             const rawData = native.sharpToRaw(processedBuf);
+             const testData = rawData.pixels;
              let hasBlockPattern = true;
              for (let y = 0; y < Math.min(2, currentHeight / 2); y++) {
                  for (let x = 0; x < Math.min(2, currentWidth / 2); x++) {
-                     const px00 = (y * 2 * Math.min(4, currentWidth) + x * 2) * 3;
-                     const px01 = (y * 2 * Math.min(4, currentWidth) + (x * 2 + 1)) * 3;
-                     const px10 = ((y * 2 + 1) * Math.min(4, currentWidth) + x * 2) * 3;
-                     const px11 = ((y * 2 + 1) * Math.min(4, currentWidth) + (x * 2 + 1)) * 3;
+                     const px00 = (y * 2 * currentWidth + x * 2) * 3;
+                     const px01 = (y * 2 * currentWidth + (x * 2 + 1)) * 3;
+                     const px10 = ((y * 2 + 1) * currentWidth + x * 2) * 3;
+                     const px11 = ((y * 2 + 1) * currentWidth + (x * 2 + 1)) * 3;
                      if (testData[px00] !== testData[px01] ||
                          testData[px00] !== testData[px10] ||
                          testData[px00] !== testData[px11] ||
@@ -314,56 +303,28 @@ export async function decodePngToBinary(input, opts = {}) {
                  const blocksWide = currentWidth / 2;
                  const blocksHigh = currentHeight / 2;
                  rawRGB = Buffer.alloc(blocksWide * blocksHigh * 3);
+                 const fullRaw = native.sharpToRaw(processedBuf);
+                 const fullData = fullRaw.pixels;
                  let outIdx = 0;
                  for (let by = 0; by < blocksHigh; by++) {
                      for (let bx = 0; bx < blocksWide; bx++) {
-                         const { data: blockData } = await sharp(processedBuf)
-                             .extract({ left: bx * 2, top: by * 2, width: 1, height: 1 })
-                             .raw()
-                             .toBuffer({ resolveWithObject: true });
-                         rawRGB[outIdx++] = blockData[0];
-                         rawRGB[outIdx++] = blockData[1];
-                         rawRGB[outIdx++] = blockData[2];
+                         const pixelOffset = (by * 2 * currentWidth + bx * 2) * 3;
+                         rawRGB[outIdx++] = fullData[pixelOffset];
+                         rawRGB[outIdx++] = fullData[pixelOffset + 1];
+                         rawRGB[outIdx++] = fullData[pixelOffset + 2];
                      }
                  }
              }
          }
          if (!isBlockEncoded) {
-             rawRGB = Buffer.allocUnsafe(currentWidth * currentHeight * 3);
-             let writeOffset = 0;
-             const rowsPerChunk = 2000;
-             for (let startRow = 0; startRow < currentHeight; startRow += rowsPerChunk) {
-                 const endRow = Math.min(startRow + rowsPerChunk, currentHeight);
-                 const chunkHeight = endRow - startRow;
-                 const { data: chunkData, info: chunkInfo } = await sharp(processedBuf)
-                     .extract({
-                         left: 0,
-                         top: startRow,
-                         width: currentWidth,
-                         height: chunkHeight,
-                     })
-                     .raw()
-                     .toBuffer({ resolveWithObject: true });
-                 const channels = chunkInfo.channels;
-                 const pixelsInChunk = currentWidth * chunkHeight;
-                 if (channels === 3) {
-                     chunkData.copy(rawRGB, writeOffset);
-                     writeOffset += pixelsInChunk * 3;
-                 }
-                 else if (channels === 4) {
-                     for (let i = 0; i < pixelsInChunk; i++) {
-                         rawRGB[writeOffset++] = chunkData[i * 4];
-                         rawRGB[writeOffset++] = chunkData[i * 4 + 1];
-                         rawRGB[writeOffset++] = chunkData[i * 4 + 2];
-                     }
-                 }
-                 if (opts.onProgress) {
-                     opts.onProgress({
-                         phase: 'extract_pixels',
-                         loaded: endRow,
-                         total: currentHeight,
-                     });
-                 }
+             const rawData = native.sharpToRaw(processedBuf);
+             rawRGB = Buffer.from(rawData.pixels);
+             if (opts.onProgress) {
+                 opts.onProgress({
+                     phase: 'extract_pixels',
+                     loaded: currentHeight,
+                     total: currentHeight,
+                 });
              }
          }
          const firstPixels = [];
@@ -410,22 +371,10 @@ export async function decodePngToBinary(input, opts = {}) {
          }
          else {
              const reconstructed = await cropAndReconstitute(processedBuf, opts.debugDir);
-             const { data: rdata, info: rinfo } = await sharp(reconstructed)
-                 .raw()
-                 .toBuffer({ resolveWithObject: true });
-             logicalWidth = rinfo.width;
-             logicalHeight = rinfo.height;
-             logicalData = Buffer.alloc(rinfo.width * rinfo.height * 3);
-             if (rinfo.channels === 3) {
-                 rdata.copy(logicalData);
-             }
-             else if (rinfo.channels === 4) {
-                 for (let i = 0; i < logicalWidth * logicalHeight; i++) {
-                     logicalData[i * 3] = rdata[i * 4];
-                     logicalData[i * 3 + 1] = rdata[i * 4 + 1];
-                     logicalData[i * 3 + 2] = rdata[i * 4 + 2];
-                 }
-             }
+             const rawData = native.sharpToRaw(reconstructed);
+             logicalWidth = rawData.width;
+             logicalHeight = rawData.height;
+             logicalData = Buffer.from(rawData.pixels);
          }
          if (process.env.ROX_DEBUG) {
              console.log('DEBUG: Logical grid reconstructed:', logicalWidth, 'x', logicalHeight, '=', logicalWidth * logicalHeight, 'pixels');
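The block-decoding path above now fetches one raw RGB buffer via `native.sharpToRaw` and samples the top-left pixel of each 2×2 block, instead of issuing a `sharp().extract()` call per block. A small standalone sketch of that downsampling step, assuming `pixels` is a tightly packed RGB buffer with even `width` and `height`:

```js
import { Buffer } from 'buffer';

// Collapse a 2x2-block-encoded RGB image back to its logical pixel grid
// by keeping the top-left pixel of each block (mirrors the new decode path).
function downsampleBlocks(pixels, width, height) {
    const blocksWide = width / 2;
    const blocksHigh = height / 2;
    const out = Buffer.alloc(blocksWide * blocksHigh * 3);
    let outIdx = 0;
    for (let by = 0; by < blocksHigh; by++) {
        for (let bx = 0; bx < blocksWide; bx++) {
            const offset = (by * 2 * width + bx * 2) * 3;
            out[outIdx++] = pixels[offset];     // R
            out[outIdx++] = pixels[offset + 1]; // G
            out[outIdx++] = pixels[offset + 2]; // B
        }
    }
    return out;
}
```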
@@ -1,4 +1,2 @@
- /// <reference types="node" />
- /// <reference types="node" />
  import { EncodeOptions } from './types.js';
  export declare function encodeBinaryToPng(input: Buffer | Buffer[], opts?: EncodeOptions): Promise<Buffer>;
@@ -1,20 +1,19 @@
- import cliProgress from 'cli-progress';
  import { createCipheriv, pbkdf2Sync, randomBytes } from 'crypto';
- import sharp from 'sharp';
  import * as zlib from 'zlib';
  import { unpackBuffer } from '../pack.js';
  import { COMPRESSION_MARKERS, ENC_AES, ENC_NONE, ENC_XOR, MAGIC, MARKER_END, MARKER_START, PIXEL_MAGIC, PIXEL_MAGIC_BLOCK, PNG_HEADER, } from './constants.js';
  import { crc32 } from './crc.js';
  import { colorsToBytes } from './helpers.js';
- import { optimizePngBuffer } from './optimization.js';
+ import { native } from './native.js';
  import { parallelZstdCompress } from './zstd.js';
  export async function encodeBinaryToPng(input, opts = {}) {
      let progressBar = null;
      if (opts.showProgress) {
-         progressBar = new cliProgress.SingleBar({
-             format: ' {bar} {percentage}% | {step} | {elapsed}s',
-         }, cliProgress.Presets.shades_classic);
-         progressBar.start(100, 0, { step: 'Starting', elapsed: '0' });
+         progressBar = {
+             start: () => { },
+             update: () => { },
+             stop: () => { },
+         };
          const startTime = Date.now();
          if (!opts.onProgress) {
              opts.onProgress = (info) => {
@@ -34,10 +33,6 @@ export async function encodeBinaryToPng(input, opts = {}) {
                  else if (info.phase === 'done') {
                      pct = 100;
                  }
-                 progressBar.update(Math.floor(pct), {
-                     step: info.phase.replace('_', ' '),
-                     elapsed: String(Math.floor((Date.now() - startTime) / 1000)),
-                 });
              };
          }
      }
@@ -220,7 +215,7 @@ export async function encodeBinaryToPng(input, opts = {}) {
          lenBuf.writeUInt32BE(jsonBuf.length, 0);
          metaPixel = [...metaPixel, Buffer.from('rXFL', 'utf8'), lenBuf, jsonBuf];
      }
-     const useBlockEncoding = opts.useBlockEncoding ?? true;
+     const useBlockEncoding = false;
      const pixelMagic = useBlockEncoding ? PIXEL_MAGIC_BLOCK : PIXEL_MAGIC;
      const dataWithoutMarkers = [pixelMagic, ...metaPixel];
      const dataWithoutMarkersLen = dataWithoutMarkers.reduce((a, b) => a + b.length, 0);
@@ -261,15 +256,7 @@ export async function encodeBinaryToPng(input, opts = {}) {
              }
          }
      }
-         bufScr = await sharp(rgbBuffer, {
-             raw: { width, height, channels: 3 },
-         })
-             .png({
-                 compressionLevel: 9,
-                 adaptiveFiltering: true,
-                 effort: 9,
-             })
-             .toBuffer();
+         bufScr = Buffer.from(native.rgbToPng(rgbBuffer, width, height));
      }
      else {
          const bytesPerPixel = 3;
@@ -394,27 +381,10 @@ export async function encodeBinaryToPng(input, opts = {}) {
      else {
          const outputFormat = opts.outputFormat || 'png';
          if (outputFormat === 'webp') {
-             bufScr = await sharp(raw, {
-                 raw: { width, height, channels: 3 },
-             })
-                 .webp({
-                     lossless: true,
-                     quality: 100,
-                     effort: 6,
-                 })
-                 .toBuffer();
+             throw new Error('WebP output format not supported with native backend');
          }
          else {
-             bufScr = await sharp(raw, {
-                 raw: { width, height, channels: 3 },
-             })
-                 .png({
-                     compressionLevel: 3,
-                     palette: false,
-                     effort: 1,
-                     adaptiveFiltering: false,
-                 })
-                 .toBuffer();
+             bufScr = Buffer.from(native.rgbToPng(raw, width, height));
          }
      }
  }
@@ -426,22 +396,7 @@ export async function encodeBinaryToPng(input, opts = {}) {
      dataWithoutMarkers.length = 0;
      if (opts.onProgress)
          opts.onProgress({ phase: 'png_compress', loaded: 100, total: 100 });
-     if (opts.skipOptimization || opts.outputFormat === 'webp') {
-         progressBar?.stop();
-         return bufScr;
-     }
-     if (opts.onProgress)
-         opts.onProgress({ phase: 'optimizing', loaded: 0, total: 100 });
-     try {
-         const optimized = await optimizePngBuffer(bufScr, true);
-         if (opts.onProgress)
-             opts.onProgress({ phase: 'optimizing', loaded: 100, total: 100 });
-         progressBar?.stop();
-         return optimized;
-     }
-     catch (e) {
-         progressBar?.stop();
-         return bufScr;
-     }
+     progressBar?.stop();
+     return bufScr;
  }
  }
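With the native backend, requesting `outputFormat: 'webp'` now throws ('WebP output format not supported with native backend') and the post-encode `optimizePngBuffer` pass is gone entirely. A caller-side sketch of falling back to PNG when WebP is rejected; the option names and error message come from this diff, while the fallback strategy itself is an assumption:

```js
// Uses the same entry point dist/cli.js imports from.
import { encodeBinaryToPng } from './index.js';

async function encodePreferWebp(payload) {
    try {
        // 1.5.0 rejects this combination with
        // "WebP output format not supported with native backend".
        return await encodeBinaryToPng(payload, { outputFormat: 'webp' });
    }
    catch (e) {
        // Fall back to the default PNG container, which the native path supports.
        return await encodeBinaryToPng(payload, { outputFormat: 'png' });
    }
}
```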
@@ -1,5 +1,3 @@
- /// <reference types="node" />
- /// <reference types="node" />
  export declare function colorsToBytes(colors: Array<{
      r: number;
      g: number;
@@ -1,20 +1,18 @@
  import { createDecipheriv, pbkdf2Sync } from 'crypto';
  import { ENC_AES, ENC_NONE, ENC_XOR } from './constants.js';
  import { IncorrectPassphraseError, PassphraseRequiredError } from './errors.js';
+ import { native } from './native.js';
  let nativeDeltaEncode = null;
  let nativeDeltaDecode = null;
  let hasNative = false;
  try {
-     const native = require('../../libroxify_native.node');
      if (native?.nativeDeltaEncode && native?.nativeDeltaDecode) {
          nativeDeltaEncode = native.nativeDeltaEncode;
          nativeDeltaDecode = native.nativeDeltaDecode;
          hasNative = true;
      }
  }
- catch (e) {
-     // Native module not available, will use TS fallback
- }
+ catch (e) { }
  export function colorsToBytes(colors) {
      const buf = Buffer.alloc(colors.length * 3);
      for (let i = 0; i < colors.length; i++) {
@@ -1,5 +1,3 @@
- /// <reference types="node" />
- /// <reference types="node" />
  /**
   * List files in a Rox PNG archive without decoding the full payload.
   * Returns the file list if available, otherwise null.