roxify 1.4.1 → 1.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -1,10 +1,10 @@
  #!/usr/bin/env node
- import cliProgress from 'cli-progress';
  import { mkdirSync, readFileSync, statSync, writeFileSync } from 'fs';
  import { open } from 'fs/promises';
  import { basename, dirname, join, resolve } from 'path';
  import { DataFormatError, decodePngToBinary, encodeBinaryToPng, hasPassphraseInPng, IncorrectPassphraseError, listFilesInPng, PassphraseRequiredError, } from './index.js';
  import { packPathsGenerator, unpackBuffer } from './pack.js';
+ import * as cliProgress from './stub-progress.js';
  import { encodeWithRustCLI, isRustBinaryAvailable, } from './utils/rust-cli-wrapper.js';
  const VERSION = '1.4.0';
  async function readLargeFile(filePath) {
@@ -1,5 +1,3 @@
- /// <reference types="node" />
- /// <reference types="node" />
  export interface CompressionStats {
  originalSize: number;
  compressedSize: number;
@@ -1,10 +1,10 @@
+ import { native } from './utils/native.js';
  let check_gpu_status;
  let entropy_estimate;
  let get_compression_stats;
  let hybrid_compress;
  let hybrid_decompress;
  try {
- const native = require('../libroxify_native.node');
  check_gpu_status = native.check_gpu_status;
  entropy_estimate = native.entropy_estimate;
  get_compression_stats = native.get_compression_stats;
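Note: the new './utils/native.js' wrapper itself is not included in this diff. Since an ES module cannot call require() on a .node addon directly, a loader roughly like the sketch below is one plausible shape for it; the addon path and the single `native` export are assumptions, not the package's actual code.

// Hypothetical sketch of utils/native.js (not from the package): load a prebuilt
// N-API addon from ESM and expose it as a nullable export, matching the
// optional-chaining calls (native?.foo) seen elsewhere in this diff.
import { createRequire } from 'module';
const require = createRequire(import.meta.url);
let native = null;
try {
  native = require('../libroxify_native.node'); // path assumed
} catch (e) {
  native = null; // a missing or incompatible binary leaves callers on their fallbacks
}
export { native };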
package/dist/minpng.d.ts CHANGED
@@ -1,5 +1,3 @@
- /// <reference types="node" />
- /// <reference types="node" />
  export declare function encodeMinPng(rgb: Buffer, width: number, height: number): Promise<Buffer>;
  export declare function decodeMinPng(pngBuf: Buffer): Promise<{
  buf: Buffer;
package/dist/minpng.js CHANGED
@@ -1,6 +1,20 @@
- import { compress as zstdCompress, decompress as zstdDecompress, } from '@mongodb-js/zstd';
- import encode from 'png-chunks-encode';
  import { deflateSync } from 'zlib';
+ import { native } from './utils/native.js';
+ let nativeZstdCompress = null;
+ let nativeZstdDecompress = null;
+ let nativeEncodePngChunks = null;
+ try {
+ if (native?.nativeZstdCompress) {
+ nativeZstdCompress = native.nativeZstdCompress;
+ }
+ if (native?.nativeZstdDecompress) {
+ nativeZstdDecompress = native.nativeZstdDecompress;
+ }
+ if (native?.encodePngChunks) {
+ nativeEncodePngChunks = native.encodePngChunks;
+ }
+ }
+ catch (e) { }
  const PIXEL_MAGIC = Buffer.from('MNPG');
  const MARKER_START = [
  { r: 255, g: 0, b: 0 },
@@ -82,7 +96,10 @@ export async function encodeMinPng(rgb, width, height) {
  transformed[tIdx++] = b;
  }
  const transformedBuf = Buffer.from(transformed);
- const compressed = Buffer.from(await zstdCompress(transformedBuf, 19));
+ if (!nativeZstdCompress) {
+ throw new Error('Native zstd compression not available');
+ }
+ const compressed = Buffer.from(nativeZstdCompress(transformedBuf, 19));
  const header = Buffer.alloc(4 + 1 + 4 + 4);
  PIXEL_MAGIC.copy(header, 0);
  header[4] = 1;
@@ -142,16 +159,29 @@ export async function encodeMinPng(rgb, width, height) {
  { name: 'IDAT', data: idat },
  { name: 'IEND', data: Buffer.alloc(0) },
  ];
- return Buffer.from(encode(chunks));
+ if (nativeEncodePngChunks) {
+ return Buffer.from(nativeEncodePngChunks(chunks));
+ }
+ const PNG_SIG = Buffer.from([137, 80, 78, 71, 13, 10, 26, 10]);
+ const output = [PNG_SIG];
+ for (const chunk of chunks) {
+ const type = Buffer.from(chunk.name, 'ascii');
+ const length = Buffer.alloc(4);
+ length.writeUInt32BE(chunk.data.length, 0);
+ const crcData = Buffer.concat([type, chunk.data]);
+ const crc = Buffer.alloc(4);
+ const crc32fast = native?.nativeCrc32;
+ const crcVal = crc32fast ? crc32fast(crcData) : 0;
+ crc.writeUInt32BE(crcVal, 0);
+ output.push(length, type, chunk.data, crc);
+ }
+ return Buffer.concat(output);
  }
  export async function decodeMinPng(pngBuf) {
- const sharp = await import('sharp');
- const { data, info } = await sharp
- .default(pngBuf)
- .raw()
- .toBuffer({ resolveWithObject: true });
- const currentWidth = info.width;
- const currentHeight = info.height;
+ const rawData = native.sharpToRaw(pngBuf);
+ const data = rawData.pixels;
+ const currentWidth = rawData.width;
+ const currentHeight = rawData.height;
  const rawRGB = Buffer.alloc(currentWidth * currentHeight * 3);
  for (let i = 0; i < currentWidth * currentHeight; i++) {
  rawRGB[i * 3] = data[i * 3];
@@ -190,7 +220,10 @@ export async function decodeMinPng(pngBuf) {
  if (compStart + compressedLen > rawRGB.length)
  return null;
  const compressed = rawRGB.subarray(compStart, compStart + compressedLen);
- const decompressed = Buffer.from(await zstdDecompress(compressed));
+ if (!nativeZstdDecompress) {
+ throw new Error('Native zstd decompression not available');
+ }
+ const decompressed = Buffer.from(nativeZstdDecompress(compressed));
  const indices = zigzagOrderIndices(origW, origH);
  const residualR = new Uint8Array(origW * origH);
  const residualG = new Uint8Array(origW * origH);
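Note on the pure-JS chunk writer added in the encodeMinPng hunk above: when native?.nativeCrc32 is unavailable it falls back to a CRC of 0, which strict PNG readers will reject. The CRC-32 that PNG requires (polynomial 0xEDB88320, computed over the chunk type plus data) can be done in plain JS; a minimal self-contained sketch, not part of the package:

// Fallback CRC-32 so chunks are never emitted with a zero checksum.
const CRC_TABLE = new Uint32Array(256);
for (let n = 0; n < 256; n++) {
  let c = n;
  for (let k = 0; k < 8; k++) c = c & 1 ? 0xedb88320 ^ (c >>> 1) : c >>> 1;
  CRC_TABLE[n] = c >>> 0;
}
function crc32Fallback(buf) {
  let c = 0xffffffff;
  for (let i = 0; i < buf.length; i++) c = CRC_TABLE[(c ^ buf[i]) & 0xff] ^ (c >>> 8);
  return (c ^ 0xffffffff) >>> 0;
}
// In the chunk loop: const crcVal = crc32fast ? crc32fast(crcData) : crc32Fallback(crcData);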
package/dist/pack.d.ts CHANGED
@@ -1,5 +1,3 @@
- /// <reference types="node" />
- /// <reference types="node" />
  export interface PackedFile {
  path: string;
  buf: Buffer;
package/dist/roxify-cli CHANGED
Binary file
@@ -0,0 +1,9 @@
+ export declare class SingleBar {
+ constructor(...args: any[]);
+ start(...args: any[]): void;
+ update(...args: any[]): void;
+ stop(...args: any[]): void;
+ }
+ export declare const Presets: {
+ shades_classic: {};
+ };
@@ -0,0 +1,9 @@
+ export class SingleBar {
+ constructor(...args) { }
+ start(...args) { }
+ update(...args) { }
+ stop(...args) { }
+ }
+ export const Presets = {
+ shades_classic: {},
+ };
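These two added-file hunks appear to be the declaration and implementation of the './stub-progress.js' module that cli.js now imports in place of cli-progress: a no-op stand-in for the SingleBar/Presets surface, so existing call sites keep running but render nothing. An illustrative caller, assuming the original cli-progress-style usage:

// Unchanged cli-progress-style call site; every method is a silent no-op with the stub.
import * as cliProgress from './stub-progress.js';
const bar = new cliProgress.SingleBar({ format: ' {bar} {percentage}%' }, cliProgress.Presets.shades_classic);
bar.start(100, 0);
bar.update(50);
bar.stop();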
@@ -1,14 +1,12 @@
- /// <reference types="node" />
- /// <reference types="node" />
  export declare const CHUNK_TYPE = "rXDT";
- export declare const MAGIC: Buffer;
- export declare const PIXEL_MAGIC: Buffer;
- export declare const PIXEL_MAGIC_BLOCK: Buffer;
+ export declare const MAGIC: Buffer<ArrayBuffer>;
+ export declare const PIXEL_MAGIC: Buffer<ArrayBuffer>;
+ export declare const PIXEL_MAGIC_BLOCK: Buffer<ArrayBuffer>;
  export declare const ENC_NONE = 0;
  export declare const ENC_AES = 1;
  export declare const ENC_XOR = 2;
- export declare const FILTER_ZERO: Buffer;
- export declare const PNG_HEADER: Buffer;
+ export declare const FILTER_ZERO: Buffer<ArrayBuffer>;
+ export declare const PNG_HEADER: Buffer<ArrayBuffer>;
  export declare const PNG_HEADER_HEX: string;
  export declare const MARKER_COLORS: {
  r: number;
@@ -31,11 +29,6 @@ export declare const COMPRESSION_MARKERS: {
  g: number;
  b: number;
  }[];
- lzma: {
- r: number;
- g: number;
- b: number;
- }[];
  };
  export declare const FORMAT_MARKERS: {
  png: {
@@ -19,7 +19,6 @@ export const MARKER_START = MARKER_COLORS;
  export const MARKER_END = [...MARKER_COLORS].reverse();
  export const COMPRESSION_MARKERS = {
  zstd: [{ r: 0, g: 255, b: 0 }],
- lzma: [{ r: 255, g: 255, b: 0 }],
  };
  export const FORMAT_MARKERS = {
  png: { r: 0, g: 255, b: 255 },
@@ -1,4 +1,2 @@
- /// <reference types="node" />
- /// <reference types="node" />
  export declare function crc32(buf: Buffer, previous?: number): number;
  export declare function adler32(buf: Buffer, prev?: number): number;
@@ -1,4 +1,2 @@
- /// <reference types="node" />
- /// <reference types="node" />
  import { DecodeOptions, DecodeResult } from './types.js';
  export declare function decodePngToBinary(input: Buffer | string, opts?: DecodeOptions): Promise<DecodeResult>;
@@ -1,41 +1,16 @@
  import { execFileSync } from 'child_process';
- import cliProgress from 'cli-progress';
  import { mkdtempSync, readFileSync, rmSync, writeFileSync } from 'fs';
  import { tmpdir } from 'os';
  import { join } from 'path';
- import extract from 'png-chunks-extract';
- import sharp from 'sharp';
  import { unpackBuffer } from '../pack.js';
  import { CHUNK_TYPE, MAGIC, MARKER_END, MARKER_START, PIXEL_MAGIC, PIXEL_MAGIC_BLOCK, PNG_HEADER, } from './constants.js';
  import { DataFormatError, IncorrectPassphraseError, PassphraseRequiredError, } from './errors.js';
  import { colorsToBytes, deltaDecode, tryDecryptIfNeeded } from './helpers.js';
+ import { native } from './native.js';
  import { cropAndReconstitute } from './reconstitution.js';
  import { parallelZstdDecompress, tryZstdDecompress } from './zstd.js';
  async function tryDecompress(payload, onProgress) {
- try {
- return await parallelZstdDecompress(payload, onProgress);
- }
- catch (e) {
- try {
- const mod = await import('lzma-purejs');
- const decompressFn = mod && (mod.decompress || (mod.LZMA && mod.LZMA.decompress));
- if (!decompressFn)
- throw new Error('No lzma decompress');
- const dec = await new Promise((resolve, reject) => {
- try {
- decompressFn(Buffer.from(payload), (out) => resolve(out));
- }
- catch (err) {
- reject(err);
- }
- });
- const dBuf = Buffer.isBuffer(dec) ? dec : Buffer.from(dec);
- return dBuf;
- }
- catch (e3) {
- throw e;
- }
- }
+ return await parallelZstdDecompress(payload, onProgress);
  }
  function detectImageFormat(buf) {
  if (buf.length < 12)
@@ -90,11 +65,17 @@ export async function decodePngToBinary(input, opts = {}) {
  }
  else {
  try {
- const metadata = await sharp(input).metadata();
- const rawBytesEstimate = (metadata.width || 0) * (metadata.height || 0) * 4;
- const MAX_RAW_BYTES = 200 * 1024 * 1024;
- if (rawBytesEstimate > MAX_RAW_BYTES) {
- pngBuf = readFileSync(input);
+ if (native?.sharpMetadata) {
+ const inputBuf = readFileSync(input);
+ const metadata = native.sharpMetadata(inputBuf);
+ const rawBytesEstimate = metadata.width * metadata.height * 4;
+ const MAX_RAW_BYTES = 200 * 1024 * 1024;
+ if (rawBytesEstimate > MAX_RAW_BYTES) {
+ pngBuf = inputBuf;
+ }
+ else {
+ pngBuf = inputBuf;
+ }
  }
  else {
  pngBuf = readFileSync(input);
@@ -111,10 +92,11 @@ export async function decodePngToBinary(input, opts = {}) {
  }
  let progressBar = null;
  if (opts.showProgress) {
- progressBar = new cliProgress.SingleBar({
- format: ' {bar} {percentage}% | {step} | {elapsed}s',
- }, cliProgress.Presets.shades_classic);
- progressBar.start(100, 0, { step: 'Starting', elapsed: '0' });
+ progressBar = {
+ start: () => { },
+ update: () => { },
+ stop: () => { },
+ };
  const startTime = Date.now();
  if (!opts.onProgress) {
  opts.onProgress = (info) => {
@@ -128,10 +110,6 @@ export async function decodePngToBinary(input, opts = {}) {
  else if (info.phase === 'done') {
  pct = 100;
  }
- progressBar.update(Math.floor(pct), {
- step: info.phase.replace('_', ' '),
- elapsed: String(Math.floor((Date.now() - startTime) / 1000)),
- });
  };
  }
  }
@@ -139,28 +117,17 @@ export async function decodePngToBinary(input, opts = {}) {
  opts.onProgress({ phase: 'start' });
  let processedBuf = pngBuf;
  try {
- const info = await sharp(pngBuf).metadata();
- if (info.width && info.height) {
- const MAX_RAW_BYTES = 1200 * 1024 * 1024;
- const rawBytesEstimate = info.width * info.height * 4;
- if (rawBytesEstimate > MAX_RAW_BYTES) {
- throw new DataFormatError(`Image too large to decode in-process (${Math.round(rawBytesEstimate / 1024 / 1024)} MB). Increase Node heap or use a smaller image/compact mode.`);
- }
- if (false) {
- const doubledBuffer = await sharp(pngBuf)
- .resize({
- width: info.width * 2,
- height: info.height * 2,
- kernel: 'nearest',
- })
- .png()
- .toBuffer();
- processedBuf = await cropAndReconstitute(doubledBuffer, opts.debugDir);
- }
- else {
- processedBuf = pngBuf;
+ if (native?.sharpMetadata) {
+ const info = native.sharpMetadata(pngBuf);
+ if (info.width && info.height) {
+ const MAX_RAW_BYTES = 1200 * 1024 * 1024;
+ const rawBytesEstimate = info.width * info.height * 4;
+ if (rawBytesEstimate > MAX_RAW_BYTES) {
+ throw new DataFormatError(`Image too large to decode in-process (${Math.round(rawBytesEstimate / 1024 / 1024)} MB). Increase Node heap or use a smaller image/compact mode.`);
+ }
  }
  }
+ processedBuf = pngBuf;
  }
  catch (e) {
  if (e instanceof DataFormatError)
@@ -204,24 +171,30 @@ export async function decodePngToBinary(input, opts = {}) {
  }
  let chunks = [];
  try {
- const chunksRaw = extract(processedBuf);
- chunks = chunksRaw.map((c) => ({
- name: c.name,
- data: Buffer.isBuffer(c.data)
- ? c.data
- : Buffer.from(c.data),
- }));
+ if (native?.extractPngChunks) {
+ const chunksRaw = native.extractPngChunks(processedBuf);
+ chunks = chunksRaw.map((c) => ({
+ name: c.name,
+ data: Buffer.from(c.data),
+ }));
+ }
+ else {
+ throw new Error('Native PNG chunk extraction not available');
+ }
  }
  catch (e) {
  try {
  const withHeader = Buffer.concat([PNG_HEADER, pngBuf]);
- const chunksRaw = extract(withHeader);
- chunks = chunksRaw.map((c) => ({
- name: c.name,
- data: Buffer.isBuffer(c.data)
- ? c.data
- : Buffer.from(c.data),
- }));
+ if (native?.extractPngChunks) {
+ const chunksRaw = native.extractPngChunks(withHeader);
+ chunks = chunksRaw.map((c) => ({
+ name: c.name,
+ data: Buffer.from(c.data),
+ }));
+ }
+ else {
+ throw new Error('Native PNG chunk extraction not available');
+ }
  }
  catch (e2) {
  chunks = [];
@@ -274,28 +247,21 @@ export async function decodePngToBinary(input, opts = {}) {
  return { buf: payload, meta: { name } };
  }
  try {
- const metadata = await sharp(processedBuf).metadata();
+ const metadata = native.sharpMetadata(processedBuf);
  const currentWidth = metadata.width;
  const currentHeight = metadata.height;
  let rawRGB = Buffer.alloc(0);
  let isBlockEncoded = false;
  if (currentWidth % 2 === 0 && currentHeight % 2 === 0) {
- const { data: testData } = await sharp(processedBuf)
- .extract({
- left: 0,
- top: 0,
- width: Math.min(4, currentWidth),
- height: Math.min(4, currentHeight),
- })
- .raw()
- .toBuffer({ resolveWithObject: true });
+ const rawData = native.sharpToRaw(processedBuf);
+ const testData = rawData.pixels;
  let hasBlockPattern = true;
  for (let y = 0; y < Math.min(2, currentHeight / 2); y++) {
  for (let x = 0; x < Math.min(2, currentWidth / 2); x++) {
- const px00 = (y * 2 * Math.min(4, currentWidth) + x * 2) * 3;
- const px01 = (y * 2 * Math.min(4, currentWidth) + (x * 2 + 1)) * 3;
- const px10 = ((y * 2 + 1) * Math.min(4, currentWidth) + x * 2) * 3;
- const px11 = ((y * 2 + 1) * Math.min(4, currentWidth) + (x * 2 + 1)) * 3;
+ const px00 = (y * 2 * currentWidth + x * 2) * 3;
+ const px01 = (y * 2 * currentWidth + (x * 2 + 1)) * 3;
+ const px10 = ((y * 2 + 1) * currentWidth + x * 2) * 3;
+ const px11 = ((y * 2 + 1) * currentWidth + (x * 2 + 1)) * 3;
  if (testData[px00] !== testData[px01] ||
  testData[px00] !== testData[px10] ||
  testData[px00] !== testData[px11] ||
@@ -314,56 +280,28 @@ export async function decodePngToBinary(input, opts = {}) {
  const blocksWide = currentWidth / 2;
  const blocksHigh = currentHeight / 2;
  rawRGB = Buffer.alloc(blocksWide * blocksHigh * 3);
+ const fullRaw = native.sharpToRaw(processedBuf);
+ const fullData = fullRaw.pixels;
  let outIdx = 0;
  for (let by = 0; by < blocksHigh; by++) {
  for (let bx = 0; bx < blocksWide; bx++) {
- const { data: blockData } = await sharp(processedBuf)
- .extract({ left: bx * 2, top: by * 2, width: 1, height: 1 })
- .raw()
- .toBuffer({ resolveWithObject: true });
- rawRGB[outIdx++] = blockData[0];
- rawRGB[outIdx++] = blockData[1];
- rawRGB[outIdx++] = blockData[2];
+ const pixelOffset = (by * 2 * currentWidth + bx * 2) * 3;
+ rawRGB[outIdx++] = fullData[pixelOffset];
+ rawRGB[outIdx++] = fullData[pixelOffset + 1];
+ rawRGB[outIdx++] = fullData[pixelOffset + 2];
  }
  }
  }
  }
  if (!isBlockEncoded) {
- rawRGB = Buffer.allocUnsafe(currentWidth * currentHeight * 3);
- let writeOffset = 0;
- const rowsPerChunk = 2000;
- for (let startRow = 0; startRow < currentHeight; startRow += rowsPerChunk) {
- const endRow = Math.min(startRow + rowsPerChunk, currentHeight);
- const chunkHeight = endRow - startRow;
- const { data: chunkData, info: chunkInfo } = await sharp(processedBuf)
- .extract({
- left: 0,
- top: startRow,
- width: currentWidth,
- height: chunkHeight,
- })
- .raw()
- .toBuffer({ resolveWithObject: true });
- const channels = chunkInfo.channels;
- const pixelsInChunk = currentWidth * chunkHeight;
- if (channels === 3) {
- chunkData.copy(rawRGB, writeOffset);
- writeOffset += pixelsInChunk * 3;
- }
- else if (channels === 4) {
- for (let i = 0; i < pixelsInChunk; i++) {
- rawRGB[writeOffset++] = chunkData[i * 4];
- rawRGB[writeOffset++] = chunkData[i * 4 + 1];
- rawRGB[writeOffset++] = chunkData[i * 4 + 2];
- }
- }
- if (opts.onProgress) {
- opts.onProgress({
- phase: 'extract_pixels',
- loaded: endRow,
- total: currentHeight,
- });
- }
+ const rawData = native.sharpToRaw(processedBuf);
+ rawRGB = Buffer.from(rawData.pixels);
+ if (opts.onProgress) {
+ opts.onProgress({
+ phase: 'extract_pixels',
+ loaded: currentHeight,
+ total: currentHeight,
+ });
  }
  }
  const firstPixels = [];
@@ -410,22 +348,10 @@ export async function decodePngToBinary(input, opts = {}) {
  }
  else {
  const reconstructed = await cropAndReconstitute(processedBuf, opts.debugDir);
- const { data: rdata, info: rinfo } = await sharp(reconstructed)
- .raw()
- .toBuffer({ resolveWithObject: true });
- logicalWidth = rinfo.width;
- logicalHeight = rinfo.height;
- logicalData = Buffer.alloc(rinfo.width * rinfo.height * 3);
- if (rinfo.channels === 3) {
- rdata.copy(logicalData);
- }
- else if (rinfo.channels === 4) {
- for (let i = 0; i < logicalWidth * logicalHeight; i++) {
- logicalData[i * 3] = rdata[i * 4];
- logicalData[i * 3 + 1] = rdata[i * 4 + 1];
- logicalData[i * 3 + 2] = rdata[i * 4 + 2];
- }
- }
+ const rawData = native.sharpToRaw(reconstructed);
+ logicalWidth = rawData.width;
+ logicalHeight = rawData.height;
+ logicalData = Buffer.from(rawData.pixels);
  }
  if (process.env.ROX_DEBUG) {
  console.log('DEBUG: Logical grid reconstructed:', logicalWidth, 'x', logicalHeight, '=', logicalWidth * logicalHeight, 'pixels');
@@ -1,4 +1,2 @@
- /// <reference types="node" />
- /// <reference types="node" />
  import { EncodeOptions } from './types.js';
  export declare function encodeBinaryToPng(input: Buffer | Buffer[], opts?: EncodeOptions): Promise<Buffer>;
@@ -1,20 +1,19 @@
- import cliProgress from 'cli-progress';
  import { createCipheriv, pbkdf2Sync, randomBytes } from 'crypto';
- import sharp from 'sharp';
  import * as zlib from 'zlib';
  import { unpackBuffer } from '../pack.js';
  import { COMPRESSION_MARKERS, ENC_AES, ENC_NONE, ENC_XOR, MAGIC, MARKER_END, MARKER_START, PIXEL_MAGIC, PIXEL_MAGIC_BLOCK, PNG_HEADER, } from './constants.js';
  import { crc32 } from './crc.js';
  import { colorsToBytes } from './helpers.js';
- import { optimizePngBuffer } from './optimization.js';
+ import { native } from './native.js';
  import { parallelZstdCompress } from './zstd.js';
  export async function encodeBinaryToPng(input, opts = {}) {
  let progressBar = null;
  if (opts.showProgress) {
- progressBar = new cliProgress.SingleBar({
- format: ' {bar} {percentage}% | {step} | {elapsed}s',
- }, cliProgress.Presets.shades_classic);
- progressBar.start(100, 0, { step: 'Starting', elapsed: '0' });
+ progressBar = {
+ start: () => { },
+ update: () => { },
+ stop: () => { },
+ };
  const startTime = Date.now();
  if (!opts.onProgress) {
  opts.onProgress = (info) => {
@@ -34,10 +33,6 @@ export async function encodeBinaryToPng(input, opts = {}) {
  else if (info.phase === 'done') {
  pct = 100;
  }
- progressBar.update(Math.floor(pct), {
- step: info.phase.replace('_', ' '),
- elapsed: String(Math.floor((Date.now() - startTime) / 1000)),
- });
  };
  }
  }
@@ -220,7 +215,7 @@ export async function encodeBinaryToPng(input, opts = {}) {
  lenBuf.writeUInt32BE(jsonBuf.length, 0);
  metaPixel = [...metaPixel, Buffer.from('rXFL', 'utf8'), lenBuf, jsonBuf];
  }
- const useBlockEncoding = opts.useBlockEncoding ?? true;
+ const useBlockEncoding = false;
  const pixelMagic = useBlockEncoding ? PIXEL_MAGIC_BLOCK : PIXEL_MAGIC;
  const dataWithoutMarkers = [pixelMagic, ...metaPixel];
  const dataWithoutMarkersLen = dataWithoutMarkers.reduce((a, b) => a + b.length, 0);
@@ -261,15 +256,7 @@ export async function encodeBinaryToPng(input, opts = {}) {
  }
  }
  }
- bufScr = await sharp(rgbBuffer, {
- raw: { width, height, channels: 3 },
- })
- .png({
- compressionLevel: 9,
- adaptiveFiltering: true,
- effort: 9,
- })
- .toBuffer();
+ bufScr = Buffer.from(native.rgbToPng(rgbBuffer, width, height));
  }
  else {
  const bytesPerPixel = 3;
@@ -394,27 +381,10 @@ export async function encodeBinaryToPng(input, opts = {}) {
  else {
  const outputFormat = opts.outputFormat || 'png';
  if (outputFormat === 'webp') {
- bufScr = await sharp(raw, {
- raw: { width, height, channels: 3 },
- })
- .webp({
- lossless: true,
- quality: 100,
- effort: 6,
- })
- .toBuffer();
+ throw new Error('WebP output format not supported with native backend');
  }
  else {
- bufScr = await sharp(raw, {
- raw: { width, height, channels: 3 },
- })
- .png({
- compressionLevel: 3,
- palette: false,
- effort: 1,
- adaptiveFiltering: false,
- })
- .toBuffer();
+ bufScr = Buffer.from(native.rgbToPng(raw, width, height));
  }
  }
  }
@@ -426,22 +396,7 @@ export async function encodeBinaryToPng(input, opts = {}) {
  dataWithoutMarkers.length = 0;
  if (opts.onProgress)
  opts.onProgress({ phase: 'png_compress', loaded: 100, total: 100 });
- if (opts.skipOptimization || opts.outputFormat === 'webp') {
- progressBar?.stop();
- return bufScr;
- }
- if (opts.onProgress)
- opts.onProgress({ phase: 'optimizing', loaded: 0, total: 100 });
- try {
- const optimized = await optimizePngBuffer(bufScr, true);
- if (opts.onProgress)
- opts.onProgress({ phase: 'optimizing', loaded: 100, total: 100 });
- progressBar?.stop();
- return optimized;
- }
- catch (e) {
- progressBar?.stop();
- return bufScr;
- }
+ progressBar?.stop();
+ return bufScr;
  }
  }
@@ -1,5 +1,3 @@
- /// <reference types="node" />
- /// <reference types="node" />
  export declare function colorsToBytes(colors: Array<{
  r: number;
  g: number;