roxify 1.2.3 → 1.2.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,266 @@
1
+ import { join } from 'path';
2
+ import sharp from 'sharp';
3
/**
 * Locate coloured start/end markers in an image, crop between them, and
 * rebuild a compact image: appends a marker row, drops all-black and
 * duplicated rows/columns, and re-stamps a BLUE,GREEN,RED end marker.
 *
 * @param {string|Buffer} input - anything sharp() accepts (path or image buffer).
 * @param {string} [debugDir] - when set, writes 'doubled.png' there for inspection.
 * @returns {Promise<Buffer>} PNG-encoded reconstituted image.
 * @throws {Error} 'Patterns not found' when either marker is missing;
 *                 'Invalid crop dimensions' on a degenerate bounding box.
 */
export async function cropAndReconstitute(input, debugDir) {
    // Decode any sharp-compatible input to raw RGBA bytes plus image metadata.
    async function loadRaw(imgInput) {
        const { data, info } = await sharp(imgInput)
            .ensureAlpha()
            .raw()
            .toBuffer({ resolveWithObject: true });
        return { data, info };
    }
    // Byte offset of pixel (x, y) in a tightly packed RGBA buffer of `width`.
    function idxFor(x, y, width) {
        return (y * width + x) * 4;
    }
    // RGB-only pixel equality (alpha intentionally ignored).
    function eqRGB(a, b) {
        return a[0] === b[0] && a[1] === b[1] && a[2] === b[2];
    }
    const { info } = await loadRaw(input);
    // 2x nearest-neighbour upscale preserves exact pixel colours; presumably done
    // so 1px-wide markers survive the later scanning — TODO confirm with encoder.
    const doubledBuffer = await sharp(input)
        .resize({
            width: info.width * 2,
            height: info.height * 2,
            kernel: 'nearest',
        })
        .png()
        .toBuffer();
    if (debugDir) {
        await sharp(doubledBuffer).toFile(join(debugDir, 'doubled.png'));
    }
    const { data: doubledData, info: doubledInfo } = await loadRaw(doubledBuffer);
    const w = doubledInfo.width, h = doubledInfo.height;
    // Returns [r, g, b, a] of pixel (x, y) in the doubled image.
    const at = (x, y) => {
        const i = idxFor(x, y, w);
        return [
            doubledData[i],
            doubledData[i + 1],
            doubledData[i + 2],
            doubledData[i + 3],
        ];
    };
    // Scan from (startX, startY) stepping (dirX, dirY) for a horizontal run of
    // pure RED, followed by a run of pattern[0], then a pixel of pattern[1].
    // Returns the RED anchor coordinate, or null when no match is found.
    // NOTE(review): dirY only advances the row; each row restarts at startX.
    const findPattern = (startX, startY, dirX, dirY, pattern) => {
        for (let y = startY; y >= 0 && y < h; y += dirY) {
            for (let x = startX; x >= 0 && x < w; x += dirX) {
                const p = at(x, y);
                if (p[0] !== 255 || p[1] !== 0 || p[2] !== 0)
                    continue; // anchor must be pure red
                let nx = x + dirX;
                while (nx >= 0 && nx < w && eqRGB(at(nx, y), p))
                    nx += dirX; // skip the rest of the red run
                if (nx < 0 || nx >= w)
                    continue;
                const a = at(nx, y);
                let nx2 = nx + dirX;
                while (nx2 >= 0 && nx2 < w && eqRGB(at(nx2, y), a))
                    nx2 += dirX; // skip the run of the second colour
                if (nx2 < 0 || nx2 >= w)
                    continue;
                const b = at(nx2, y);
                if (a[0] === pattern[0][0] &&
                    a[1] === pattern[0][1] &&
                    a[2] === pattern[0][2] &&
                    b[0] === pattern[1][0] &&
                    b[1] === pattern[1][1] &&
                    b[2] === pattern[1][2]) {
                    return { x, y };
                }
            }
        }
        return null;
    };
    // Markers are RED,GREEN,BLUE runs: scanned forward for the start marker and
    // backward for the end marker (which therefore reads BLUE,GREEN,RED in image order).
    const startPoint = findPattern(0, 0, 1, 1, [
        [0, 255, 0],
        [0, 0, 255],
    ]);
    const endPoint = findPattern(w - 1, h - 1, -1, -1, [
        [0, 255, 0],
        [0, 0, 255],
    ]);
    if (!startPoint || !endPoint)
        throw new Error('Patterns not found');
    // Normalised inclusive bounding box between the two marker anchors.
    const sx1 = Math.min(startPoint.x, endPoint.x), sy1 = Math.min(startPoint.y, endPoint.y);
    const sx2 = Math.max(startPoint.x, endPoint.x), sy2 = Math.max(startPoint.y, endPoint.y);
    const cropW = sx2 - sx1 + 1, cropH = sy2 - sy1 + 1;
    if (cropW <= 0 || cropH <= 0)
        throw new Error('Invalid crop dimensions');
    const cropped = await sharp(doubledBuffer)
        .extract({ left: sx1, top: sy1, width: cropW, height: cropH })
        .png()
        .toBuffer();
    const { data: cdata, info: cinfo } = await loadRaw(cropped);
    const cw = cinfo.width, ch = cinfo.height;
    // Rebuild the cropped image with one extra row appended at the bottom.
    const newWidth = cw, newHeight = ch + 1;
    const out = Buffer.alloc(newWidth * newHeight * 4, 0);
    for (let i = 0; i < out.length; i += 4)
        out[i + 3] = 255; // opaque black background
    for (let y = 0; y < ch; y++) {
        for (let x = 0; x < cw; x++) {
            const srcI = (y * cw + x) * 4;
            const dstI = (y * newWidth + x) * 4;
            out[dstI] = cdata[srcI];
            out[dstI + 1] = cdata[srcI + 1];
            out[dstI + 2] = cdata[srcI + 2];
            out[dstI + 3] = cdata[srcI + 3];
        }
    }
    // Blank both the original last row and the appended row to opaque black.
    for (let x = 0; x < newWidth; x++) {
        const i = ((ch - 1) * newWidth + x) * 4;
        out[i] = out[i + 1] = out[i + 2] = 0;
        out[i + 3] = 255;
        const j = (ch * newWidth + x) * 4;
        out[j] = out[j + 1] = out[j + 2] = 0;
        out[j + 3] = 255;
    }
    // Stamp a BLUE,GREEN,RED end marker into the last 3 pixels of the new row.
    if (newWidth >= 3) {
        const bgrStart = newWidth - 3;
        const bgr = [
            [0, 0, 255],
            [0, 255, 0],
            [255, 0, 0],
        ];
        for (let k = 0; k < 3; k++) {
            const i = (ch * newWidth + bgrStart + k) * 4;
            out[i] = bgr[k][0];
            out[i + 1] = bgr[k][1];
            out[i + 2] = bgr[k][2];
            out[i + 3] = 255;
        }
    }
    const getPixel = (x, y) => {
        const i = (y * newWidth + x) * 4;
        return [out[i], out[i + 1], out[i + 2], out[i + 3]];
    };
    // Vertical compression: drop all-black rows and rows identical to the
    // previously kept row (run-length style de-duplication).
    const compressedLines = [];
    for (let y = 0; y < newHeight; y++) {
        const line = [];
        for (let x = 0; x < newWidth; x++)
            line.push(getPixel(x, y));
        const isAllBlack = line.every((p) => p[0] === 0 && p[1] === 0 && p[2] === 0 && p[3] === 255);
        if (!isAllBlack &&
            (compressedLines.length === 0 ||
                !line.every((p, i) => p.every((v, j) => v === compressedLines[compressedLines.length - 1][i][j])))) {
            compressedLines.push(line);
        }
    }
    // Degenerate case: every row was black — emit a 1x1 opaque black PNG.
    if (compressedLines.length === 0) {
        return sharp({
            create: {
                width: 1,
                height: 1,
                channels: 4,
                background: { r: 0, g: 0, b: 0, alpha: 1 },
            },
        })
            .png()
            .toBuffer();
    }
    let finalWidth = newWidth, finalHeight = compressedLines.length;
    let finalOut = Buffer.alloc(finalWidth * finalHeight * 4, 0);
    for (let i = 0; i < finalOut.length; i += 4)
        finalOut[i + 3] = 255;
    for (let y = 0; y < finalHeight; y++) {
        for (let x = 0; x < finalWidth; x++) {
            const i = (y * finalWidth + x) * 4;
            finalOut[i] = compressedLines[y][x][0];
            finalOut[i + 1] = compressedLines[y][x][1];
            finalOut[i + 2] = compressedLines[y][x][2];
            // NOTE(review): `|| 255` also forces alpha 0 to 255 — intentional?
            finalOut[i + 3] = compressedLines[y][x][3] || 255;
        }
    }
    // Clear the last 3 pixels of the final row before column de-duplication.
    if (finalHeight >= 1 && finalWidth >= 3) {
        const lastY = finalHeight - 1;
        for (let k = 0; k < 3; k++) {
            const i = (lastY * finalWidth + finalWidth - 3 + k) * 4;
            finalOut[i] = finalOut[i + 1] = finalOut[i + 2] = 0;
            finalOut[i + 3] = 255;
        }
    }
    // Horizontal compression: drop columns identical to the previously kept
    // column, then repack the buffer only if any column was dropped.
    if (finalWidth >= 2) {
        const kept = [];
        for (let x = 0; x < finalWidth; x++) {
            if (kept.length === 0) {
                kept.push(x);
                continue;
            }
            const prevX = kept[kept.length - 1];
            let same = true;
            for (let y = 0; y < finalHeight; y++) {
                const ia = (y * finalWidth + prevX) * 4, ib = (y * finalWidth + x) * 4;
                if (finalOut[ia] !== finalOut[ib] ||
                    finalOut[ia + 1] !== finalOut[ib + 1] ||
                    finalOut[ia + 2] !== finalOut[ib + 2] ||
                    finalOut[ia + 3] !== finalOut[ib + 3]) {
                    same = false;
                    break;
                }
            }
            if (!same)
                kept.push(x);
        }
        if (kept.length !== finalWidth) {
            const newFinalWidth = kept.length;
            const newOut = Buffer.alloc(newFinalWidth * finalHeight * 4, 0);
            for (let i = 0; i < newOut.length; i += 4)
                newOut[i + 3] = 255;
            for (let nx = 0; nx < kept.length; nx++) {
                const sx = kept[nx];
                for (let y = 0; y < finalHeight; y++) {
                    const srcI = (y * finalWidth + sx) * 4, dstI = (y * newFinalWidth + nx) * 4;
                    newOut[dstI] = finalOut[srcI];
                    newOut[dstI + 1] = finalOut[srcI + 1];
                    newOut[dstI + 2] = finalOut[srcI + 2];
                    newOut[dstI + 3] = finalOut[srcI + 3];
                }
            }
            finalOut = newOut;
            finalWidth = newFinalWidth;
        }
    }
    // If a leftover BLUE,GREEN,RED marker survives on the second-to-last row,
    // blank it so only one end marker remains after the final re-stamp below.
    if (finalHeight >= 2 && finalWidth >= 3) {
        const secondLastY = finalHeight - 2;
        const bgrSeq = [
            [0, 0, 255],
            [0, 255, 0],
            [255, 0, 0],
        ];
        let hasBGR = true;
        for (let k = 0; k < 3; k++) {
            const i = (secondLastY * finalWidth + finalWidth - 3 + k) * 4;
            if (finalOut[i] !== bgrSeq[k][0] ||
                finalOut[i + 1] !== bgrSeq[k][1] ||
                finalOut[i + 2] !== bgrSeq[k][2]) {
                hasBGR = false;
                break;
            }
        }
        if (hasBGR) {
            for (let k = 0; k < 3; k++) {
                const i = (secondLastY * finalWidth + finalWidth - 3 + k) * 4;
                finalOut[i] = finalOut[i + 1] = finalOut[i + 2] = 0;
                finalOut[i + 3] = 255;
            }
        }
    }
    // Re-stamp the BLUE,GREEN,RED marker in the last up-to-3 pixels of the
    // final row (sx can be negative when finalWidth < 3, hence the guard).
    if (finalHeight >= 1 && finalWidth >= 1) {
        const lastYFinal = finalHeight - 1;
        const bgrSeq = [
            [0, 0, 255],
            [0, 255, 0],
            [255, 0, 0],
        ];
        for (let k = 0; k < 3; k++) {
            const sx = finalWidth - 3 + k;
            if (sx >= 0) {
                const i = (lastYFinal * finalWidth + sx) * 4;
                finalOut[i] = bgrSeq[k][0];
                finalOut[i + 1] = bgrSeq[k][1];
                finalOut[i + 2] = bgrSeq[k][2];
                finalOut[i + 3] = 255;
            }
        }
    }
    // Encode the reconstituted raw RGBA buffer as a PNG.
    return sharp(finalOut, {
        raw: { width: finalWidth, height: finalHeight, channels: 4 },
    })
        .png()
        .toBuffer();
}
@@ -0,0 +1,41 @@
1
+ /// <reference types="node" />
2
+ /// <reference types="node" />
3
+ import { PackedFile } from '../pack.js';
4
/**
 * Options accepted by the encoder. The implementation is not visible in this
 * file; per-field semantics marked "confirm" are inferred from names only.
 */
export interface EncodeOptions {
    /** Compression algorithm; 'zstd' is the only accepted value. */
    compression?: 'zstd';
    /** Passphrase — presumably consumed by the `encrypt` modes below; confirm. */
    passphrase?: string;
    /** Optional name embedded in the output (surfaces as DecodeResult.meta.name). */
    name?: string;
    /** Encoding mode; 'screenshot' is the only accepted value. */
    mode?: 'screenshot';
    /** Encryption scheme selector. */
    encrypt?: 'auto' | 'aes' | 'xor' | 'none';
    /** Internal flag (underscore-prefixed) — appears to skip 'auto' resolution; confirm. */
    _skipAuto?: boolean;
    /** Output container format. */
    output?: 'auto' | 'png' | 'rox';
    /** Whether to embed the name in the payload — confirm against the encoder. */
    includeName?: boolean;
    /** Whether to embed a file list in the payload — confirm against the encoder. */
    includeFileList?: boolean;
    /** Explicit file list to embed. */
    fileList?: string[];
    /** Progress callback: a phase label plus optional loaded/total counters. */
    onProgress?: (info: {
        phase: string;
        loaded?: number;
        total?: number;
    }) => void;
    /** Display a progress indicator — presumably CLI-facing; confirm. */
    showProgress?: boolean;
    /** Enable verbose logging. */
    verbose?: boolean;
}
23
/** Result of a decode operation. All fields optional — which are populated
 *  depends on the payload; confirm against the decoder implementation. */
export interface DecodeResult {
    /** Decoded payload bytes (possibly absent when output is written to disk — confirm). */
    buf?: Buffer;
    /** Metadata recovered from the container. */
    meta?: {
        /** Embedded name, when one was set at encode time. */
        name?: string;
    };
    /** Unpacked files, when the payload was a packed archive. */
    files?: PackedFile[];
}
30
/** Options accepted by the decoder. Semantics marked "confirm" are inferred
 *  from names; the implementation is not visible in this file. */
export interface DecodeOptions {
    /** Passphrase for encrypted payloads — presumably must match encode time. */
    passphrase?: string;
    /** Directory for debug artifacts (e.g. intermediate images). */
    debugDir?: string;
    /** Destination path for decoded output. */
    outPath?: string;
    /** Subset of file names to extract — confirm exact matching semantics. */
    files?: string[];
    /** Progress callback: a phase label plus optional loaded/total counters. */
    onProgress?: (info: {
        phase: string;
        loaded?: number;
        total?: number;
    }) => void;
    /** Display a progress indicator — presumably CLI-facing; confirm. */
    showProgress?: boolean;
}
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,17 @@
1
+ /// <reference types="node" />
2
+ /// <reference types="node" />
3
/**
 * Compress each chunk of `stream` independently with Zstd and return the
 * container pieces (8-byte 'ZSTD' header, u32-BE size table, compressed
 * chunks) plus the total byte length. `onProgress` receives
 * (chunksCompleted, 0) — the total is unknown for a stream.
 */
export declare function compressStream(stream: AsyncGenerator<Buffer>, level?: number, onProgress?: (loaded: number, total: number) => void): Promise<{
    chunks: Buffer[];
    totalLength: number;
}>;
/**
 * Compress `payload` with Zstd. Payloads up to 8 MiB produce a bare zstd
 * frame; larger ones are split into 8 MiB chunks, compressed by a small
 * worker pool, and wrapped in the chunked 'ZSTD' container.
 */
export declare function parallelZstdCompress(payload: Buffer, level?: number, onProgress?: (loaded: number, total: number) => void): Promise<Buffer>;
/**
 * Decompress either a bare zstd frame or the chunked 'ZSTD' container,
 * emitting decompress_start/progress/done phase events via `onProgress`.
 */
export declare function parallelZstdDecompress(payload: Buffer, onProgress?: (info: {
    phase: string;
    loaded?: number;
    total?: number;
}) => void): Promise<Buffer>;
/** Thin wrapper that delegates directly to parallelZstdDecompress. */
export declare function tryZstdDecompress(payload: Buffer, onProgress?: (info: {
    phase: string;
    loaded?: number;
    total?: number;
}) => void): Promise<Buffer>;
@@ -0,0 +1,118 @@
1
+ import { compress as zstdCompress, decompress as zstdDecompress, } from '@mongodb-js/zstd';
2
+ import { cpus } from 'os';
3
/**
 * Compress an async stream of Buffers chunk-by-chunk with Zstd and assemble
 * the chunked container layout: an 8-byte header ('ZSTD' magic + chunk
 * count), a big-endian u32 size table, then the compressed chunks in order.
 *
 * @param {AsyncGenerator<Buffer>} stream - source chunks, consumed sequentially.
 * @param {number} [level=19] - zstd compression level.
 * @param {(loaded: number, total: number) => void} [onProgress] - called after
 *   each chunk with (chunksDone, 0); total is unknown for a stream, hence 0.
 * @returns {Promise<{chunks: Buffer[], totalLength: number}>} container pieces
 *   (not yet concatenated) and their combined byte length.
 */
export async function compressStream(stream, level = 19, onProgress) {
    const pieces = [];
    for await (const raw of stream) {
        pieces.push(Buffer.from(await zstdCompress(raw, level)));
        onProgress?.(pieces.length, 0);
    }
    // One u32-BE entry per compressed chunk so the decoder can re-split them.
    const sizeTable = Buffer.alloc(pieces.length * 4);
    pieces.forEach((piece, i) => sizeTable.writeUInt32BE(piece.length, i * 4));
    const header = Buffer.alloc(8);
    header.writeUInt32BE(0x5a535444, 0); // 'ZSTD'
    header.writeUInt32BE(pieces.length, 4);
    const totalLength = pieces.reduce((sum, piece) => sum + piece.length, 8 + sizeTable.length);
    return {
        chunks: [header, sizeTable, ...pieces],
        totalLength,
    };
}
27
/**
 * Compress a Buffer with Zstd. Payloads of at most 8 MiB are returned as a
 * single bare zstd frame; larger payloads are split into 8 MiB slices,
 * compressed concurrently by a small worker pool, and packed into the
 * chunked container: 'ZSTD' header, u32-BE size table, compressed chunks.
 *
 * @param {Buffer} payload - data to compress.
 * @param {number} [level=19] - zstd compression level.
 * @param {(loaded: number, total: number) => void} [onProgress] - chunk-level
 *   progress; for the small-payload path it fires with (0,1) then (1,1).
 * @returns {Promise<Buffer>} the compressed frame or container.
 */
export async function parallelZstdCompress(payload, level = 19, onProgress) {
    const CHUNK_BYTES = 8 * 1024 * 1024;
    // Small payloads skip the container format entirely.
    if (payload.length <= CHUNK_BYTES) {
        onProgress?.(0, 1);
        const frame = Buffer.from(await zstdCompress(payload, level));
        onProgress?.(1, 1);
        return frame;
    }
    const slices = [];
    for (let offset = 0; offset < payload.length; offset += CHUNK_BYTES) {
        slices.push(payload.subarray(offset, Math.min(offset + CHUNK_BYTES, payload.length)));
    }
    const results = new Array(slices.length);
    let next = 0;
    let done = 0;
    // Cooperative pool: each worker claims the next unprocessed slice index.
    // Single-threaded event loop makes the shared counters race-free.
    const drain = async () => {
        for (;;) {
            const mine = next++;
            if (mine >= slices.length)
                return;
            results[mine] = Buffer.from(await zstdCompress(slices[mine], level));
            done++;
            onProgress?.(done, slices.length);
        }
    };
    const poolSize = Math.max(1, Math.min(8, cpus().length));
    await Promise.all(Array.from({ length: poolSize }, () => drain()));
    const sizeTable = Buffer.alloc(results.length * 4);
    results.forEach((piece, i) => sizeTable.writeUInt32BE(piece.length, i * 4));
    const header = Buffer.alloc(8);
    header.writeUInt32BE(0x5a535444, 0); // 'ZSTD'
    header.writeUInt32BE(results.length, 4);
    return Buffer.concat([header, sizeTable, ...results]);
}
70
/**
 * Decompress a payload produced by parallelZstdCompress / compressStream.
 * Payloads carrying the 'ZSTD' container magic are split per the size table
 * and decompressed chunk-by-chunk; anything else (including payloads shorter
 * than a header) is treated as a single bare zstd frame.
 *
 * Fixes vs. previous version: deprecated Buffer#slice replaced with subarray
 * (consistent with parallelZstdCompress), the ROX_DEBUG log no longer
 * mis-names the function, the size table and chunk extents are validated
 * against the payload length, and the duplicated single-frame fallback is
 * factored into a helper.
 *
 * @param {Buffer} payload - compressed frame or chunked container.
 * @param {(info: {phase: string, loaded?: number, total?: number}) => void} [onProgress]
 *   - receives decompress_start / decompress_progress / decompress_done events.
 * @returns {Promise<Buffer>} the decompressed bytes.
 * @throws {Error} when the container's size table or a chunk extent overruns
 *   the payload (corrupt input).
 */
export async function parallelZstdDecompress(payload, onProgress) {
    // Fallback path: the whole payload is one bare zstd frame.
    const singleFrame = async () => {
        onProgress?.({ phase: 'decompress_start', total: 1 });
        const out = Buffer.from(await zstdDecompress(payload));
        onProgress?.({ phase: 'decompress_progress', loaded: 1, total: 1 });
        onProgress?.({ phase: 'decompress_done', loaded: 1, total: 1 });
        return out;
    };
    if (payload.length < 8)
        return singleFrame();
    if (payload.readUInt32BE(0) !== 0x5a535444) {
        if (process.env.ROX_DEBUG)
            console.log('parallelZstdDecompress: invalid magic');
        return singleFrame();
    }
    const numChunks = payload.readUInt32BE(4);
    // Guard against a corrupt header claiming more size-table entries than exist.
    if (8 + numChunks * 4 > payload.length)
        throw new Error('Corrupt ZSTD container: size table exceeds payload');
    const chunkSizes = [];
    let offset = 8;
    for (let i = 0; i < numChunks; i++) {
        chunkSizes.push(payload.readUInt32BE(offset));
        offset += 4;
    }
    onProgress?.({ phase: 'decompress_start', total: numChunks });
    const decompressedChunks = [];
    for (let i = 0; i < numChunks; i++) {
        const size = chunkSizes[i];
        // A truncated/corrupt container would otherwise surface as an opaque zstd error.
        if (offset + size > payload.length)
            throw new Error(`Corrupt ZSTD container: chunk ${i} overruns payload`);
        const chunk = payload.subarray(offset, offset + size);
        offset += size;
        decompressedChunks.push(Buffer.from(await zstdDecompress(chunk)));
        onProgress?.({
            phase: 'decompress_progress',
            loaded: i + 1,
            total: numChunks,
        });
    }
    onProgress?.({
        phase: 'decompress_done',
        loaded: numChunks,
        total: numChunks,
    });
    return Buffer.concat(decompressedChunks);
}
116
/**
 * Thin wrapper that delegates directly to parallelZstdDecompress — kept,
 * presumably, as a backwards-compatible public entry point; confirm callers.
 *
 * @param {Buffer} payload - compressed frame or chunked container.
 * @param {(info: {phase: string, loaded?: number, total?: number}) => void} [onProgress]
 * @returns {Promise<Buffer>} the decompressed bytes.
 */
export async function tryZstdDecompress(payload, onProgress) {
    return await parallelZstdDecompress(payload, onProgress);
}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "roxify",
3
- "version": "1.2.3",
3
+ "version": "1.2.5",
4
4
  "description": "Encode binary data into PNG images with Zstd compression and decode them back. Supports CLI and programmatic API (Node.js ESM).",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",