@codernex/nexpress 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/image-processor.js +96 -0
- package/dist/index.js +147 -0
- package/package.json +39 -0
- package/readme.md +146 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2026 Borhan Uddin

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

package/dist/image-processor.js
ADDED
@@ -0,0 +1,96 @@
import fs from "node:fs/promises";
import path from "node:path";
import { parentPort, workerData } from "node:worker_threads";
import sharp from "sharp";

const { input, output, fileName, quality } = workerData;

// 2. Fix: Correctly parse quality (The original RegExp was invalid)
const parsedQuality = parseInt(quality, 10);
const qualityArg = !isNaN(parsedQuality) && parsedQuality > 0 && parsedQuality <= 100
    ? parsedQuality
    : 80;

async function ensureDirectory(dirPath) {
    // 3. Optimization: mkdir with recursive is idempotent; no need to check access first
    await fs.mkdir(dirPath, { recursive: true });
}

async function compressImage() {
    try {
        await ensureDirectory(output);
        // Validate input file exists
        try {
            await fs.access(input);
        }
        catch {
            throw new Error(`Input file not found: ${input}`);
        }
        const { name } = path.parse(fileName);
        const outputFileName = `${name}.webp`;
        const outputFilePath = path.join(output, outputFileName);
        // 4. Fix: Sharp Configuration
        // Removed 'lossless: true' because it conflicts with 'quality'.
        // If a user sets quality=50, they want lossy compression to save space.
        const result = await sharp(input)
            .webp({
                quality: qualityArg,
                effort: 4, // Trade-off between compression speed and size (0-6)
            })
            .toFile(outputFilePath);
        const originalStats = await fs.stat(input);
        const savedBytes = originalStats.size - result.size;
        // Convert bytes to Megabytes (1 MB = 1024 * 1024 bytes)
        const savedMB = (savedBytes / (1024 * 1024)).toFixed(2);
        const originalSize = (originalStats.size / (1024 * 1024)).toFixed(2);
        const currentSize = (result.size / (1024 * 1024)).toFixed(2);
        parentPort?.postMessage({
            status: "completed",
            data: {
                size: result.size,
                width: result.width,
                height: result.height,
                inputFile: fileName,
                outputFile: outputFileName,
                originalSize,
                currentSize,
                savedMB: `${savedMB} MB`,
            },
        });
    }
    catch (err) {
        const error = err;
        parentPort?.postMessage({
            status: "error",
            err: {
                message: error.message,
                stack: error.stack,
                fileName,
                type: error.constructor.name,
            },
        });
    }
}

parentPort?.on("message", (message) => {
    if (message.type === "compress") {
        compressImage();
    }
});

// Global error handlers
process.on("uncaughtException", (err) => {
    parentPort?.postMessage({
        status: "error",
        err: {
            message: `Uncaught exception: ${err.message}`,
            stack: err.stack,
            fileName,
        },
    });
    process.exit(1);
});

process.on("unhandledRejection", (reason) => {
    parentPort?.postMessage({
        status: "error",
        err: {
            message: `Unhandled rejection: ${String(reason)}`,
            fileName,
        },
    });
    process.exit(1);
});

package/dist/index.js
ADDED
@@ -0,0 +1,147 @@
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import { fileURLToPath } from "node:url";
import { Worker } from "node:worker_threads";
import { v4 as uuidV4 } from "uuid";

/**
 * Constants
 */
const MAX_WORKERS = Math.max(1, os.cpus().length - 2);
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const homeDir = os.homedir();
const acceptableArgs = ["--input", "--output", "--quality", "--help"];
const args = process.argv.slice(2); // Use slice instead of splice
const filteredArgs = args.filter((p) => {
    const [key] = p.split("=");
    return acceptableArgs.includes(key);
});

if (filteredArgs.length === 1) {
    console.log(`
Usage: nexpress [options]

Description:
  A multi-threaded image batch processor. Reads images from a source directory
  (relative to user home), processes them using worker threads, and saves
  them to an output directory.

Options:
  --input=<path>      (Required) Name of the folder containing source images.
                      NOTE: This path is relative to your Home Directory.
                      Supported formats: .jpg, .jpeg, .png, .webp

  --output=<path>     (Required) Destination path for processed images.
                      Can be an absolute path or relative to your Home Directory.

  --quality=<number>  (Optional) The quality of the output image (0-100).
                      Default: 80 if omitted.

  --help              Show this help message.

Examples:
  # Process images from MyImages to MyImages/Compressed
  nexpress --input=MyImages --output=MyImages/Compressed

  # Process with specific quality
  nexpress --input=RawPhotos --output=/tmp/Processed --quality=80

  # A folder under your home directory (e.g. /home/user/Desktop) can be passed as Desktop; otherwise pass an absolute path (e.g. /tmp/Processed)
`);
    process.exit(0);
}
if (filteredArgs.length < 2) {
    throw new Error("Both --input and --output paths are required");
}

const workers = new Map();
const keyValues = filteredArgs.map((arg) => {
    const [key, value] = arg.split("=");
    return { key, value };
});
const input = keyValues.find((k) => k.key === "--input")?.value;
const output = keyValues.find((k) => k.key === "--output")?.value;
const quality = keyValues.find((k) => k.key === "--quality")?.value ?? "";

if (!input || !output)
    throw new Error("Input or Output is missing");
if (quality && Number.isNaN(Number(quality)))
    throw new Error("Quality must be a number");

const inputPath = input.startsWith("/") ? input : path.join(homeDir, input);
const outputPath = output.startsWith("/") ? output : path.join(homeDir, output);

// Collect supported images from the input directory
const allImages = fs.readdirSync(inputPath).filter((file) => {
    const ext = path.extname(file);
    return ext && [".jpg", ".jpeg", ".png", ".webp"].includes(ext.toLowerCase());
});

// Create workers
allImages.forEach((fileName) => {
    console.log("Processing:", fileName);
    const workerId = uuidV4();
    const worker = new Worker(path.join(__dirname, "image-processor.js"), {
        // Use the compiled .js worker shipped in dist
        workerData: {
            input: path.join(inputPath, fileName),
            output: outputPath,
            fileName,
            quality,
        },
    });
    workers.set(workerId, {
        status: "pending",
        worker,
        workerId,
    });
    // Register event listeners immediately
    worker.on("message", (value) => {
        if (value.status === "error") {
            console.error(`Error processing ${fileName}:`, value.err);
        }
        if (value.status === "completed") {
            console.log("============================\n");
            console.log(`Image processing completed: ${fileName}`, `WorkerId: ${workerId}`, `ThreadId: ${worker.threadId}`);
            console.log("Result: ", value.data);
            console.log("============================\n\n");
            workers.get(workerId).status = "completed";
        }
        worker.terminate();
        workers.delete(workerId);
    });
    worker.on("error", (err) => {
        console.error(`Worker error for ${fileName}:`, err);
        worker.terminate();
        workers.delete(workerId);
    });
    worker.on("exit", (code) => {
        if (code !== 0) {
            console.error(`Worker stopped with exit code ${code} for ${fileName}`);
        }
    });
});

const workerQueue = Array.from(workers.values()).filter((c) => c.status === "pending");
let active = 0;
function runNext() {
    while (workerQueue.length > 0 && active < MAX_WORKERS) {
        active++;
        const job = workerQueue.shift();
        if (job) {
            runWorker(job.worker).finally(() => {
                active--;
                runNext();
            });
        }
    }
}
async function runWorker(worker) {
    worker.postMessage({
        type: "compress",
    });
}

// Start processing
runNext();

// Graceful shutdown
process.on("SIGINT", () => {
    console.log("\nShutting down workers...");
    workers.forEach(({ worker }) => {
        worker.terminate();
    });
    process.exit(0);
});

package/package.json
ADDED
@@ -0,0 +1,39 @@
{
  "name": "@codernex/nexpress",
  "version": "1.0.0",
  "description": "Multi-threaded image batch processor CLI",
  "main": "dist/index.js",
  "type": "module",
  "files": [
    "dist"
  ],
  "bin": {
    "nexpress": "./dist/index.js"
  },
  "scripts": {
    "build": "tsc",
    "start": "node dist/index.js",
    "dev": "node --watch --loader ts-node/esm src/index.ts",
    "prepublishOnly": "npm run build"
  },
  "keywords": [
    "image-processing",
    "cli",
    "sharp",
    "worker-threads"
  ],
  "author": "@codernex",
  "license": "ISC",
  "dependencies": {
    "axios": "^1.13.2",
    "cheerio": "^1.1.2",
    "sharp": "^0.34.5",
    "typescript": "^5.9.3",
    "uuid": "^11.0.5"
  },
  "devDependencies": {
    "@types/node": "^22.10.7",
    "@types/uuid": "^10.0.0",
    "ts-node": "^10.9.2"
  }
}

package/readme.md
ADDED
@@ -0,0 +1,146 @@
# NexPress - TypeScript Image Batch Processor CLI

A high-performance, multi-threaded command-line tool built with Node.js and TypeScript. It converts and compresses batches of images into the efficient **WebP** format using worker threads for maximum speed.

## 🚀 Features

- **Multi-threaded Processing:** Uses Node.js `worker_threads` to process images in parallel, utilizing all available CPU cores.
- **Format Conversion:** Automatically converts `.jpg`, `.jpeg`, and `.png` files to optimized `.webp`.
- **Smart Path Handling:** Supports both absolute system paths and paths relative to your home directory.
- **Space Savings:** Calculates and displays the amount of disk space saved per image in MB.
- **Custom Quality:** User-definable compression quality (1-100).
- **Robust Error Handling:** Graceful shutdowns and detailed error logging.

## 📦 Installation

You can use NexPress in three ways:

### 1. Global Installation (Recommended)

This installs the `nexpress` command system-wide.

```bash
npm install -g @codernex/nexpress
```

### 2. Run Once with npx

Use it immediately without installing.

```bash
npx @codernex/nexpress --input=Desktop/photos --output=Desktop/compressed
```

### 3. Local Development / Building from Source

```bash
# Clone the repository
git clone <your-repo-url>
cd nexpress

# Install dependencies
npm install

# Build the project
npm run build

# Link globally for testing
npm link
```

## 🛠 Usage

If installed globally or linked, simply run `nexpress`:

```bash
nexpress --input=<path> --output=<path> [options]
```

### 📂 Path Flexibility

NexPress is smart about file paths. You can provide:

1. **Relative Paths (from User Home):**
   If you type `Desktop/images`, the tool looks in `~/Desktop/images` (Linux/Mac) or `C:\Users\You\Desktop\images` (Windows).

   ```bash
   nexpress --input=Desktop/raw --output=Desktop/processed
   ```

2. **Absolute Paths:**
   You can also provide the full system path.

   ```bash
   nexpress --input=/home/user/Downloads/pics --output=/var/www/html/assets
   ```
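
The resolution rule behind this is a one-liner. Below is a minimal sketch of the logic in `dist/index.js`; the `resolveUserPath` helper name is illustrative only and is not something the package exports.

```js
import os from "node:os";
import path from "node:path";

// Sketch of the rule used by dist/index.js: a leading "/" means the path is
// taken as-is, anything else is joined onto the user's home directory.
// (resolveUserPath is an illustrative name, not part of the package.)
function resolveUserPath(p) {
  return p.startsWith("/") ? p : path.join(os.homedir(), p);
}

console.log(resolveUserPath("Desktop/raw"));          // e.g. /home/you/Desktop/raw
console.log(resolveUserPath("/var/www/html/assets")); // /var/www/html/assets (unchanged)
```
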
### Options

| Option      | Required | Description                                         | Default |
| ----------- | -------- | --------------------------------------------------- | ------- |
| `--input`   | ✅       | Source folder path (Absolute or relative to Home).  | N/A     |
| `--output`  | ✅       | Destination folder path.                            | N/A     |
| `--quality` | ❌       | Compression quality (1-100).                        | `80`    |
| `--help`    | ❌       | Displays the help menu.                             | N/A     |

### Examples

**Standard Compression (Default Quality: 80)**

```bash
nexpress --input=Photos --output=CompressedPhotos
```

**High Compression (Maximum Space Saving)**

```bash
nexpress --input=Desktop/Wallpapers --output=Desktop/WebP_Small --quality=50
```

**High Quality (Near Lossless)**

```bash
nexpress --input=/mnt/data/raw --output=/mnt/data/optimized --quality=95
```

## 💻 Development

If you want to contribute or modify the code:

1. **Clone and Install:**

   ```bash
   git clone <your-repo-url>
   npm install
   ```

2. **Run in Development Mode:**
   You can run the script directly using `ts-node`.

   ```bash
   # Run directly without building
   npx ts-node src/index.ts --input=TestIn --output=TestOut
   ```

## 🏗 Architecture

The tool uses a **Main Thread** -> **Worker Pool** architecture:

1. **Main Thread:** Scans the input directory and creates a queue of image processing jobs.
2. **Worker Threads:** Each CPU core gets a worker. The main thread distributes jobs to workers dynamically.
3. **Sharp:** Inside the worker, the `sharp` library handles the actual image buffer manipulation and WebP conversion.
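
A simplified sketch of that main-thread/worker hand-off is shown below, assuming the message shapes from `dist/image-processor.js` above. The real queueing, concurrency cap, and logging live in `dist/index.js`, and `compressInWorker` is an illustrative name, not an exported API.

```js
import path from "node:path";
import { fileURLToPath } from "node:url";
import { Worker } from "node:worker_threads";

const __dirname = path.dirname(fileURLToPath(import.meta.url));

// Simplified sketch: one worker per image. The worker starts compressing when
// it receives { type: "compress" } and replies with a "completed" or "error"
// status message (see dist/image-processor.js above).
function compressInWorker(job) {
  return new Promise((resolve, reject) => {
    const worker = new Worker(path.join(__dirname, "image-processor.js"), {
      workerData: job, // { input, output, fileName, quality }
    });
    worker.on("message", (msg) => {
      if (msg.status === "completed") resolve(msg.data);
      else reject(msg.err);
      worker.terminate();
    });
    worker.on("error", reject);
    worker.postMessage({ type: "compress" });
  });
}
```
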
## 📝 License

This project is licensed under the MIT License.