@mantiqh/image-optimizer 1.1.1 → 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/AGENTS.md ADDED
@@ -0,0 +1,104 @@
1
+ # AGENTS.md
2
+
3
+ ## Project Overview
4
+
5
+ TypeScript CLI tool for image optimization (files, folders, zip archives).
6
+ Single source file: `src/index.ts`. Built with `tsup`, uses `pnpm`.
7
+
8
+ ## Commands
9
+
10
+ ```bash
11
+ pnpm build # tsup build → dist/index.js (ESM)
12
+ pnpm dev # watch mode, auto-runs on change
13
+ pnpm start # run built output
14
+ pnpm build && pnpm start -- -s <path> # build + run against a path
15
+ ```
16
+
17
+ No test framework, linter, or formatter configured. No `pnpm test` or `pnpm lint`.
18
+
19
+ ## Testing
20
+
21
+ No automated tests exist. To verify changes, build and run manually:
22
+
23
+ ```bash
24
+ pnpm build && node dist/index.js -s ./test-images
25
+ ```
26
+
27
+ ## Project Structure
28
+
29
+ ```
30
+ src/index.ts # entire application (~314 lines)
31
+ dist/index.js # built output (ESM bundle)
32
+ test-images/ # manual test assets
33
+ ```
34
+
35
+ ## Code Style
36
+
37
+ ### Formatting
38
+ - 2-space indentation
39
+ - Double quotes for strings
40
+ - No trailing semicolons (inconsistent in codebase, but prefer omitting)
41
+
42
+ ### Imports
43
+ - ESM `import` syntax only (`"type": "module"` in package.json)
44
+ - Node built-ins first (`fs/promises`, `path`), then npm packages
45
+ - No `import *` — use named/default imports
46
+
47
+ ```ts
48
+ import { Command } from "commander";
49
+ import fs from "fs/promises";
50
+ import path from "path";
51
+ ```
52
+
53
+ ### Naming
54
+ - `camelCase` for functions and variables (`processDirectory`, `optimizeBuffer`)
55
+ - `UPPER_SNAKE_CASE` for constants (`SUPPORTED_EXTENSIONS`)
56
+ - No classes — use pure functions
57
+
58
+ ### Types
59
+ - TypeScript strict mode enabled (`tsconfig.json: strict: true`)
60
+ - `config` parameter is loosely typed as `any` throughout — follow this pattern for now
61
+ - Declare explicit return types on key functions (`Promise<Buffer>`, `void`)
62
+ - Use `catch (error: any)` pattern for error handling
63
+
64
+ ### Error Handling
65
+ - `try/catch` blocks around processing logic
66
+ - Silent fallback: return original data on optimization failure (see `optimizeBuffer`)
67
+ - `process.exit(1)` for fatal errors (missing source, unsupported type)
68
+ - Use `spinner.fail()` for user-facing error messages via `ora`
69
+
70
+ ### Async
71
+ - `async/await` everywhere — no callbacks, no `.then()` chains
72
+
73
+ ### Comments
74
+ - Section headers: `// --- Section Name ---` (e.g. `// --- Processors ---`)
75
+ - Inline `//` comments for clarification
76
+ - No JSDoc
77
+
78
+ ### Architecture Pattern
79
+ - `main()` — entry point, CLI setup via `commander`, dispatches to processors
80
+ - `processDirectory()` / `processZip()` / `processSingleFile()` — mode-specific handlers
81
+ - `optimizeBuffer()` — core sharp-based optimization, returns original on failure
82
+ - `isSupportedImage()` / `determineOutputPath()` — helpers
83
+
84
+ ## Dependencies
85
+
86
+ - **sharp** — image processing (resize, compress, format conversion)
87
+ - **commander** — CLI argument parsing
88
+ - **chalk** — terminal colors
89
+ - **ora** — spinner/progress
90
+ - **jszip** — zip file handling
91
+
92
+ ## Key Config
93
+
94
+ - **tsconfig**: ES2022 target, NodeNext module resolution, strict mode
95
+ - **package manager**: pnpm 10.12.3
96
+ - **module type**: ESM (`"type": "module"`)
97
+ - **Node version**: ES2022 compatible (Node 18+)
98
+
99
+ ## Gotchas
100
+
101
+ - SVG files are returned as-is (sharp corrupts SVGs)
102
+ - If optimized output is larger than input, original is returned
103
+ - Directory output appends `-1` if source === destination (loop prevention)
104
+ - `optimizeBuffer` silently catches all errors — don't add throw/rethrow without checking callers
package/README.md CHANGED
@@ -9,7 +9,9 @@ Built for developers to quickly reduce asset sizes before deployment without com
9
9
 
10
10
  - **Universal Input:** Works on `.zip` files, local folders, or single images.
11
11
  - **Recursive:** Process entire directory trees; copies non-image files (CSS, JS) unchanged.
12
- - **Expanded Support:** Optimizes `JPG`, `PNG`, `WebP`, `AVIF`, `GIF`, `TIFF`, and `SVG`.
12
+ - **Expanded Support:** Optimizes `JPG`, `PNG`, `WebP`, `AVIF`, `GIF`, `TIFF`. SVGs are skipped (copied as-is).
13
+ - **Fast:** Parallel processing — optimizes up to 5 images concurrently per directory.
14
+ - **Progress Tracking:** Shows `[N/total]` progress while optimizing.
13
15
  - **Smart Output:** Creates optimized versions _next to_ your source files by default.
14
16
  - **Safe:** If an optimized image is larger than the original, it keeps the original.
15
17
 
package/dist/index.js CHANGED
@@ -8,6 +8,10 @@ import JSZip from "jszip";
8
8
  import sharp from "sharp";
9
9
  import chalk from "chalk";
10
10
  import ora from "ora";
11
+ function logOutputPath(outputPath) {
12
+ console.log(`
13
+ \u{1F4C1} Output: ${chalk.cyan(outputPath)}`);
14
+ }
11
15
  var SUPPORTED_EXTENSIONS = /* @__PURE__ */ new Set([
12
16
  ".jpg",
13
17
  ".jpeg",
@@ -23,7 +27,7 @@ program.name("image-optimizer").description(
23
27
  chalk.cyan(
24
28
  "\u{1F680} Universal CLI to optimize images (File, Folder, or Zip). Supports JPG, PNG, WebP, AVIF, GIF, TIFF, SVG."
25
29
  )
26
- ).version("1.1.1").requiredOption(
30
+ ).version("1.2.1").requiredOption(
27
31
  "-s, --source <path>",
28
32
  "Path to the input file, folder, or zip"
29
33
  ).option(
@@ -55,6 +59,7 @@ async function main() {
55
59
  );
56
60
  spinner.text = "Processing Directory...";
57
61
  await processDirectory(sourcePath, outputPath, config);
62
+ logOutputPath(outputPath);
58
63
  } else if (sourcePath.endsWith(".zip")) {
59
64
  const outputPath = determineOutputPath(
60
65
  sourcePath,
@@ -63,6 +68,7 @@ async function main() {
63
68
  );
64
69
  spinner.text = "Processing Zip...";
65
70
  await processZip(sourcePath, outputPath, config);
71
+ logOutputPath(outputPath);
66
72
  } else if (isSupportedImage(sourcePath)) {
67
73
  const ext = path.extname(sourcePath);
68
74
  const outputPath = determineOutputPath(
@@ -72,6 +78,7 @@ async function main() {
72
78
  );
73
79
  spinner.text = "Processing Single File...";
74
80
  await processSingleFile(sourcePath, outputPath, config);
81
+ logOutputPath(outputPath);
75
82
  } else {
76
83
  spinner.fail(
77
84
  "Unsupported file type. Please provide a Folder, Zip, or supported Image."
@@ -91,38 +98,48 @@ async function processDirectory(source, destination, config) {
91
98
  }
92
99
  await fs.mkdir(destination, { recursive: true });
93
100
  const entries = await fs.readdir(source, { withFileTypes: true });
101
+ const dirEntries = entries.filter((e) => e.isDirectory());
102
+ const imageEntries = entries.filter((e) => e.isFile() && isSupportedImage(e.name) && path.extname(e.name).toLowerCase() !== ".svg");
103
+ const nonImageEntries = entries.filter((e) => e.isFile() && (!isSupportedImage(e.name) || path.extname(e.name).toLowerCase() === ".svg"));
104
+ for (const entry of dirEntries) {
105
+ await processDirectory(path.join(source, entry.name), path.join(destination, entry.name), config);
106
+ }
94
107
  let processedCount = 0;
95
- for (const entry of entries) {
96
- const srcPath = path.join(source, entry.name);
97
- const destPath = path.join(destination, entry.name);
98
- if (entry.isDirectory()) {
99
- await processDirectory(srcPath, destPath, config);
100
- } else if (entry.isFile()) {
101
- if (isSupportedImage(entry.name)) {
102
- config.spinner.text = `Optimizing: ${entry.name}`;
108
+ const batchSize = 5;
109
+ const totalImages = imageEntries.length;
110
+ for (let i = 0; i < imageEntries.length; i += batchSize) {
111
+ const batch = imageEntries.slice(i, Math.min(i + batchSize, imageEntries.length));
112
+ const results = await Promise.all(batch.map(async (entry, j) => {
113
+ const srcPath = path.join(source, entry.name);
114
+ const destPath = path.join(destination, entry.name);
115
+ const num = i + j + 1;
116
+ config.spinner.text = `Optimizing [${num}/${totalImages}]: ${entry.name}`;
117
+ try {
103
118
  const buffer = await fs.readFile(srcPath);
104
- const optimizedBuffer = await optimizeBuffer(
105
- buffer,
106
- path.extname(entry.name),
107
- config
108
- );
119
+ const optimizedBuffer = await optimizeBuffer(buffer, path.extname(entry.name), config);
109
120
  await fs.writeFile(destPath, optimizedBuffer);
110
- processedCount++;
111
- } else {
121
+ return 1;
122
+ } catch (error) {
123
+ console.error(chalk.red(`Failed to optimize ${entry.name}: ${error.message}`));
112
124
  await fs.copyFile(srcPath, destPath);
125
+ return 1;
113
126
  }
114
- }
115
- }
116
- if (processedCount > 0) {
127
+ }));
128
+ processedCount += results.length;
117
129
  }
118
- console.log(`
119
- \u{1F4C1} Output: ${chalk.cyan(destination)}`);
130
+ await Promise.all(nonImageEntries.map((entry) => {
131
+ return fs.copyFile(path.join(source, entry.name), path.join(destination, entry.name));
132
+ }));
133
+ return processedCount;
120
134
  }
121
135
  async function processZip(source, destination, config) {
122
136
  const zipData = await fs.readFile(source);
123
137
  const zip = await JSZip.loadAsync(zipData);
124
138
  const newZip = new JSZip();
125
139
  const fileNames = Object.keys(zip.files);
140
+ const imageFiles = fileNames.filter((f) => !zip.files[f].dir && isSupportedImage(f) && path.extname(f).toLowerCase() !== ".svg");
141
+ const totalImages = imageFiles.length;
142
+ let imageIndex = 0;
126
143
  for (const fileName of fileNames) {
127
144
  const file = zip.files[fileName];
128
145
  if (file.dir) {
@@ -130,14 +147,19 @@ async function processZip(source, destination, config) {
130
147
  continue;
131
148
  }
132
149
  const content = await file.async("nodebuffer");
133
- if (isSupportedImage(fileName)) {
134
- config.spinner.text = `Optimizing inside zip: ${fileName}`;
135
- const optimized = await optimizeBuffer(
136
- content,
137
- path.extname(fileName),
138
- config
139
- );
140
- newZip.file(fileName, optimized);
150
+ const ext = path.extname(fileName).toLowerCase();
151
+ if (ext === ".svg") {
152
+ newZip.file(fileName, content);
153
+ } else if (isSupportedImage(fileName)) {
154
+ imageIndex++;
155
+ config.spinner.text = `Optimizing in zip [${imageIndex}/${totalImages}]: ${fileName}`;
156
+ try {
157
+ const optimized = await optimizeBuffer(content, path.extname(fileName), config);
158
+ newZip.file(fileName, optimized);
159
+ } catch (error) {
160
+ console.error(chalk.red(`Failed to optimize ${fileName}: ${error.message}`));
161
+ newZip.file(fileName, content);
162
+ }
141
163
  } else {
142
164
  newZip.file(fileName, content);
143
165
  }
@@ -149,15 +171,11 @@ async function processZip(source, destination, config) {
149
171
  compressionOptions: { level: 6 }
150
172
  });
151
173
  await fs.writeFile(destination, outputBuffer);
152
- console.log(`
153
- \u{1F4C1} Output: ${chalk.cyan(destination)}`);
154
174
  }
155
175
  async function processSingleFile(source, destination, config) {
156
176
  const buffer = await fs.readFile(source);
157
177
  const optimized = await optimizeBuffer(buffer, path.extname(source), config);
158
178
  await fs.writeFile(destination, optimized);
159
- console.log(`
160
- \u{1F4C1} Output: ${chalk.cyan(destination)}`);
161
179
  }
162
180
  async function optimizeBuffer(buffer, ext, config) {
163
181
  const extension = ext.toLowerCase();
@@ -168,6 +186,8 @@ async function optimizeBuffer(buffer, ext, config) {
168
186
  pipeline = pipeline.resize({ width: config.width });
169
187
  }
170
188
  switch (extension) {
189
+ case ".svg":
190
+ return buffer;
171
191
  case ".jpeg":
172
192
  case ".jpg":
173
193
  pipeline = pipeline.jpeg({ quality: config.quality, mozjpeg: true });
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@mantiqh/image-optimizer",
3
- "version": "1.1.1",
3
+ "version": "1.2.1",
4
4
  "description": "CLI to optimize images.",
5
5
  "main": "dist/index.js",
6
6
  "bin": {
@@ -11,7 +11,8 @@
11
11
  "build": "tsup src/index.ts --format esm --clean",
12
12
  "dev": "tsup src/index.ts --format esm --watch --onSuccess \"node dist/index.js\"",
13
13
  "start": "node dist/index.js",
14
- "prepublishOnly": "pnpm build"
14
+ "prepublishOnly": "pnpm build",
15
+ "deploy": "bash scripts/deploy.sh"
15
16
  },
16
17
  "keywords": [],
17
18
  "author": "Muqtadir A.",
package/scripts/deploy.sh ADDED
@@ -0,0 +1,28 @@
1
+ #!/usr/bin/env bash
2
+ set -euo pipefail
3
+
4
+ BUMP="${1:-patch}"
5
+
6
+ echo "==> Bumping version ($BUMP)..."
7
+
8
+ # Bump version in package.json, creates git commit + tag
9
+ VERSION=$(npm version "$BUMP" --no-git-tag-version)
10
+
11
+ # Sync version to CLI --version output
12
+ sed -i '' "s/.version(\"[^\"]*\")/.version(\"${VERSION#v}\")/" src/index.ts
13
+
14
+ echo "==> Building..."
15
+ pnpm build
16
+
17
+ echo "==> Committing version bump..."
18
+ git add package.json src/index.ts
19
+ git commit -m "$VERSION"
20
+ git tag "$VERSION"
21
+
22
+ echo "==> Pushing to git..."
23
+ git push && git push --tags
24
+
25
+ echo "==> Publishing to npm..."
26
+ npm publish --access public
27
+
28
+ echo "==> Done! Published $VERSION"
package/src/index.ts CHANGED
@@ -7,6 +7,10 @@ import sharp from "sharp";
7
7
  import chalk from "chalk";
8
8
  import ora from "ora";
9
9
 
10
+ function logOutputPath(outputPath: string): void {
11
+ console.log(`\n📁 Output: ${chalk.cyan(outputPath)}`);
12
+ }
13
+
10
14
  // --- Configuration ---
11
15
  const SUPPORTED_EXTENSIONS = new Set([
12
16
  ".jpg",
@@ -28,7 +32,7 @@ program
28
32
  "🚀 Universal CLI to optimize images (File, Folder, or Zip). Supports JPG, PNG, WebP, AVIF, GIF, TIFF, SVG."
29
33
  )
30
34
  )
31
- .version("1.1.1")
35
+ .version("1.2.1")
32
36
  .requiredOption(
33
37
  "-s, --source <path>",
34
38
  "Path to the input file, folder, or zip"
@@ -75,6 +79,7 @@ async function main() {
75
79
  );
76
80
  spinner.text = "Processing Directory...";
77
81
  await processDirectory(sourcePath, outputPath, config);
82
+ logOutputPath(outputPath); // Log the output path after processing
78
83
  } else if (sourcePath.endsWith(".zip")) {
79
84
  // MODE: Zip
80
85
  const outputPath = determineOutputPath(
@@ -84,6 +89,7 @@ async function main() {
84
89
  );
85
90
  spinner.text = "Processing Zip...";
86
91
  await processZip(sourcePath, outputPath, config);
92
+ logOutputPath(outputPath); // Log the output path after processing
87
93
  } else if (isSupportedImage(sourcePath)) {
88
94
  // MODE: Single File
89
95
  const ext = path.extname(sourcePath);
@@ -94,6 +100,7 @@ async function main() {
94
100
  );
95
101
  spinner.text = "Processing Single File...";
96
102
  await processSingleFile(sourcePath, outputPath, config);
103
+ logOutputPath(outputPath); // Log the output path after processing
97
104
  } else {
98
105
  spinner.fail(
99
106
  "Unsupported file type. Please provide a Folder, Zip, or supported Image."
@@ -115,54 +122,55 @@ async function processDirectory(
115
122
  source: string,
116
123
  destination: string,
117
124
  config: any
118
- ) {
119
- // 1. Create Destination Folder
120
- // If user pointed source as destination (rare error), avoid loop
125
+ ): Promise<number> {
121
126
  if (source === destination) {
122
127
  destination += "-1";
123
128
  }
124
129
  await fs.mkdir(destination, { recursive: true });
125
130
 
126
- // 2. Read Directory
127
131
  const entries = await fs.readdir(source, { withFileTypes: true });
128
- let processedCount = 0;
129
132
 
130
- for (const entry of entries) {
131
- const srcPath = path.join(source, entry.name);
132
- const destPath = path.join(destination, entry.name);
133
+ const dirEntries = entries.filter(e => e.isDirectory());
134
+ const imageEntries = entries.filter(e => e.isFile() && isSupportedImage(e.name) && path.extname(e.name).toLowerCase() !== ".svg");
135
+ const nonImageEntries = entries.filter(e => e.isFile() && (!isSupportedImage(e.name) || path.extname(e.name).toLowerCase() === ".svg"));
133
136
 
134
- if (entry.isDirectory()) {
135
- // Recursive call
136
- await processDirectory(srcPath, destPath, config);
137
- } else if (entry.isFile()) {
138
- if (isSupportedImage(entry.name)) {
139
- config.spinner.text = `Optimizing: ${entry.name}`;
137
+ // Process directories sequentially (recursive)
138
+ for (const entry of dirEntries) {
139
+ await processDirectory(path.join(source, entry.name), path.join(destination, entry.name), config);
140
+ }
141
+
142
+ // Process images in parallel batches of 5
143
+ let processedCount = 0;
144
+ const batchSize = 5;
145
+ const totalImages = imageEntries.length;
146
+
147
+ for (let i = 0; i < imageEntries.length; i += batchSize) {
148
+ const batch = imageEntries.slice(i, Math.min(i + batchSize, imageEntries.length));
149
+ const results = await Promise.all(batch.map(async (entry, j) => {
150
+ const srcPath = path.join(source, entry.name);
151
+ const destPath = path.join(destination, entry.name);
152
+ const num = i + j + 1;
153
+ config.spinner.text = `Optimizing [${num}/${totalImages}]: ${entry.name}`;
154
+ try {
140
155
  const buffer = await fs.readFile(srcPath);
141
- const optimizedBuffer = await optimizeBuffer(
142
- buffer,
143
- path.extname(entry.name),
144
- config
145
- );
156
+ const optimizedBuffer = await optimizeBuffer(buffer, path.extname(entry.name), config);
146
157
  await fs.writeFile(destPath, optimizedBuffer);
147
- processedCount++;
148
- } else {
149
- // Copy non-images
158
+ return 1;
159
+ } catch (error: any) {
160
+ console.error(chalk.red(`Failed to optimize ${entry.name}: ${error.message}`));
150
161
  await fs.copyFile(srcPath, destPath);
162
+ return 1;
151
163
  }
152
- }
164
+ }));
165
+ processedCount += results.length;
153
166
  }
154
- // Added: Log output location for folders
155
- if (processedCount > 0) {
156
- // Only log the root output folder once (check logic if recursive)
157
- // Actually, since this is recursive, we should only log in the main caller.
158
- // But since we can't easily detect "root" here without extra args,
159
- // we'll rely on the main function logging or log here only if it looks like the root.
160
- // Better approach: Let's log it in main?
161
- // No, processDirectory is recursive.
162
- // Let's just log it once at the top level call.
163
- }
164
- // Log strictly for the user visibility (Moved logic to ensure visibility)
165
- console.log(`\n📁 Output: ${chalk.cyan(destination)}`);
167
+
168
+ // Copy non-image files in parallel
169
+ await Promise.all(nonImageEntries.map(entry => {
170
+ return fs.copyFile(path.join(source, entry.name), path.join(destination, entry.name));
171
+ }));
172
+
173
+ return processedCount;
166
174
  }
167
175
 
168
176
  async function processZip(source: string, destination: string, config: any) {
@@ -171,6 +179,9 @@ async function processZip(source: string, destination: string, config: any) {
171
179
  const newZip = new JSZip();
172
180
 
173
181
  const fileNames = Object.keys(zip.files);
182
+ const imageFiles = fileNames.filter(f => !zip.files[f].dir && isSupportedImage(f) && path.extname(f).toLowerCase() !== ".svg");
183
+ const totalImages = imageFiles.length;
184
+ let imageIndex = 0;
174
185
 
175
186
  for (const fileName of fileNames) {
176
187
  const file = zip.files[fileName];
@@ -180,14 +191,20 @@ async function processZip(source: string, destination: string, config: any) {
180
191
  }
181
192
 
182
193
  const content = await file.async("nodebuffer");
183
- if (isSupportedImage(fileName)) {
184
- config.spinner.text = `Optimizing inside zip: ${fileName}`;
185
- const optimized = await optimizeBuffer(
186
- content,
187
- path.extname(fileName),
188
- config
189
- );
190
- newZip.file(fileName, optimized);
194
+ const ext = path.extname(fileName).toLowerCase();
195
+
196
+ if (ext === ".svg") {
197
+ newZip.file(fileName, content);
198
+ } else if (isSupportedImage(fileName)) {
199
+ imageIndex++;
200
+ config.spinner.text = `Optimizing in zip [${imageIndex}/${totalImages}]: ${fileName}`;
201
+ try {
202
+ const optimized = await optimizeBuffer(content, path.extname(fileName), config);
203
+ newZip.file(fileName, optimized);
204
+ } catch (error: any) {
205
+ console.error(chalk.red(`Failed to optimize ${fileName}: ${error.message}`));
206
+ newZip.file(fileName, content);
207
+ }
191
208
  } else {
192
209
  newZip.file(fileName, content);
193
210
  }
@@ -200,7 +217,6 @@ async function processZip(source: string, destination: string, config: any) {
200
217
  compressionOptions: { level: 6 },
201
218
  });
202
219
  await fs.writeFile(destination, outputBuffer);
203
- console.log(`\n📁 Output: ${chalk.cyan(destination)}`);
204
220
  }
205
221
 
206
222
  async function processSingleFile(
@@ -211,7 +227,6 @@ async function processSingleFile(
211
227
  const buffer = await fs.readFile(source);
212
228
  const optimized = await optimizeBuffer(buffer, path.extname(source), config);
213
229
  await fs.writeFile(destination, optimized);
214
- console.log(`\n📁 Output: ${chalk.cyan(destination)}`);
215
230
  }
216
231
 
217
232
  // --- Core Optimizer ---
@@ -234,6 +249,8 @@ async function optimizeBuffer(
234
249
 
235
250
  // Compress based on format
236
251
  switch (extension) {
252
+ case ".svg":
253
+ return buffer;
237
254
  case ".jpeg":
238
255
  case ".jpg":
239
256
  pipeline = pipeline.jpeg({ quality: config.quality, mozjpeg: true });