respimagen 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/copilot-instructions.md +139 -0
- package/.github/workflows/npm-publish.yml +33 -0
- package/bin/respimagen.mjs +18 -0
- package/eslint.config.js +10 -0
- package/index.js +80 -0
- package/lib/processor.js +127 -0
- package/package.json +27 -0
- package/readme.md +78 -0
- package/test/dir.test.mjs +81 -0
- package/test/file.test.mjs +66 -0
- package/test/formats.test.mjs +193 -0
- package/test/processor.test.mjs +213 -0
- package/test/srcset.test.mjs +49 -0
- package/utils.js +30 -0

package/.github/copilot-instructions.md
ADDED
@@ -0,0 +1,139 @@
# Respimagen - AI Coding Agent Instructions

## Project Overview

Respimagen is a Node.js CLI tool and library for batch image processing. It resizes images to multiple sizes and converts them to various formats (AVIF, WebP, JPEG, PNG) using Sharp, then generates responsive image srcset strings.

## Architecture

### Core Module (`lib/processor.js`)

- **Exports**: `processPath(input, options)`
  - `input`: string - file path or directory
  - `options`: object with `{ sizes, filetypes, outputdir, clear }`
  - Returns: `Promise<{ output: Array, srcset?: string }>`
- Contains all image processing logic using Sharp
- Handles both single files and directories
- Validates filetypes and throws errors for unknown formats
- Can be imported and used programmatically

### CLI Runner (`index.js`)

- Thin wrapper around `lib/processor.js`
- Uses yargs for argument parsing with positional input argument
- Calls `processPath()` and logs results
- Executable with Node.js: `node index.js <input> ...`

### Utilities (`utils.js`)

- **Exports**: `srcsetGenerator(files)`
- Groups files by extension
- Formats srcset strings for responsive images
- Filters invalid entries (missing ext/size)

### CLI Wrapper (`bin/respimagen.mjs`)

- Shebang wrapper for npm bin exposure
- Spawns `node index.js` with forwarded arguments
- Enables global `respimagen` command via npm

## Key Conventions

### ESM Only

- `package.json` has `"type": "module"`
- All imports use ESM syntax
- Use `.mjs` extension for bin scripts

### Image Processing

- Sizes are optional - if not specified, keeps original dimensions
- Default format: `avif` (modern, efficient format)
- Supported formats: `avif`, `webp`, `jpeg`, `jpg`, `png`
- Output directory: `output/` (configurable)
- File naming with sizes: `{basename}-{size}.{ext}`
- File naming without sizes: `{basename}.{ext}`

### Testing

- Uses Node's built-in test runner (`node:test`)
- Run with: `npm test`
- Integration tests: spawn CLI and verify file output
- Unit tests: directly import and test `processPath()` and `srcsetGenerator()`
- Format tests: verify all supported formats (avif, webp, jpeg, jpg, png) work correctly
- Test fixtures use temporary directories, cleaned up after each test

## Common Tasks

### Running the CLI

```bash
# Single file with multiple formats
node index.js image.jpg -s 500,750 -t webp,avif

# Directory with default avif format
node index.js ./photos -s 300,500,700

# Simplest: just convert to avif, keep original size
node index.js ./photos

# Via wrapper
node bin/respimagen.mjs image.jpg
```

### Running Tests

```bash
npm test
```

### Using Programmatically

```javascript
import { processPath } from "./lib/processor.js";

const result = await processPath("photo.jpg", {
  sizes: [400, 800],
  filetypes: "webp,avif",
  outputdir: "dist",
  clear: true,
});

console.log(result.output); // Array of processed files
console.log(result.srcset); // Srcset string
```

## Known Issues & Gotchas

### Async Array Methods

- **Fixed**: Previously used async predicate in `Array.filter()` which returned promises
- Now uses `Promise.all()` with map+filter pattern for async directory checks

### CLI Argument Parsing

- yargs may parse numeric sizes as numbers
- Always coerce `argv.sizes` to string before calling `.split()`

### File Validation

- `srcsetGenerator` filters invalid entries (missing ext/size, non-numeric size)
- `processPath` validates filetypes and throws clear errors for unknown formats

### Output Directory

- Use `{ force: true }` with `fs.rm()` to handle non-existent directories
- Always create output dir with `{ recursive: true }`

## Dependencies

- **sharp** (^0.34.4): Image processing
- **yargs** (^18.0.0): CLI argument parsing
- **eslint**: Code linting (devDep)

## Next Steps / TODOs

- Add concurrency limiting for large directories (e.g., p-limit)
- Add GitHub Actions CI to run tests on PRs
- Consider additional format validation
- Add benchmarks for performance testing

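A quick aside on the "Async Array Methods" gotcha documented above: `Array.prototype.filter` with an async predicate filters nothing, because every returned Promise is truthy. A minimal standalone sketch of the broken call next to the `Promise.all` map-then-filter pattern that `lib/processor.js` uses; the `./photos` directory is just a placeholder:

```javascript
import fs from "fs/promises";
import path from "path";

const dir = "./photos"; // placeholder directory
const entries = (await fs.readdir(dir)).map((name) => path.join(dir, name));

// Broken: the async predicate returns a Promise, and Promises are always
// truthy, so every entry passes the filter.
const wrong = entries.filter(async (entry) => (await fs.lstat(entry)).isFile());

// Working pattern: resolve all checks first, then filter on plain booleans.
const checks = await Promise.all(
  entries.map(async (entry) => ({
    entry,
    isFile: (await fs.lstat(entry)).isFile(),
  }))
);
const onlyFiles = checks.filter((c) => c.isFile).map((c) => c.entry);

console.log(wrong.length === entries.length); // true: nothing was filtered out
console.log(onlyFiles); // only the real files
```
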
package/.github/workflows/npm-publish.yml
ADDED
@@ -0,0 +1,33 @@
# This workflow will run tests using node and then publish a package to GitHub Packages when a release is created
# For more information see: https://docs.github.com/en/actions/publishing-packages/publishing-nodejs-packages

name: Node.js Package

on:
  release:
    types: [created]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 20
      - run: npm ci
      - run: npm test

  publish-npm:
    needs: build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 20
          registry-url: https://registry.npmjs.org/
      - run: npm ci
      - run: npm publish
        env:
          NODE_AUTH_TOKEN: ${{secrets.npm_token}}

package/bin/respimagen.mjs
ADDED
@@ -0,0 +1,18 @@
#!/usr/bin/env node
import { spawn } from "child_process";
import { fileURLToPath } from "url";
import path from "path";

// Simple wrapper that delegates to index.js
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const indexPath = path.join(__dirname, "..", "index.js");

// Forward all arguments to index.js
const child = spawn("node", [indexPath, ...process.argv.slice(2)], {
  stdio: "inherit",
});

child.on("exit", (code) => {
  process.exit(code || 0);
});

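One design note on this wrapper: it spawns a second `node` process and forwards the child's exit code, which keeps `index.js` untouched but costs an extra process per invocation. Since both files are ESM, a hypothetical alternative (a sketch only, not what this release ships) would be to import the CLI entry point and let its top-level code run in the same process:

```javascript
#!/usr/bin/env node
// Hypothetical alternative to bin/respimagen.mjs: importing index.js runs its
// top-level yargs parsing and processing directly, reusing process.argv.
import "../index.js";
```
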
package/eslint.config.js
ADDED
package/index.js
ADDED
@@ -0,0 +1,80 @@
#!/usr/bin/env node
import yargs from "yargs";
import { processPath } from "./lib/processor.js";

const argv = yargs(process.argv.slice(2))
  .usage(
    "Usage: $0 <input> -s [sizes] -t [filetypes] -o [outputdir] -c [clear]"
  )
  .command("$0 <input>", "Process image(s)", (yargs) => {
    yargs.positional("input", {
      describe: "file or directory to process",
      type: "string",
    });
  })
  .alias("s", "sizes")
  .alias("t", "filetypes")
  .alias("o", "outputdir")
  .describe(
    "s",
    "different sizes to generate, separated by comma. If omitted, keeps original size"
  )
  .describe(
    "t",
    "different filetypes to generate, separated by comma. Supported: avif, webp, jpeg, jpg, png"
  )
  .describe("c", "clear the output directory before processing, default false")
  .boolean("c")
  .default({
    filetypes: "avif",
    outputdir: "output",
    c: false,
  })
  .example(
    "$0 beach.jpg -s 500,750 -t webp,avif",
    "Resize and convert beach.jpg to 500px and 750px in webp and avif format"
  )
  .example(
    "$0 ./photos",
    "Convert all images in photos directory to avif (original size)"
  )
  .example(
    "$0 ./photos -s 300,500,700",
    "Process all images in the photos directory to avif with multiple sizes"
  ).argv;

const { input, outputdir } = argv;
const sizes = [];

if (argv.sizes) {
  // yargs may parse a single numeric size as Number (e.g. -s 100),
  // so coerce to string before splitting to avoid calling .split on a Number.
  const sizesRaw =
    typeof argv.sizes === "string" ? argv.sizes : String(argv.sizes);
  sizes.push(...sizesRaw.split(",").map((s) => parseInt(s)));
}

const options = {
  sizes: sizes.length > 0 ? sizes : null,
  filetypes: argv.filetypes,
  outputdir,
  clear: argv.c,
};

try {
  const result = await processPath(input, options);

  // Log results
  for (const item of result.output) {
    console.log(`✅ ${item.file} - ${item.size}`);
  }

  console.log("Everything done");

  if (result.srcset) {
    console.log(`\n` + result.srcset);
  }
} catch (error) {
  console.error(`Error: ${error.message}`);
  process.exit(1);
}

package/lib/processor.js
ADDED
@@ -0,0 +1,127 @@
import fs from "fs/promises";
import path from "path";
import sharp from "sharp";
import { srcsetGenerator } from "../utils.js";

const defaultFiletypeSettings = [
  { id: "avif" },
  { id: "webp" },
  { id: "jpeg", options: { mozjpeg: true } },
  { id: "jpg", options: { mozjpeg: true } },
  { id: "png" },
];

const logFile = (outputArr, file, ext, size) => {
  // collect result
  outputArr.push({ file: `${file}-${size}.${ext}`, ext, size });
};

/**
 * Process a file or directory and generate resized/converted images.
 * @param {string} input - path to file or directory
 * @param {object} options - { sizes: number[]|null, filetypes: string|array, outputdir: string, clear: boolean }
 * @returns {Promise<{ output: Array, srcset?: string }>} resolves when processing finished
 */
export async function processPath(input, options = {}) {
  const output = [];
  const queue = [];

  const sizes = Array.isArray(options.sizes) ? options.sizes : [];
  const filetypesRaw = options.filetypes || "avif,jpeg";
  const outputdir = options.outputdir || "output";
  const clear = !!options.clear;

  const formats = (
    typeof filetypesRaw === "string" ? filetypesRaw.split(",") : filetypesRaw
  ).map((f) =>
    defaultFiletypeSettings.find(
      (df) => df.id.toLowerCase() === String(f).toLowerCase()
    )
  );

  // validate formats
  if (formats.some((f) => !f)) {
    const bad = (
      typeof filetypesRaw === "string" ? filetypesRaw.split(",") : filetypesRaw
    ).filter(
      (f) =>
        !defaultFiletypeSettings.find(
          (df) => df.id.toLowerCase() === String(f).toLowerCase()
        )
    );
    throw new Error(`Unknown filetypes: ${bad.join(",")}`);
  }

  // resolve input
  const inputPath = path.resolve(input);
  let files = [];
  try {
    const stats = await fs.stat(inputPath);
    if (stats.isFile()) files.push(inputPath);
    else if (stats.isDirectory()) {
      const rawDirFils = await fs.readdir(inputPath);
      const directoryFiles = rawDirFils.map((file) =>
        path.join(inputPath, file)
      );
      const checks = await Promise.all(
        directoryFiles.map(async (file) => {
          try {
            const check = await fs.lstat(file);
            return { file, isFile: check.isFile() };
          } catch (e) {
            return { file, isFile: false };
          }
        })
      );
      files = checks.filter((c) => c.isFile).map((c) => c.file);
    } else {
      throw new Error(`${inputPath} is neither a file nor a directory`);
    }
  } catch (e) {
    throw e;
  }

  if (clear) {
    await fs.rm(outputdir, { recursive: true, force: true });
  }

  await fs.mkdir(outputdir, { recursive: true });

  for (const file of files) {
    const filename = path.basename(file, path.extname(file));
    const image = sharp(file);

    for (const format of formats) {
      if (sizes.length > 0) {
        for (const size of sizes) {
          queue.push(
            image
              .clone()
              .resize({ width: size })
              .toFormat(format.id, format.options)
              .toFile(`${outputdir}/${filename}-${size}.${format.id}`)
              .then(() => logFile(output, filename, format.id, size))
          );
        }
      } else {
        // no sizes -> keep original width
        const imgSize = await image.metadata();
        queue.push(
          image
            .clone()
            .toFormat(format.id, format.options)
            .toFile(`${outputdir}/${filename}.${format.id}`)
            .then(() => logFile(output, filename, format.id, imgSize.width))
        );
      }
    }
  }

  await Promise.all(queue);

  const result = { output };
  if (sizes.length !== 0) result.srcset = srcsetGenerator(output);
  return result;
}

export default { processPath };

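The TODO list in the Copilot instructions mentions concurrency limiting for large directories (e.g. `p-limit`). As a rough sketch of what that could look like around the Sharp pipelines built in `processPath` above; `p-limit` is not a dependency of this release, and the hard-coded job list is purely illustrative:

```javascript
import pLimit from "p-limit"; // assumed extra dependency, not in this package.json
import sharp from "sharp";

// Purely illustrative job list; inside processPath this would be derived from
// the resolved input files, sizes and formats instead of being hard-coded.
const jobs = [
  { file: "photos/a.jpg", size: 300, format: { id: "avif" }, out: "output/a-300.avif" },
  { file: "photos/a.jpg", size: 600, format: { id: "avif" }, out: "output/a-600.avif" },
];

// Cap the number of concurrent Sharp pipelines at 4 instead of starting all at once.
const limit = pLimit(4);
await Promise.all(
  jobs.map((job) =>
    limit(() =>
      sharp(job.file)
        .resize({ width: job.size })
        .toFormat(job.format.id, job.format.options)
        .toFile(job.out)
    )
  )
);
```
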
package/package.json
ADDED
@@ -0,0 +1,27 @@
{
  "name": "respimagen",
  "version": "2.0.0",
  "description": "",
  "main": "index.js",
  "type": "module",
  "bin": {
    "respimagen": "bin/respimagen.mjs"
  },
  "scripts": {
    "test": "node --test",
    "cli": "node index.js",
    "lint": "npx eslint . || true"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "dependencies": {
    "sharp": "^0.34.4",
    "yargs": "^18.0.0"
  },
  "devDependencies": {
    "@eslint/js": "^9.39.0",
    "eslint": "^9.39.0",
    "globals": "^16.5.0"
  }
}

package/readme.md
ADDED
@@ -0,0 +1,78 @@
# Responsive Images Generator

This is a Node.js script and library that generates responsive images for your website. It uses the [sharp](https://sharp.pixelplumbing.com/) image processing library.

Feel free to use it and modify it to your needs.

## CLI Usage

```bash
node index.js <input> -s [sizes] -t [filetypes] -o [outputdir] -c [clear]

Positional Arguments:
  input            file or directory to process                     [required]

Options:
      --help       Show help                                         [boolean]
      --version    Show version number                               [boolean]
  -s, --sizes      different sizes to generate, separated by comma. If omitted,
                   keeps original size                              [optional]
  -t, --filetypes  different filetypes to generate, separated by comma
                   Supported: avif, webp, jpeg, jpg, png
                                                             [default: "avif"]
  -o, --outputdir  output directory                        [default: "output"]
  -c               clear the output directory before processing, default false
                                                    [boolean] [default: false]

Examples:
  node index.js beach.jpg -s 500,750 -t webp,avif
    Resize and convert beach.jpg to 500px and 750px in webp and avif format

  node index.js ./photos
    Convert all images to avif, keeping original dimensions

  node index.js ./photos -s 300,500,700
    Process all images with multiple sizes (avif format)

  node index.js ./photos -s 300,500,700 -t webp,avif -o output -c
    Process all images with multiple sizes and formats, clearing output first
```

## Programmatic Usage

You can also import and use the core functionality in your own Node.js projects:

```javascript
import { processPath } from './lib/processor.js';

// Process a single image
const result = await processPath('photo.jpg', {
  sizes: [400, 800, 1200],
  filetypes: 'webp,avif',
  outputdir: 'dist',
  clear: true
});

console.log(result.output); // Array of processed files
console.log(result.srcset); // Srcset string for responsive images

// Process a directory
const dirResult = await processPath('./images', {
  sizes: [300, 500, 700],
  filetypes: 'avif,jpeg',
  outputdir: 'output'
});
```

## Testing

Run the test suite with:

```bash
npm test
```

## Dependencies

- **sharp** (^0.34.4): Image processing
- **yargs** (^18.0.0): CLI argument parsing

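For anyone wiring the programmatic API into a build script, the entries in `result.output` follow the shape produced by `logFile` in `lib/processor.js` ({ file, ext, size }), and `result.srcset` is only set when sizes were requested. A small sketch with illustrative file names:

```javascript
import { processPath } from "./lib/processor.js";

const result = await processPath("photo.jpg", {
  sizes: [400, 800],
  filetypes: "avif",
  outputdir: "output",
});

// result.output looks roughly like:
// [
//   { file: "photo-400.avif", ext: "avif", size: 400 },
//   { file: "photo-800.avif", ext: "avif", size: 800 },
// ]
// and result.srcset contains the matching srcset="..." string.
for (const { file, size } of result.output) {
  console.log(`${file} (${size}w)`);
}
```
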
package/test/dir.test.mjs
ADDED
@@ -0,0 +1,81 @@
import fs from "fs/promises";
import path from "path";
import { execFile } from "child_process";
import sharp from "sharp";
import test from "node:test";
import assert from "node:assert/strict";

const tmpDir = path.join(process.cwd(), "test", "tmp-dir");
const fixturesDir = path.join(process.cwd(), "test", "fixtures-dir");

const run = (file, args) =>
  new Promise((resolve, reject) => {
    execFile(file, args, { cwd: process.cwd() }, (err, stdout, stderr) => {
      if (err) return reject({ err, stdout, stderr });
      resolve({ stdout, stderr });
    });
  });

const ensureDir = async (d) => {
  await fs.rm(d, { recursive: true, force: true }).catch(() => {});
  await fs.mkdir(d, { recursive: true });
};

test("integration CLI directory", async (t) => {
  await ensureDir(tmpDir);
  await ensureDir(fixturesDir);

  const inputDir = fixturesDir;
  const outDir = path.join(tmpDir, "output");

  // create two tiny images
  await sharp({
    create: {
      width: 100,
      height: 100,
      channels: 3,
      background: { r: 0, g: 255, b: 0 },
    },
  })
    .png()
    .toFile(path.join(inputDir, "a.png"));

  await sharp({
    create: {
      width: 150,
      height: 150,
      channels: 3,
      background: { r: 0, g: 0, b: 255 },
    },
  })
    .png()
    .toFile(path.join(inputDir, "b.png"));

  const nodeBin = process.execPath;
  const cliArgs = [
    "index.js",
    inputDir,
    "-s",
    "50",
    "-t",
    "jpeg",
    "-o",
    outDir,
    "-c",
  ];

  const { stdout } = await run(nodeBin, cliArgs);
  assert.match(stdout, /Everything done/);

  const expectedFiles = [
    path.join(outDir, "a-50.jpeg"),
    path.join(outDir, "b-50.jpeg"),
  ];
  for (const f of expectedFiles) {
    const stat = await fs.stat(f);
    assert.ok(stat.isFile());
  }

  await fs.rm(tmpDir, { recursive: true, force: true }).catch(() => {});
  await fs.rm(fixturesDir, { recursive: true, force: true }).catch(() => {});
});

package/test/file.test.mjs
ADDED
@@ -0,0 +1,66 @@
import fs from "fs/promises";
import path from "path";
import { execFile } from "child_process";
import sharp from "sharp";
import test from "node:test";
import assert from "node:assert/strict";

const tmpDir = path.join(process.cwd(), "test", "tmp");
const fixturesDir = path.join(process.cwd(), "test", "fixtures");

const run = (file, args) =>
  new Promise((resolve, reject) => {
    execFile(file, args, { cwd: process.cwd() }, (err, stdout, stderr) => {
      if (err) return reject({ err, stdout, stderr });
      resolve({ stdout, stderr });
    });
  });

const ensureDir = async (d) => {
  await fs.rm(d, { recursive: true, force: true }).catch(() => {});
  await fs.mkdir(d, { recursive: true });
};

test("integration CLI single file", async (t) => {
  await ensureDir(tmpDir);
  await ensureDir(fixturesDir);

  const inputImage = path.join(fixturesDir, "test.png");
  const outDir = path.join(tmpDir, "output");

  // create a tiny 200x200 red PNG
  await sharp({
    create: {
      width: 200,
      height: 200,
      channels: 3,
      background: { r: 255, g: 0, b: 0 },
    },
  })
    .png()
    .toFile(inputImage);

  const nodeBin = process.execPath;
  const cliArgs = [
    "index.js",
    inputImage,
    "-s",
    "100",
    "-t",
    "jpeg",
    "-o",
    outDir,
    "-c",
  ];

  const { stdout } = await run(nodeBin, cliArgs);
  assert.match(stdout, /Everything done/);

  const expectedFile = path.join(outDir, "test-100.jpeg");
  const stat = await fs.stat(expectedFile);
  assert.ok(stat.isFile());

  // cleanup
  await fs.rm(tmpDir, { recursive: true, force: true }).catch(() => {});
  await fs.rm(fixturesDir, { recursive: true, force: true }).catch(() => {});
});

package/test/formats.test.mjs
ADDED
@@ -0,0 +1,193 @@
import test from "node:test";
import assert from "node:assert";
import fs from "fs/promises";
import path from "path";
import sharp from "sharp";
import { processPath } from "../lib/processor.js";

// Test each supported format individually
const supportedFormats = ["avif", "webp", "jpeg", "jpg", "png"];

for (const format of supportedFormats) {
  test(`processPath supports ${format} format`, async () => {
    const testDir = path.join(process.cwd(), `test-fixtures-format-${format}`);
    const inputFile = path.join(testDir, "test-image.jpg");
    const outputDir = path.join(testDir, "output");

    try {
      await fs.mkdir(testDir, { recursive: true });

      // Create test image
      await sharp({
        create: {
          width: 80,
          height: 80,
          channels: 3,
          background: { r: 200, g: 100, b: 50 },
        },
      })
        .jpeg()
        .toFile(inputFile);

      // Process with specific format
      const result = await processPath(inputFile, {
        sizes: [40],
        filetypes: format,
        outputdir: outputDir,
        clear: true,
      });

      // Verify output
      assert.ok(result.output, "result should have output array");
      assert.strictEqual(
        result.output.length,
        1,
        `should have 1 output for ${format}`
      );
      assert.strictEqual(
        result.output[0].ext,
        format,
        `output extension should be ${format}`
      );

      // Verify file exists
      const outputFiles = await fs.readdir(outputDir);
      assert.strictEqual(
        outputFiles.length,
        1,
        "output directory should contain 1 file"
      );

      const expectedExtension = format;
      assert.ok(
        outputFiles[0].endsWith(`.${expectedExtension}`),
        `file should have ${expectedExtension} extension`
      );

      // Verify file is valid by reading metadata
      const outputFile = path.join(outputDir, outputFiles[0]);
      const metadata = await sharp(outputFile).metadata();
      assert.ok(metadata, "should be able to read output file metadata");
      assert.strictEqual(metadata.width, 40, "width should be 40px");
    } finally {
      await fs.rm(testDir, { recursive: true, force: true });
    }
  });
}

test("processPath supports multiple formats at once", async () => {
  const testDir = path.join(process.cwd(), "test-fixtures-multi-format");
  const inputFile = path.join(testDir, "test-image.png");
  const outputDir = path.join(testDir, "output");

  try {
    await fs.mkdir(testDir, { recursive: true });

    // Create test image
    await sharp({
      create: {
        width: 100,
        height: 100,
        channels: 3,
        background: { r: 50, g: 150, b: 250 },
      },
    })
      .png()
      .toFile(inputFile);

    // Process with all formats
    const result = await processPath(inputFile, {
      sizes: [50],
      filetypes: "avif,webp,jpeg,png",
      outputdir: outputDir,
      clear: true,
    });

    // Verify output
    assert.ok(result.output, "result should have output array");
    assert.strictEqual(
      result.output.length,
      4,
      "should have 4 outputs (4 formats)"
    );

    // Verify all formats are present
    const extensions = result.output.map((o) => o.ext);
    assert.ok(extensions.includes("avif"), "should include avif");
    assert.ok(extensions.includes("webp"), "should include webp");
    assert.ok(extensions.includes("jpeg"), "should include jpeg");
    assert.ok(extensions.includes("png"), "should include png");

    // Verify files exist
    const outputFiles = await fs.readdir(outputDir);
    assert.strictEqual(
      outputFiles.length,
      4,
      "output directory should contain 4 files"
    );

    // Verify each file is valid
    for (const file of outputFiles) {
      const filePath = path.join(outputDir, file);
      const metadata = await sharp(filePath).metadata();
      assert.ok(metadata, `should be able to read ${file} metadata`);
      assert.strictEqual(metadata.width, 50, `${file} width should be 50px`);
    }
  } finally {
    await fs.rm(testDir, { recursive: true, force: true });
  }
});

test("processPath handles jpg and jpeg equivalently", async () => {
  const testDir = path.join(process.cwd(), "test-fixtures-jpg-jpeg");
  const inputFile = path.join(testDir, "test-image.png");
  const outputDir = path.join(testDir, "output");

  try {
    await fs.mkdir(testDir, { recursive: true });

    // Create test image
    await sharp({
      create: {
        width: 60,
        height: 60,
        channels: 3,
        background: { r: 128, g: 128, b: 128 },
      },
    })
      .png()
      .toFile(inputFile);

    // Process with both jpg and jpeg
    const result = await processPath(inputFile, {
      sizes: [30],
      filetypes: "jpg,jpeg",
      outputdir: outputDir,
      clear: true,
    });

    // Both should produce valid output
    assert.strictEqual(
      result.output.length,
      2,
      "should have 2 outputs (jpg and jpeg)"
    );

    // Verify files exist and are valid
    const outputFiles = await fs.readdir(outputDir);
    assert.strictEqual(outputFiles.length, 2, "should have 2 output files");

    for (const file of outputFiles) {
      const filePath = path.join(outputDir, file);
      const metadata = await sharp(filePath).metadata();
      assert.ok(metadata, `should be able to read ${file}`);
      // Both jpg and jpeg files are handled as JPEG by Sharp
      assert.ok(
        metadata.format === "jpeg" || metadata.format === "jpg",
        `${file} should be JPEG format`
      );
    }
  } finally {
    await fs.rm(testDir, { recursive: true, force: true });
  }
});

package/test/processor.test.mjs
ADDED
@@ -0,0 +1,213 @@
import test from "node:test";
import assert from "node:assert";
import fs from "fs/promises";
import path from "path";
import sharp from "sharp";
import { processPath } from "../lib/processor.js";

test("processPath can be imported and used programmatically", async () => {
  const testDir = path.join(process.cwd(), "test-fixtures-processor");
  const inputFile = path.join(testDir, "test-image.jpg");
  const outputDir = path.join(testDir, "output");

  try {
    // Setup: create test fixture
    await fs.mkdir(testDir, { recursive: true });
    await sharp({
      create: {
        width: 100,
        height: 100,
        channels: 3,
        background: { r: 255, g: 0, b: 0 },
      },
    })
      .jpeg()
      .toFile(inputFile);

    // Use the exported API
    const result = await processPath(inputFile, {
      sizes: [50, 75],
      filetypes: "webp,avif",
      outputdir: outputDir,
      clear: true,
    });

    // Verify output
    assert.ok(result.output, "result should have output array");
    assert.ok(result.srcset, "result should have srcset string");
    assert.strictEqual(
      result.output.length,
      4,
      "should have 4 outputs (2 sizes × 2 formats)"
    );

    // Verify files exist
    const outputFiles = await fs.readdir(outputDir);
    assert.strictEqual(
      outputFiles.length,
      4,
      "output directory should contain 4 files"
    );
    assert.ok(
      outputFiles.some((f) => f.includes("-50.webp")),
      "should have 50px webp file"
    );
    assert.ok(
      outputFiles.some((f) => f.includes("-75.avif")),
      "should have 75px avif file"
    );
  } finally {
    // Cleanup
    await fs.rm(testDir, { recursive: true, force: true });
  }
});

test("processPath throws error for unknown filetypes", async () => {
  const testDir = path.join(process.cwd(), "test-fixtures-processor-error");
  const inputFile = path.join(testDir, "test-image.jpg");

  try {
    await fs.mkdir(testDir, { recursive: true });
    await sharp({
      create: {
        width: 50,
        height: 50,
        channels: 3,
        background: { r: 0, g: 255, b: 0 },
      },
    })
      .jpeg()
      .toFile(inputFile);

    // Should throw for unknown filetype
    await assert.rejects(
      async () => {
        await processPath(inputFile, {
          sizes: [50],
          filetypes: "unknownformat",
          outputdir: path.join(testDir, "output"),
        });
      },
      {
        message: /Unknown filetypes/,
      }
    );
  } finally {
    await fs.rm(testDir, { recursive: true, force: true });
  }
});

test("processPath handles directory input", async () => {
  const testDir = path.join(process.cwd(), "test-fixtures-processor-dir");
  const outputDir = path.join(testDir, "output");

  try {
    await fs.mkdir(testDir, { recursive: true });

    // Create two test images
    await sharp({
      create: {
        width: 80,
        height: 80,
        channels: 3,
        background: { r: 100, g: 100, b: 255 },
      },
    })
      .png()
      .toFile(path.join(testDir, "image1.png"));

    await sharp({
      create: {
        width: 90,
        height: 90,
        channels: 3,
        background: { r: 255, g: 255, b: 0 },
      },
    })
      .png()
      .toFile(path.join(testDir, "image2.png"));

    const result = await processPath(testDir, {
      sizes: [40],
      filetypes: "webp",
      outputdir: outputDir,
      clear: true,
    });

    // Should process both images
    assert.strictEqual(result.output.length, 2, "should process 2 images");
    assert.ok(result.srcset, "should generate srcset");

    const outputFiles = await fs.readdir(outputDir);
    assert.strictEqual(outputFiles.length, 2, "should have 2 output files");
  } finally {
    await fs.rm(testDir, { recursive: true, force: true });
  }
});

test("processPath without sizes keeps original dimensions", async () => {
  const testDir = path.join(process.cwd(), "test-fixtures-processor-no-resize");
  const inputFile = path.join(testDir, "test-image.jpg");
  const outputDir = path.join(testDir, "output");

  try {
    await fs.mkdir(testDir, { recursive: true });

    // Create 120x120 test image
    await sharp({
      create: {
        width: 120,
        height: 120,
        channels: 3,
        background: { r: 255, g: 100, b: 50 },
      },
    })
      .jpeg()
      .toFile(inputFile);

    // Process without sizes
    const result = await processPath(inputFile, {
      sizes: null,
      filetypes: "webp,avif",
      outputdir: outputDir,
      clear: true,
    });

    // Verify output
    assert.ok(result.output, "result should have output array");
    assert.strictEqual(
      result.output.length,
      2,
      "should have 2 outputs (2 formats, no resize)"
    );

    // Should not generate srcset when no sizes specified
    assert.strictEqual(
      result.srcset,
      undefined,
      "should not have srcset when no sizes"
    );

    // Verify files exist with original dimensions
    const outputFiles = await fs.readdir(outputDir);
    assert.strictEqual(outputFiles.length, 2, "should have 2 output files");

    // Check files don't have size suffix
    assert.ok(
      outputFiles.some((f) => f === "test-image.webp"),
      "should have webp without size suffix"
    );
    assert.ok(
      outputFiles.some((f) => f === "test-image.avif"),
      "should have avif without size suffix"
    );

    // Verify dimensions are preserved
    const webpPath = path.join(outputDir, "test-image.webp");
    const metadata = await sharp(webpPath).metadata();
    assert.strictEqual(metadata.width, 120, "width should be preserved");
    assert.strictEqual(metadata.height, 120, "height should be preserved");
  } finally {
    await fs.rm(testDir, { recursive: true, force: true });
  }
});

package/test/srcset.test.mjs
ADDED
@@ -0,0 +1,49 @@
import test from "node:test";
import assert from "node:assert/strict";
import { srcsetGenerator } from "../utils.js";

test("srcsetGenerator groups by extension and formats srcset correctly", () => {
  const files = [
    { file: "image-300.avif", ext: "avif", size: 300 },
    { file: "image-600.avif", ext: "avif", size: 600 },
    { file: "image-300.jpeg", ext: "jpeg", size: 300 },
  ];

  const out = srcsetGenerator(files);

  // Expect two srcset blocks separated by blank line
  assert.ok(out.includes('srcset="image-300.avif 300w,\nimage-600.avif 600w"'));
  assert.ok(out.includes('srcset="image-300.jpeg 300w"'));
});

test("srcsetGenerator returns empty string for empty input", () => {
  const out = srcsetGenerator([]);
  assert.strictEqual(out, "");
});

test("srcsetGenerator skips entries missing ext or size gracefully", () => {
  const files = [
    { file: "ok-300.webp", ext: "webp", size: 300 },
    { file: "bad-noext", size: 200 },
    { file: "bad-nosize.webp", ext: "webp" },
  ];

  const out = srcsetGenerator(files);
  // Should include only the valid entry
  assert.ok(out.includes('srcset="ok-300.webp 300w"'));
  // Ensure entries without ext/size do not crash the function
  assert.ok(!out.includes("bad-noext"));
  assert.ok(!out.includes("bad-nosize"));
});

test("srcsetGenerator preserves input ordering within each extension group", () => {
  const files = [
    { file: "first-100.jpg", ext: "jpg", size: 100 },
    { file: "second-200.jpg", ext: "jpg", size: 200 },
  ];

  const out = srcsetGenerator(files);
  const idxFirst = out.indexOf("first-100.jpg");
  const idxSecond = out.indexOf("second-200.jpg");
  assert.ok(idxFirst < idxSecond, "ordering should preserve insertion order");
});

package/utils.js
ADDED
@@ -0,0 +1,30 @@
const srcsetGenerator = (files) => {
  if (!Array.isArray(files) || files.length === 0) return "";

  // keep only valid entries with ext, file and numeric size
  const valid = files.filter(
    (f) => f && f.ext && f.file && Number.isFinite(Number(f.size))
  );

  if (valid.length === 0) return "";

  // group files by extension while preserving input order
  const groupedFiles = valid.reduce((acc, file) => {
    const ext = file.ext;
    if (!acc[ext]) acc[ext] = [];
    acc[ext].push(file);
    return acc;
  }, {});

  // generate srcset for each extension
  const srcsets = Object.entries(groupedFiles).map(
    ([ext, files]) =>
      `srcset="${files
        .map((file) => `${file.file} ${file.size}w`)
        .join(",\n")}"`
  );

  return srcsets.join("\n\n");
};

export { srcsetGenerator };

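To make the return value of `srcsetGenerator` concrete, here is a short usage sketch; the expected output mirrors the assertions in `test/srcset.test.mjs`:

```javascript
import { srcsetGenerator } from "./utils.js";

const out = srcsetGenerator([
  { file: "image-300.avif", ext: "avif", size: 300 },
  { file: "image-600.avif", ext: "avif", size: 600 },
  { file: "image-300.jpeg", ext: "jpeg", size: 300 },
]);

console.log(out);
// srcset="image-300.avif 300w,
// image-600.avif 600w"
//
// srcset="image-300.jpeg 300w"
```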