@inoo-ch/payload-image-optimizer 1.9.0 → 1.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/AGENT_DOCS.md CHANGED
@@ -98,14 +98,12 @@ collections: {
98
98
 
99
99
  When an image is uploaded to an optimized collection:
100
100
 
101
- 1. **`beforeChange` hook** (in-memory processing):
102
- - If `uniqueFileNames: true`: renames file to UUID (e.g., `photo.jpg` → `a1b2c3d4.jpg`)
103
- - Auto-rotates based on EXIF orientation
104
- - Resizes to fit within `maxDimensions`
105
- - Strips metadata (if enabled)
106
- - If `replaceOriginal: true`: converts to primary format (first in `formats` array), updates filename/mimeType
107
- - Generates ThumbHash (if enabled)
108
- - Sets `imageOptimizer.status = 'pending'`
101
+ 1. **`beforeChange` hook** (single-pass in-memory processing):
102
+ - If `generateFilename` / `uniqueFileNames`: renames file (e.g., `photo.jpg` → `a1b2c3d4.jpg`)
103
+ - Single sharp pipeline: resizes to `maxDimensions`, strips metadata, and optionally converts to primary format — all in one decode/encode cycle
104
+ - Skips the redundant `.rotate()` call — Payload's `generateFileData()` already auto-rotated before hooks run
105
+ - If no async job is needed: generates ThumbHash synchronously (included in initial DB write)
106
+ - Sets `imageOptimizer.status` to `'pending'` (async job) or `'complete'` (no job needed)
109
107
 
110
108
  2. **`afterChange` hook** (disk + async):
111
109
  - Writes processed buffer to disk (overwriting Payload's original)
@@ -115,7 +113,8 @@ When an image is uploaded to an optimized collection:
115
113
  3. **Background job** (`imageOptimizer_convertFormats`):
116
114
  - Generates variant files for any additional formats (e.g., AVIF)
117
115
  - Writes variants to disk with `-optimized` suffix
118
- - Updates document: `imageOptimizer.status = 'complete'`, populates `variants` array
116
+ - Generates ThumbHash (deferred from the sync save path to avoid blocking uploads)
117
+ - Updates document: `imageOptimizer.status = 'complete'`, populates `variants` array and `thumbHash`
119
118
 
120
119
  ### File Naming
121
120
 
package/README.md CHANGED
@@ -83,7 +83,7 @@ imageOptimizer({
83
83
  // Global defaults (overridden by per-collection config)
84
84
  formats: [
85
85
  { format: 'webp', quality: 80 },
86
- { format: 'avif', quality: 65 },
86
+ // { format: 'avif', quality: 65 }, // opt-in — AVIF is ~5-10x slower to encode than WebP
87
87
  ],
88
88
  maxDimensions: { width: 2560, height: 2560 },
89
89
  generateThumbHash: true,
@@ -152,16 +152,16 @@ imageOptimizer({
152
152
  ## How It Works
153
153
 
154
154
  1. **Upload** — An image is uploaded to a configured collection
155
- 2. **Pre-process** — The `beforeChange` hook strips metadata, resizes the image, and generates a ThumbHash
155
+ 2. **Pre-process** — A single-pass sharp pipeline strips metadata, resizes, and optionally converts format — all in one operation
156
156
  3. **Save** — Payload writes the optimized image to disk
157
- 4. **Convert** — A background job converts the image to WebP/AVIF variants asynchronously
158
- 5. **Done** — The document is updated with variant URLs, file sizes, and optimization status
157
+ 4. **Convert** — A background job converts the image to additional format variants (e.g. AVIF) and generates the ThumbHash asynchronously
158
+ 5. **Done** — The document is updated with variant URLs, file sizes, ThumbHash, and optimization status
159
159
 
160
- All format conversion runs as async background jobs, so uploads return immediately.
160
+ Format conversion and ThumbHash generation run as async background jobs, so uploads return immediately.
161
161
 
162
162
  ### Vercel / Serverless Deployment
163
163
 
164
- Image processing (especially AVIF encoding, ThumbHash generation, and metadata stripping) can exceed the default serverless function timeout. The plugin exports a recommended `maxDuration` that you can re-export from your Payload API route:
164
+ Image processing (especially AVIF encoding and metadata stripping) can exceed the default serverless function timeout. The plugin exports a recommended `maxDuration` that you can re-export from your Payload API route:
165
165
 
166
166
  ```ts
167
167
  // src/app/(payload)/api/[...slug]/route.ts
@@ -212,7 +212,7 @@ vercelBlobStorage({
212
212
 
213
213
  ## How It Differs from Payload's Default Image Handling
214
214
 
215
- Payload CMS ships with [sharp](https://sharp.pixelplumbing.com/) built-in and can resize images and generate sizes on upload. This plugin **does not double-process your images** — it intercepts the raw upload in a `beforeChange` hook *before* Payload's own sharp pipeline runs, and writes the optimized buffer back to `req.file.data`. When Payload's built-in `uploadFiles` step kicks in to generate your configured sizes, it works from the already-optimized file, not the raw original.
215
+ Payload CMS ships with [sharp](https://sharp.pixelplumbing.com/) built-in and can resize images and generate sizes on upload. This plugin optimizes the uploaded image in a `beforeChange` hook and writes the result back to `req.file.data`. Payload's `generateFileData` runs before hooks and handles `imageSizes` generation using `Promise.all`, so the plugin focuses on what Payload doesn't do natively: format conversion (WebP/AVIF), metadata stripping, and ThumbHash generation. Using `clientOptimization: true` (the default) is the most effective way to speed up uploads with many `imageSizes`, since it reduces the source image before Payload processes it.
216
216
 
217
217
  ### Comparison
218
218
 
@@ -229,9 +229,10 @@ Payload CMS ships with [sharp](https://sharp.pixelplumbing.com/) built-in and ca
229
229
 
230
230
  ### CPU & Resource Impact
231
231
 
232
+ - **Single-pass pipeline** — Metadata stripping, resizing, and format conversion run in a single sharp pipeline (one decode/encode cycle), minimizing processing overhead.
233
+ - **Deferred ThumbHash** — ThumbHash generation runs in the background (via the format conversion job or `waitUntil`) rather than blocking the upload response.
232
234
  - **Single-format mode** (e.g. WebP only with `replaceOriginal: true`) adds virtually zero overhead compared to Payload's default sharp processing — the plugin replaces the sharp pass rather than adding a second one.
233
- - **Additional format variants** (e.g. both WebP and AVIF) run as background jobs after upload — this is the one area where you'll see extra CPU usage vs vanilla Payload.
234
- - **ThumbHash generation** processes a 100×100px thumbnail — negligible impact.
235
+ - **Additional format variants** (e.g. both WebP and AVIF) run as background jobs after upload — this is the one area where you'll see extra CPU usage vs vanilla Payload. Note that AVIF encoding is ~5-10x slower than WebP.
235
236
  - **Bulk regeneration** processes images sequentially, not all at once, so it won't spike your server.
236
237
 
237
238
  If you're on a resource-constrained server, use single-format mode and you'll be at roughly the same CPU cost as stock Payload.
@@ -6,6 +6,9 @@ import { waitUntil } from '../utilities/waitUntil.js';
6
6
  export const createAfterChangeHook = (resolvedConfig, collectionSlug)=>{
7
7
  return async ({ context, doc, req })=>{
8
8
  if (context?.imageOptimizer_skip) return doc;
9
+ // Native re-uploads (focal point/crop changes): optimization was skipped in beforeChange.
10
+ // Payload's native image-size regeneration handles everything.
11
+ if (context?.imageOptimizer_nativeReupload) return doc;
9
12
  // Use context flag from beforeChange instead of checking req.file.data directly.
10
13
  // Cloud storage adapters may consume req.file.data in their own afterChange hook
11
14
  // before ours runs, which would cause this guard to bail out and leave status as 'pending'.
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/hooks/afterChange.ts"],"sourcesContent":["import fs from 'fs/promises'\nimport path from 'path'\nimport type { CollectionAfterChangeHook } from 'payload'\n\nimport type { ResolvedImageOptimizerConfig } from '../types.js'\nimport { resolveStaticDir } from '../utilities/resolveStaticDir.js'\nimport { isCloudStorage } from '../utilities/storage.js'\nimport { waitUntil } from '../utilities/waitUntil.js'\n\nexport const createAfterChangeHook = (\n resolvedConfig: ResolvedImageOptimizerConfig,\n collectionSlug: string,\n): CollectionAfterChangeHook => {\n return async ({ context, doc, req }) => {\n if (context?.imageOptimizer_skip) return doc\n\n // Use context flag from beforeChange instead of checking req.file.data directly.\n // Cloud storage adapters may consume req.file.data in their own afterChange hook\n // before ours runs, which would cause this guard to bail out and leave status as 'pending'.\n if (!context?.imageOptimizer_hasUpload) return doc\n\n const collectionConfig = req.payload.collections[collectionSlug as keyof typeof req.payload.collections].config\n const cloudStorage = isCloudStorage(collectionConfig)\n\n // When using local storage, overwrite the file on disk with the processed buffer.\n // Payload's uploadFiles step writes the original buffer; we replace it here.\n // When using cloud storage, skip — the cloud adapter's afterChange hook already\n // uploads the correct buffer from req.file.data (set in our beforeChange hook).\n if (!cloudStorage) {\n const staticDir = resolveStaticDir(collectionConfig)\n const processedBuffer = context.imageOptimizer_processedBuffer as Buffer | undefined\n if (processedBuffer && doc.filename && staticDir) {\n const safeFilename = path.basename(doc.filename as string)\n const filePath = path.join(staticDir, safeFilename)\n await fs.writeFile(filePath, processedBuffer)\n\n // If replaceOriginal changed the filename, clean up the old file Payload wrote\n const originalFilename = 
context.imageOptimizer_originalFilename as string | undefined\n if (originalFilename && originalFilename !== safeFilename) {\n const oldFilePath = path.join(staticDir, path.basename(originalFilename))\n await fs.unlink(oldFilePath).catch(() => {\n // Old file may not exist if Payload used the new filename\n })\n }\n }\n }\n\n // When status was already resolved in beforeChange (cloud storage, or\n // replaceOriginal with a single format), no async job or update is needed.\n // This avoids a separate update() call that fails with 404 on MongoDB due to\n // transaction isolation when cloud storage adapters are involved.\n if (context?.imageOptimizer_statusResolved) {\n return doc\n }\n\n // Queue async format conversion job for remaining variants (local storage only)\n await req.payload.jobs.queue({\n task: 'imageOptimizer_convertFormats',\n input: {\n collectionSlug,\n docId: String(doc.id),\n },\n })\n\n const runPromise = req.payload.jobs.run({ sequential: true }).catch((err: unknown) => {\n req.payload.logger.error({ err }, 'Image optimizer job runner failed')\n })\n waitUntil(runPromise, req)\n\n return doc\n 
}\n}\n"],"names":["fs","path","resolveStaticDir","isCloudStorage","waitUntil","createAfterChangeHook","resolvedConfig","collectionSlug","context","doc","req","imageOptimizer_skip","imageOptimizer_hasUpload","collectionConfig","payload","collections","config","cloudStorage","staticDir","processedBuffer","imageOptimizer_processedBuffer","filename","safeFilename","basename","filePath","join","writeFile","originalFilename","imageOptimizer_originalFilename","oldFilePath","unlink","catch","imageOptimizer_statusResolved","jobs","queue","task","input","docId","String","id","runPromise","run","sequential","err","logger","error"],"mappings":"AAAA,OAAOA,QAAQ,cAAa;AAC5B,OAAOC,UAAU,OAAM;AAIvB,SAASC,gBAAgB,QAAQ,mCAAkC;AACnE,SAASC,cAAc,QAAQ,0BAAyB;AACxD,SAASC,SAAS,QAAQ,4BAA2B;AAErD,OAAO,MAAMC,wBAAwB,CACnCC,gBACAC;IAEA,OAAO,OAAO,EAAEC,OAAO,EAAEC,GAAG,EAAEC,GAAG,EAAE;QACjC,IAAIF,SAASG,qBAAqB,OAAOF;QAEzC,iFAAiF;QACjF,iFAAiF;QACjF,4FAA4F;QAC5F,IAAI,CAACD,SAASI,0BAA0B,OAAOH;QAE/C,MAAMI,mBAAmBH,IAAII,OAAO,CAACC,WAAW,CAACR,eAAuD,CAACS,MAAM;QAC/G,MAAMC,eAAed,eAAeU;QAEpC,kFAAkF;QAClF,6EAA6E;QAC7E,gFAAgF;QAChF,gFAAgF;QAChF,IAAI,CAACI,cAAc;YACjB,MAAMC,YAAYhB,iBAAiBW;YACnC,MAAMM,kBAAkBX,QAAQY,8BAA8B;YAC9D,IAAID,mBAAmBV,IAAIY,QAAQ,IAAIH,WAAW;gBAChD,MAAMI,eAAerB,KAAKsB,QAAQ,CAACd,IAAIY,QAAQ;gBAC/C,MAAMG,WAAWvB,KAAKwB,IAAI,CAACP,WAAWI;gBACtC,MAAMtB,GAAG0B,SAAS,CAACF,UAAUL;gBAE7B,+EAA+E;gBAC/E,MAAMQ,mBAAmBnB,QAAQoB,+BAA+B;gBAChE,IAAID,oBAAoBA,qBAAqBL,cAAc;oBACzD,MAAMO,cAAc5B,KAAKwB,IAAI,CAACP,WAAWjB,KAAKsB,QAAQ,CAACI;oBACvD,MAAM3B,GAAG8B,MAAM,CAACD,aAAaE,KAAK,CAAC;oBACjC,0DAA0D;oBAC5D;gBACF;YACF;QACF;QAEA,sEAAsE;QACtE,2EAA2E;QAC3E,6EAA6E;QAC7E,kEAAkE;QAClE,IAAIvB,SAASwB,+BAA+B;YAC1C,OAAOvB;QACT;QAEA,gFAAgF;QAChF,MAAMC,IAAII,OAAO,CAACmB,IAAI,CAACC,KAAK,CAAC;YAC3BC,MAAM;YACNC,OAAO;gBACL7B;gBACA8B,OAAOC,OAAO7B,IAAI8B,EAAE;YACtB;QACF;QAEA,MAAMC,aAAa9B,IAAII,OAAO,CAACmB,IAAI,CAACQ,GAAG,CAAC;YAAEC,YAAY;QAAK,GAAGX,KAAK,CAAC,CAACY;YACnEjC,IAAII,OAAO,CAAC8B,MAAM,CAACC,KAAK,CAAC;gBAAEF;YAAI,GAAG;QACpC;QACA
vC,UAAUoC,YAAY9B;QAEtB,OAAOD;IACT;AACF,EAAC"}
1
+ {"version":3,"sources":["../../src/hooks/afterChange.ts"],"sourcesContent":["import fs from 'fs/promises'\nimport path from 'path'\nimport type { CollectionAfterChangeHook } from 'payload'\n\nimport type { ResolvedImageOptimizerConfig } from '../types.js'\nimport { resolveStaticDir } from '../utilities/resolveStaticDir.js'\nimport { isCloudStorage } from '../utilities/storage.js'\nimport { waitUntil } from '../utilities/waitUntil.js'\n\nexport const createAfterChangeHook = (\n resolvedConfig: ResolvedImageOptimizerConfig,\n collectionSlug: string,\n): CollectionAfterChangeHook => {\n return async ({ context, doc, req }) => {\n if (context?.imageOptimizer_skip) return doc\n\n // Native re-uploads (focal point/crop changes): optimization was skipped in beforeChange.\n // Payload's native image-size regeneration handles everything.\n if (context?.imageOptimizer_nativeReupload) return doc\n\n // Use context flag from beforeChange instead of checking req.file.data directly.\n // Cloud storage adapters may consume req.file.data in their own afterChange hook\n // before ours runs, which would cause this guard to bail out and leave status as 'pending'.\n if (!context?.imageOptimizer_hasUpload) return doc\n\n const collectionConfig = req.payload.collections[collectionSlug as keyof typeof req.payload.collections].config\n const cloudStorage = isCloudStorage(collectionConfig)\n\n // When using local storage, overwrite the file on disk with the processed buffer.\n // Payload's uploadFiles step writes the original buffer; we replace it here.\n // When using cloud storage, skip — the cloud adapter's afterChange hook already\n // uploads the correct buffer from req.file.data (set in our beforeChange hook).\n if (!cloudStorage) {\n const staticDir = resolveStaticDir(collectionConfig)\n const processedBuffer = context.imageOptimizer_processedBuffer as Buffer | undefined\n if (processedBuffer && doc.filename && staticDir) {\n const safeFilename = path.basename(doc.filename as 
string)\n const filePath = path.join(staticDir, safeFilename)\n await fs.writeFile(filePath, processedBuffer)\n\n // If replaceOriginal changed the filename, clean up the old file Payload wrote\n const originalFilename = context.imageOptimizer_originalFilename as string | undefined\n if (originalFilename && originalFilename !== safeFilename) {\n const oldFilePath = path.join(staticDir, path.basename(originalFilename))\n await fs.unlink(oldFilePath).catch(() => {\n // Old file may not exist if Payload used the new filename\n })\n }\n }\n }\n\n // When status was already resolved in beforeChange (cloud storage, or\n // replaceOriginal with a single format), no async job or update is needed.\n // This avoids a separate update() call that fails with 404 on MongoDB due to\n // transaction isolation when cloud storage adapters are involved.\n if (context?.imageOptimizer_statusResolved) {\n return doc\n }\n\n // Queue async format conversion job for remaining variants (local storage only)\n await req.payload.jobs.queue({\n task: 'imageOptimizer_convertFormats',\n input: {\n collectionSlug,\n docId: String(doc.id),\n },\n })\n\n const runPromise = req.payload.jobs.run({ sequential: true }).catch((err: unknown) => {\n req.payload.logger.error({ err }, 'Image optimizer job runner failed')\n })\n waitUntil(runPromise, req)\n\n return doc\n 
}\n}\n"],"names":["fs","path","resolveStaticDir","isCloudStorage","waitUntil","createAfterChangeHook","resolvedConfig","collectionSlug","context","doc","req","imageOptimizer_skip","imageOptimizer_nativeReupload","imageOptimizer_hasUpload","collectionConfig","payload","collections","config","cloudStorage","staticDir","processedBuffer","imageOptimizer_processedBuffer","filename","safeFilename","basename","filePath","join","writeFile","originalFilename","imageOptimizer_originalFilename","oldFilePath","unlink","catch","imageOptimizer_statusResolved","jobs","queue","task","input","docId","String","id","runPromise","run","sequential","err","logger","error"],"mappings":"AAAA,OAAOA,QAAQ,cAAa;AAC5B,OAAOC,UAAU,OAAM;AAIvB,SAASC,gBAAgB,QAAQ,mCAAkC;AACnE,SAASC,cAAc,QAAQ,0BAAyB;AACxD,SAASC,SAAS,QAAQ,4BAA2B;AAErD,OAAO,MAAMC,wBAAwB,CACnCC,gBACAC;IAEA,OAAO,OAAO,EAAEC,OAAO,EAAEC,GAAG,EAAEC,GAAG,EAAE;QACjC,IAAIF,SAASG,qBAAqB,OAAOF;QAEzC,0FAA0F;QAC1F,+DAA+D;QAC/D,IAAID,SAASI,+BAA+B,OAAOH;QAEnD,iFAAiF;QACjF,iFAAiF;QACjF,4FAA4F;QAC5F,IAAI,CAACD,SAASK,0BAA0B,OAAOJ;QAE/C,MAAMK,mBAAmBJ,IAAIK,OAAO,CAACC,WAAW,CAACT,eAAuD,CAACU,MAAM;QAC/G,MAAMC,eAAef,eAAeW;QAEpC,kFAAkF;QAClF,6EAA6E;QAC7E,gFAAgF;QAChF,gFAAgF;QAChF,IAAI,CAACI,cAAc;YACjB,MAAMC,YAAYjB,iBAAiBY;YACnC,MAAMM,kBAAkBZ,QAAQa,8BAA8B;YAC9D,IAAID,mBAAmBX,IAAIa,QAAQ,IAAIH,WAAW;gBAChD,MAAMI,eAAetB,KAAKuB,QAAQ,CAACf,IAAIa,QAAQ;gBAC/C,MAAMG,WAAWxB,KAAKyB,IAAI,CAACP,WAAWI;gBACtC,MAAMvB,GAAG2B,SAAS,CAACF,UAAUL;gBAE7B,+EAA+E;gBAC/E,MAAMQ,mBAAmBpB,QAAQqB,+BAA+B;gBAChE,IAAID,oBAAoBA,qBAAqBL,cAAc;oBACzD,MAAMO,cAAc7B,KAAKyB,IAAI,CAACP,WAAWlB,KAAKuB,QAAQ,CAACI;oBACvD,MAAM5B,GAAG+B,MAAM,CAACD,aAAaE,KAAK,CAAC;oBACjC,0DAA0D;oBAC5D;gBACF;YACF;QACF;QAEA,sEAAsE;QACtE,2EAA2E;QAC3E,6EAA6E;QAC7E,kEAAkE;QAClE,IAAIxB,SAASyB,+BAA+B;YAC1C,OAAOxB;QACT;QAEA,gFAAgF;QAChF,MAAMC,IAAIK,OAAO,CAACmB,IAAI,CAACC,KAAK,CAAC;YAC3BC,MAAM;YACNC,OAAO;gBACL9B;gBACA+B,OAAOC,OAAO9B,IAAI+B,EAAE;YACtB;QACF;QAEA,MAAMC,aAAa/B,IAAIK,OAAO,CAACmB,IAAI,CAACQ,GAAG,CAAC;YAAEC,YAAY;QAAK,GAAGX,K
AAK,CAAC,CAACY;YACnElC,IAAIK,OAAO,CAAC8B,MAAM,CAACC,KAAK,CAAC;gBAAEF;YAAI,GAAG;QACpC;QACAxC,UAAUqC,YAAY/B;QAEtB,OAAOD;IACT;AACF,EAAC"}
@@ -1,11 +1,28 @@
1
1
  import path from 'path';
2
2
  import { resolveCollectionConfig } from '../defaults.js';
3
- import { convertFormat, generateThumbHash, stripAndResize } from '../processing/index.js';
3
+ import { generateThumbHash, optimizeImage } from '../processing/index.js';
4
4
  import { isCloudStorage } from '../utilities/storage.js';
5
5
  export const createBeforeChangeHook = (resolvedConfig, collectionSlug)=>{
6
6
  return async ({ context, data, originalDoc, req })=>{
7
7
  if (context?.imageOptimizer_skip) return data;
8
8
  if (!req.file || !req.file.data || !req.file.mimetype?.startsWith('image/')) return data;
9
+ // Detect re-upload triggered by Payload's shouldReupload() — focal point or crop change.
10
+ // shouldReupload re-fetches the stored (already-optimized) file and sets req.file.
11
+ // When re-fetching, Payload sets req.file.name to the stored filename verbatim
12
+ // (via getFileByPath or getExternalFile). For genuine user uploads, req.file.name
13
+ // comes from the user's filesystem and will differ from the stored filename.
14
+ // Skip redundant optimization; let Payload's native image-size regeneration handle cropping.
15
+ if (originalDoc) {
16
+ const existingFilename = originalDoc.filename;
17
+ if (existingFilename && req.file.name === existingFilename) {
18
+ const existingOptimizer = originalDoc.imageOptimizer;
19
+ if (existingOptimizer) {
20
+ data.imageOptimizer = existingOptimizer;
21
+ }
22
+ context.imageOptimizer_nativeReupload = true;
23
+ return data;
24
+ }
25
+ }
9
26
  // Apply custom filename strategy (seoFilename, uuidFilename, or user-provided).
10
27
  // The callback returns a stem (no extension) — we append the original extension here,
11
28
  // and replaceOriginal may swap it to the target format extension later.
@@ -25,22 +42,23 @@ export const createBeforeChangeHook = (resolvedConfig, collectionSlug)=>{
25
42
  }
26
43
  const originalSize = req.file.data.length;
27
44
  const perCollectionConfig = resolveCollectionConfig(resolvedConfig, collectionSlug);
28
- // Process in memory: strip EXIF, resize, generate blur
29
- const processed = await stripAndResize(req.file.data, perCollectionConfig.maxDimensions, resolvedConfig.stripMetadata);
45
+ // Single-pipeline optimization: resize + strip metadata + optional format conversion.
46
+ // Skips .rotate() Payload's generateFileData() already auto-rotated before hooks run.
47
+ const primaryFormat = perCollectionConfig.replaceOriginal && perCollectionConfig.formats.length > 0 ? perCollectionConfig.formats[0] : undefined;
48
+ const processed = await optimizeImage(req.file.data, {
49
+ maxDimensions: perCollectionConfig.maxDimensions,
50
+ stripMetadata: resolvedConfig.stripMetadata,
51
+ format: primaryFormat
52
+ });
30
53
  let finalBuffer = processed.buffer;
31
54
  let finalSize = processed.size;
32
- if (perCollectionConfig.replaceOriginal && perCollectionConfig.formats.length > 0) {
33
- // Convert to primary format (first in the formats array)
34
- const primaryFormat = perCollectionConfig.formats[0];
35
- const converted = await convertFormat(processed.buffer, primaryFormat.format, primaryFormat.quality);
36
- finalBuffer = converted.buffer;
37
- finalSize = converted.size;
55
+ if (primaryFormat && processed.mimeType) {
38
56
  // Update filename and mimeType so Payload stores the correct metadata
39
57
  const originalFilename = data.filename || req.file.name || '';
40
58
  const newFilename = `${path.parse(originalFilename).name}.${primaryFormat.format}`;
41
59
  context.imageOptimizer_originalFilename = originalFilename;
42
60
  data.filename = newFilename;
43
- data.mimeType = converted.mimeType;
61
+ data.mimeType = processed.mimeType;
44
62
  data.filesize = finalSize;
45
63
  }
46
64
  // Determine if async work (variant generation job) is needed after create.
@@ -60,7 +78,11 @@ export const createBeforeChangeHook = (resolvedConfig, collectionSlug)=>{
60
78
  if (!needsAsyncJob) {
61
79
  context.imageOptimizer_statusResolved = true;
62
80
  }
63
- if (resolvedConfig.generateThumbHash) {
81
+ // When no async job will run, compute ThumbHash now so it's included in the
82
+ // initial DB write. This avoids a separate update() call that would fail with
83
+ // 404 on MongoDB due to transaction isolation. When a job WILL run, the
84
+ // convertFormats task computes ThumbHash in the background instead.
85
+ if (resolvedConfig.generateThumbHash && !needsAsyncJob) {
64
86
  data.imageOptimizer.thumbHash = await generateThumbHash(finalBuffer);
65
87
  }
66
88
  // Write processed buffer back to req.file so cloud storage adapters
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/hooks/beforeChange.ts"],"sourcesContent":["import path from 'path'\nimport type { CollectionBeforeChangeHook } from 'payload'\n\nimport type { ResolvedImageOptimizerConfig } from '../types.js'\nimport { resolveCollectionConfig } from '../defaults.js'\nimport { convertFormat, generateThumbHash, stripAndResize } from '../processing/index.js'\nimport { isCloudStorage } from '../utilities/storage.js'\n\nexport const createBeforeChangeHook = (\n resolvedConfig: ResolvedImageOptimizerConfig,\n collectionSlug: string,\n): CollectionBeforeChangeHook => {\n return async ({ context, data, originalDoc, req }) => {\n if (context?.imageOptimizer_skip) return data\n\n if (!req.file || !req.file.data || !req.file.mimetype?.startsWith('image/')) return data\n\n // Apply custom filename strategy (seoFilename, uuidFilename, or user-provided).\n // The callback returns a stem (no extension) — we append the original extension here,\n // and replaceOriginal may swap it to the target format extension later.\n if (resolvedConfig.generateFilename) {\n const existingFilename = (originalDoc as Record<string, unknown> | undefined)?.filename as string | undefined\n const ext = path.extname(req.file.name)\n const stem = resolvedConfig.generateFilename({\n altText: (data as Record<string, unknown>).alt as string | undefined,\n originalFilename: req.file.name,\n mimeType: req.file.mimetype,\n collectionSlug,\n existingFilename,\n })\n const newFilename = `${stem}${ext}`\n req.file.name = newFilename\n data.filename = newFilename\n }\n\n const originalSize = req.file.data.length\n\n const perCollectionConfig = resolveCollectionConfig(resolvedConfig, collectionSlug)\n\n // Process in memory: strip EXIF, resize, generate blur\n const processed = await stripAndResize(\n req.file.data,\n perCollectionConfig.maxDimensions,\n resolvedConfig.stripMetadata,\n )\n\n let finalBuffer = processed.buffer\n let finalSize = processed.size\n\n if 
(perCollectionConfig.replaceOriginal && perCollectionConfig.formats.length > 0) {\n // Convert to primary format (first in the formats array)\n const primaryFormat = perCollectionConfig.formats[0]\n const converted = await convertFormat(processed.buffer, primaryFormat.format, primaryFormat.quality)\n\n finalBuffer = converted.buffer\n finalSize = converted.size\n\n // Update filename and mimeType so Payload stores the correct metadata\n const originalFilename = data.filename || req.file.name || ''\n const newFilename = `${path.parse(originalFilename).name}.${primaryFormat.format}`\n context.imageOptimizer_originalFilename = originalFilename\n data.filename = newFilename\n data.mimeType = converted.mimeType\n data.filesize = finalSize\n }\n\n // Determine if async work (variant generation job) is needed after create.\n // If not, set status to 'complete' now so afterChange doesn't need a separate\n // update() call — which fails with 404 on MongoDB due to transaction isolation\n // when cloud storage adapters are involved.\n const collectionConfig = req.payload.collections[collectionSlug as keyof typeof req.payload.collections].config\n const cloudStorage = isCloudStorage(collectionConfig)\n const needsAsyncJob = !cloudStorage && perCollectionConfig.formats.length > 0 && !(perCollectionConfig.replaceOriginal && perCollectionConfig.formats.length <= 1)\n\n data.imageOptimizer = {\n originalSize,\n optimizedSize: finalSize,\n status: needsAsyncJob ? 'pending' : 'complete',\n variants: needsAsyncJob ? 
undefined : [],\n error: null,\n }\n\n if (!needsAsyncJob) {\n context.imageOptimizer_statusResolved = true\n }\n\n if (resolvedConfig.generateThumbHash) {\n data.imageOptimizer.thumbHash = await generateThumbHash(finalBuffer)\n }\n\n // Write processed buffer back to req.file so cloud storage adapters\n // (which read req.file in their afterChange hook) upload the optimized version.\n // Payload's own uploadFiles step does NOT re-read req.file.data for its local\n // disk write, so we also store the buffer in context for our afterChange hook\n // to overwrite the local file when local storage is enabled.\n req.file.data = finalBuffer\n req.file.size = finalSize\n if (perCollectionConfig.replaceOriginal && perCollectionConfig.formats.length > 0) {\n req.file.name = data.filename\n req.file.mimetype = data.mimeType\n }\n context.imageOptimizer_processedBuffer = finalBuffer\n context.imageOptimizer_hasUpload = true\n\n return data\n }\n}\n"],"names":["path","resolveCollectionConfig","convertFormat","generateThumbHash","stripAndResize","isCloudStorage","createBeforeChangeHook","resolvedConfig","collectionSlug","context","data","originalDoc","req","imageOptimizer_skip","file","mimetype","startsWith","generateFilename","existingFilename","filename","ext","extname","name","stem","altText","alt","originalFilename","mimeType","newFilename","originalSize","length","perCollectionConfig","processed","maxDimensions","stripMetadata","finalBuffer","buffer","finalSize","size","replaceOriginal","formats","primaryFormat","converted","format","quality","parse","imageOptimizer_originalFilename","filesize","collectionConfig","payload","collections","config","cloudStorage","needsAsyncJob","imageOptimizer","optimizedSize","status","variants","undefined","error","imageOptimizer_statusResolved","thumbHash","imageOptimizer_processedBuffer","imageOptimizer_hasUpload"],"mappings":"AAAA,OAAOA,UAAU,OAAM;AAIvB,SAASC,uBAAuB,QAAQ,iBAAgB;AACxD,SAASC,aAAa,EAAEC,iBAAiB,EAAEC,cAAc,QAAQ,yBAAwB;AACzF,SA
ASC,cAAc,QAAQ,0BAAyB;AAExD,OAAO,MAAMC,yBAAyB,CACpCC,gBACAC;IAEA,OAAO,OAAO,EAAEC,OAAO,EAAEC,IAAI,EAAEC,WAAW,EAAEC,GAAG,EAAE;QAC/C,IAAIH,SAASI,qBAAqB,OAAOH;QAEzC,IAAI,CAACE,IAAIE,IAAI,IAAI,CAACF,IAAIE,IAAI,CAACJ,IAAI,IAAI,CAACE,IAAIE,IAAI,CAACC,QAAQ,EAAEC,WAAW,WAAW,OAAON;QAEpF,gFAAgF;QAChF,sFAAsF;QACtF,wEAAwE;QACxE,IAAIH,eAAeU,gBAAgB,EAAE;YACnC,MAAMC,mBAAoBP,aAAqDQ;YAC/E,MAAMC,MAAMpB,KAAKqB,OAAO,CAACT,IAAIE,IAAI,CAACQ,IAAI;YACtC,MAAMC,OAAOhB,eAAeU,gBAAgB,CAAC;gBAC3CO,SAAS,AAACd,KAAiCe,GAAG;gBAC9CC,kBAAkBd,IAAIE,IAAI,CAACQ,IAAI;gBAC/BK,UAAUf,IAAIE,IAAI,CAACC,QAAQ;gBAC3BP;gBACAU;YACF;YACA,MAAMU,cAAc,GAAGL,OAAOH,KAAK;YACnCR,IAAIE,IAAI,CAACQ,IAAI,GAAGM;YAChBlB,KAAKS,QAAQ,GAAGS;QAClB;QAEA,MAAMC,eAAejB,IAAIE,IAAI,CAACJ,IAAI,CAACoB,MAAM;QAEzC,MAAMC,sBAAsB9B,wBAAwBM,gBAAgBC;QAEpE,uDAAuD;QACvD,MAAMwB,YAAY,MAAM5B,eACtBQ,IAAIE,IAAI,CAACJ,IAAI,EACbqB,oBAAoBE,aAAa,EACjC1B,eAAe2B,aAAa;QAG9B,IAAIC,cAAcH,UAAUI,MAAM;QAClC,IAAIC,YAAYL,UAAUM,IAAI;QAE9B,IAAIP,oBAAoBQ,eAAe,IAAIR,oBAAoBS,OAAO,CAACV,MAAM,GAAG,GAAG;YACjF,yDAAyD;YACzD,MAAMW,gBAAgBV,oBAAoBS,OAAO,CAAC,EAAE;YACpD,MAAME,YAAY,MAAMxC,cAAc8B,UAAUI,MAAM,EAAEK,cAAcE,MAAM,EAAEF,cAAcG,OAAO;YAEnGT,cAAcO,UAAUN,MAAM;YAC9BC,YAAYK,UAAUJ,IAAI;YAE1B,sEAAsE;YACtE,MAAMZ,mBAAmBhB,KAAKS,QAAQ,IAAIP,IAAIE,IAAI,CAACQ,IAAI,IAAI;YAC3D,MAAMM,cAAc,GAAG5B,KAAK6C,KAAK,CAACnB,kBAAkBJ,IAAI,CAAC,CAAC,EAAEmB,cAAcE,MAAM,EAAE;YAClFlC,QAAQqC,+BAA+B,GAAGpB;YAC1ChB,KAAKS,QAAQ,GAAGS;YAChBlB,KAAKiB,QAAQ,GAAGe,UAAUf,QAAQ;YAClCjB,KAAKqC,QAAQ,GAAGV;QAClB;QAEA,2EAA2E;QAC3E,8EAA8E;QAC9E,+EAA+E;QAC/E,4CAA4C;QAC5C,MAAMW,mBAAmBpC,IAAIqC,OAAO,CAACC,WAAW,CAAC1C,eAAuD,CAAC2C,MAAM;QAC/G,MAAMC,eAAe/C,eAAe2C;QACpC,MAAMK,gBAAgB,CAACD,gBAAgBrB,oBAAoBS,OAAO,CAACV,MAAM,GAAG,KAAK,CAAEC,CAAAA,oBAAoBQ,eAAe,IAAIR,oBAAoBS,OAAO,CAACV,MAAM,IAAI,CAAA;QAEhKpB,KAAK4C,cAAc,GAAG;YACpBzB;YACA0B,eAAelB;YACfmB,QAAQH,gBAAgB,YAAY;YACpCI,UAAUJ,gBAAgBK,YAAY,EAAE;YACxCC,OAAO;QACT;QAEA,IAAI,CAACN,eAAe;YAClB5C,QAAQmD,6BAA6B,GAAG;QAC1C;QAEA,IAAIrD,eAAeJ,iBAAiB,EAAE;YACpCO,KAAK4C,cAAc,CAACO,SAAS,GAAG,MAAM1D,
kBAAkBgC;QAC1D;QAEA,oEAAoE;QACpE,gFAAgF;QAChF,8EAA8E;QAC9E,8EAA8E;QAC9E,6DAA6D;QAC7DvB,IAAIE,IAAI,CAACJ,IAAI,GAAGyB;QAChBvB,IAAIE,IAAI,CAACwB,IAAI,GAAGD;QAChB,IAAIN,oBAAoBQ,eAAe,IAAIR,oBAAoBS,OAAO,CAACV,MAAM,GAAG,GAAG;YACjFlB,IAAIE,IAAI,CAACQ,IAAI,GAAGZ,KAAKS,QAAQ;YAC7BP,IAAIE,IAAI,CAACC,QAAQ,GAAGL,KAAKiB,QAAQ;QACnC;QACAlB,QAAQqD,8BAA8B,GAAG3B;QACzC1B,QAAQsD,wBAAwB,GAAG;QAEnC,OAAOrD;IACT;AACF,EAAC"}
1
+ {"version":3,"sources":["../../src/hooks/beforeChange.ts"],"sourcesContent":["import path from 'path'\nimport type { CollectionBeforeChangeHook } from 'payload'\n\nimport type { ResolvedImageOptimizerConfig } from '../types.js'\nimport { resolveCollectionConfig } from '../defaults.js'\nimport { generateThumbHash, optimizeImage } from '../processing/index.js'\nimport { isCloudStorage } from '../utilities/storage.js'\n\nexport const createBeforeChangeHook = (\n resolvedConfig: ResolvedImageOptimizerConfig,\n collectionSlug: string,\n): CollectionBeforeChangeHook => {\n return async ({ context, data, originalDoc, req }) => {\n if (context?.imageOptimizer_skip) return data\n\n if (!req.file || !req.file.data || !req.file.mimetype?.startsWith('image/')) return data\n\n // Detect re-upload triggered by Payload's shouldReupload() — focal point or crop change.\n // shouldReupload re-fetches the stored (already-optimized) file and sets req.file.\n // When re-fetching, Payload sets req.file.name to the stored filename verbatim\n // (via getFileByPath or getExternalFile). 
For genuine user uploads, req.file.name\n // comes from the user's filesystem and will differ from the stored filename.\n // Skip redundant optimization; let Payload's native image-size regeneration handle cropping.\n if (originalDoc) {\n const existingFilename = (originalDoc as Record<string, unknown>).filename as string | undefined\n\n if (existingFilename && req.file.name === existingFilename) {\n const existingOptimizer = (originalDoc as Record<string, unknown>).imageOptimizer\n if (existingOptimizer) {\n data.imageOptimizer = existingOptimizer as typeof data.imageOptimizer\n }\n context.imageOptimizer_nativeReupload = true\n return data\n }\n }\n\n // Apply custom filename strategy (seoFilename, uuidFilename, or user-provided).\n // The callback returns a stem (no extension) — we append the original extension here,\n // and replaceOriginal may swap it to the target format extension later.\n if (resolvedConfig.generateFilename) {\n const existingFilename = (originalDoc as Record<string, unknown> | undefined)?.filename as string | undefined\n const ext = path.extname(req.file.name)\n const stem = resolvedConfig.generateFilename({\n altText: (data as Record<string, unknown>).alt as string | undefined,\n originalFilename: req.file.name,\n mimeType: req.file.mimetype,\n collectionSlug,\n existingFilename,\n })\n const newFilename = `${stem}${ext}`\n req.file.name = newFilename\n data.filename = newFilename\n }\n\n const originalSize = req.file.data.length\n\n const perCollectionConfig = resolveCollectionConfig(resolvedConfig, collectionSlug)\n\n // Single-pipeline optimization: resize + strip metadata + optional format conversion.\n // Skips .rotate() Payload's generateFileData() already auto-rotated before hooks run.\n const primaryFormat = perCollectionConfig.replaceOriginal && perCollectionConfig.formats.length > 0\n ? 
perCollectionConfig.formats[0]\n : undefined\n\n const processed = await optimizeImage(req.file.data, {\n maxDimensions: perCollectionConfig.maxDimensions,\n stripMetadata: resolvedConfig.stripMetadata,\n format: primaryFormat,\n })\n\n let finalBuffer = processed.buffer\n let finalSize = processed.size\n\n if (primaryFormat && processed.mimeType) {\n // Update filename and mimeType so Payload stores the correct metadata\n const originalFilename = data.filename || req.file.name || ''\n const newFilename = `${path.parse(originalFilename).name}.${primaryFormat.format}`\n context.imageOptimizer_originalFilename = originalFilename\n data.filename = newFilename\n data.mimeType = processed.mimeType\n data.filesize = finalSize\n }\n\n // Determine if async work (variant generation job) is needed after create.\n // If not, set status to 'complete' now so afterChange doesn't need a separate\n // update() call — which fails with 404 on MongoDB due to transaction isolation\n // when cloud storage adapters are involved.\n const collectionConfig = req.payload.collections[collectionSlug as keyof typeof req.payload.collections].config\n const cloudStorage = isCloudStorage(collectionConfig)\n const needsAsyncJob = !cloudStorage && perCollectionConfig.formats.length > 0 && !(perCollectionConfig.replaceOriginal && perCollectionConfig.formats.length <= 1)\n\n data.imageOptimizer = {\n originalSize,\n optimizedSize: finalSize,\n status: needsAsyncJob ? 'pending' : 'complete',\n variants: needsAsyncJob ? undefined : [],\n error: null,\n }\n\n if (!needsAsyncJob) {\n context.imageOptimizer_statusResolved = true\n }\n\n // When no async job will run, compute ThumbHash now so it's included in the\n // initial DB write. This avoids a separate update() call that would fail with\n // 404 on MongoDB due to transaction isolation. 
When a job WILL run, the\n // convertFormats task computes ThumbHash in the background instead.\n if (resolvedConfig.generateThumbHash && !needsAsyncJob) {\n data.imageOptimizer.thumbHash = await generateThumbHash(finalBuffer)\n }\n\n // Write processed buffer back to req.file so cloud storage adapters\n // (which read req.file in their afterChange hook) upload the optimized version.\n // Payload's own uploadFiles step does NOT re-read req.file.data for its local\n // disk write, so we also store the buffer in context for our afterChange hook\n // to overwrite the local file when local storage is enabled.\n req.file.data = finalBuffer\n req.file.size = finalSize\n if (perCollectionConfig.replaceOriginal && perCollectionConfig.formats.length > 0) {\n req.file.name = data.filename\n req.file.mimetype = data.mimeType\n }\n context.imageOptimizer_processedBuffer = finalBuffer\n context.imageOptimizer_hasUpload = true\n\n return data\n }\n}\n"],"names":["path","resolveCollectionConfig","generateThumbHash","optimizeImage","isCloudStorage","createBeforeChangeHook","resolvedConfig","collectionSlug","context","data","originalDoc","req","imageOptimizer_skip","file","mimetype","startsWith","existingFilename","filename","name","existingOptimizer","imageOptimizer","imageOptimizer_nativeReupload","generateFilename","ext","extname","stem","altText","alt","originalFilename","mimeType","newFilename","originalSize","length","perCollectionConfig","primaryFormat","replaceOriginal","formats","undefined","processed","maxDimensions","stripMetadata","format","finalBuffer","buffer","finalSize","size","parse","imageOptimizer_originalFilename","filesize","collectionConfig","payload","collections","config","cloudStorage","needsAsyncJob","optimizedSize","status","variants","error","imageOptimizer_statusResolved","thumbHash","imageOptimizer_processedBuffer","imageOptimizer_hasUpload"],"mappings":"AAAA,OAAOA,UAAU,OAAM;AAIvB,SAASC,uBAAuB,QAAQ,iBAAgB;AACxD,SAASC,iBAAiB,EAAEC,aAAa,QAAQ,yBAAwB;AACzE,
SAASC,cAAc,QAAQ,0BAAyB;AAExD,OAAO,MAAMC,yBAAyB,CACpCC,gBACAC;IAEA,OAAO,OAAO,EAAEC,OAAO,EAAEC,IAAI,EAAEC,WAAW,EAAEC,GAAG,EAAE;QAC/C,IAAIH,SAASI,qBAAqB,OAAOH;QAEzC,IAAI,CAACE,IAAIE,IAAI,IAAI,CAACF,IAAIE,IAAI,CAACJ,IAAI,IAAI,CAACE,IAAIE,IAAI,CAACC,QAAQ,EAAEC,WAAW,WAAW,OAAON;QAEpF,yFAAyF;QACzF,mFAAmF;QACnF,+EAA+E;QAC/E,kFAAkF;QAClF,6EAA6E;QAC7E,6FAA6F;QAC7F,IAAIC,aAAa;YACf,MAAMM,mBAAmB,AAACN,YAAwCO,QAAQ;YAE1E,IAAID,oBAAoBL,IAAIE,IAAI,CAACK,IAAI,KAAKF,kBAAkB;gBAC1D,MAAMG,oBAAoB,AAACT,YAAwCU,cAAc;gBACjF,IAAID,mBAAmB;oBACrBV,KAAKW,cAAc,GAAGD;gBACxB;gBACAX,QAAQa,6BAA6B,GAAG;gBACxC,OAAOZ;YACT;QACF;QAEA,gFAAgF;QAChF,sFAAsF;QACtF,wEAAwE;QACxE,IAAIH,eAAegB,gBAAgB,EAAE;YACnC,MAAMN,mBAAoBN,aAAqDO;YAC/E,MAAMM,MAAMvB,KAAKwB,OAAO,CAACb,IAAIE,IAAI,CAACK,IAAI;YACtC,MAAMO,OAAOnB,eAAegB,gBAAgB,CAAC;gBAC3CI,SAAS,AAACjB,KAAiCkB,GAAG;gBAC9CC,kBAAkBjB,IAAIE,IAAI,CAACK,IAAI;gBAC/BW,UAAUlB,IAAIE,IAAI,CAACC,QAAQ;gBAC3BP;gBACAS;YACF;YACA,MAAMc,cAAc,GAAGL,OAAOF,KAAK;YACnCZ,IAAIE,IAAI,CAACK,IAAI,GAAGY;YAChBrB,KAAKQ,QAAQ,GAAGa;QAClB;QAEA,MAAMC,eAAepB,IAAIE,IAAI,CAACJ,IAAI,CAACuB,MAAM;QAEzC,MAAMC,sBAAsBhC,wBAAwBK,gBAAgBC;QAEpE,sFAAsF;QACtF,wFAAwF;QACxF,MAAM2B,gBAAgBD,oBAAoBE,eAAe,IAAIF,oBAAoBG,OAAO,CAACJ,MAAM,GAAG,IAC9FC,oBAAoBG,OAAO,CAAC,EAAE,GAC9BC;QAEJ,MAAMC,YAAY,MAAMnC,cAAcQ,IAAIE,IAAI,CAACJ,IAAI,EAAE;YACnD8B,eAAeN,oBAAoBM,aAAa;YAChDC,eAAelC,eAAekC,aAAa;YAC3CC,QAAQP;QACV;QAEA,IAAIQ,cAAcJ,UAAUK,MAAM;QAClC,IAAIC,YAAYN,UAAUO,IAAI;QAE9B,IAAIX,iBAAiBI,UAAUT,QAAQ,EAAE;YACvC,sEAAsE;YACtE,MAAMD,mBAAmBnB,KAAKQ,QAAQ,IAAIN,IAAIE,IAAI,CAACK,IAAI,IAAI;YAC3D,MAAMY,cAAc,GAAG9B,KAAK8C,KAAK,CAAClB,kBAAkBV,IAAI,CAAC,CAAC,EAAEgB,cAAcO,MAAM,EAAE;YAClFjC,QAAQuC,+BAA+B,GAAGnB;YAC1CnB,KAAKQ,QAAQ,GAAGa;YAChBrB,KAAKoB,QAAQ,GAAGS,UAAUT,QAAQ;YAClCpB,KAAKuC,QAAQ,GAAGJ;QAClB;QAEA,2EAA2E;QAC3E,8EAA8E;QAC9E,+EAA+E;QAC/E,4CAA4C;QAC5C,MAAMK,mBAAmBtC,IAAIuC,OAAO,CAACC,WAAW,CAAC5C,eAAuD,CAAC6C,MAAM;QAC/G,MAAMC,eAAejD,eAAe6C;QACpC,MAAMK,gBAAgB,CAACD,gBAAgBpB,oBAAoBG,OAAO,CAACJ,MAAM,GAAG,KAAK,CAAEC,CAAAA,oBAAoBE,eAAe,IAAIF,oBAAoBG
,OAAO,CAACJ,MAAM,IAAI,CAAA;QAEhKvB,KAAKW,cAAc,GAAG;YACpBW;YACAwB,eAAeX;YACfY,QAAQF,gBAAgB,YAAY;YACpCG,UAAUH,gBAAgBjB,YAAY,EAAE;YACxCqB,OAAO;QACT;QAEA,IAAI,CAACJ,eAAe;YAClB9C,QAAQmD,6BAA6B,GAAG;QAC1C;QAEA,4EAA4E;QAC5E,8EAA8E;QAC9E,wEAAwE;QACxE,oEAAoE;QACpE,IAAIrD,eAAeJ,iBAAiB,IAAI,CAACoD,eAAe;YACtD7C,KAAKW,cAAc,CAACwC,SAAS,GAAG,MAAM1D,kBAAkBwC;QAC1D;QAEA,oEAAoE;QACpE,gFAAgF;QAChF,8EAA8E;QAC9E,8EAA8E;QAC9E,6DAA6D;QAC7D/B,IAAIE,IAAI,CAACJ,IAAI,GAAGiC;QAChB/B,IAAIE,IAAI,CAACgC,IAAI,GAAGD;QAChB,IAAIX,oBAAoBE,eAAe,IAAIF,oBAAoBG,OAAO,CAACJ,MAAM,GAAG,GAAG;YACjFrB,IAAIE,IAAI,CAACK,IAAI,GAAGT,KAAKQ,QAAQ;YAC7BN,IAAIE,IAAI,CAACC,QAAQ,GAAGL,KAAKoB,QAAQ;QACnC;QACArB,QAAQqD,8BAA8B,GAAGnB;QACzClC,QAAQsD,wBAAwB,GAAG;QAEnC,OAAOrD;IACT;AACF,EAAC"}
@@ -8,6 +8,27 @@ export declare function stripAndResize(buffer: Buffer, maxDimensions: {
8
8
  size: number;
9
9
  }>;
10
10
  export declare function generateThumbHash(buffer: Buffer): Promise<string>;
11
+ /**
12
+ * Single-pipeline image optimization: resize + metadata strip + optional format conversion.
13
+ * Skips .rotate() because Payload's generateFileData() already auto-rotates before hooks run.
14
+ */
15
+ export declare function optimizeImage(buffer: Buffer, options: {
16
+ maxDimensions: {
17
+ width: number;
18
+ height: number;
19
+ };
20
+ stripMetadata: boolean;
21
+ format?: {
22
+ format: 'webp' | 'avif';
23
+ quality: number;
24
+ };
25
+ }): Promise<{
26
+ buffer: Buffer;
27
+ width: number;
28
+ height: number;
29
+ size: number;
30
+ mimeType?: string;
31
+ }>;
11
32
  export declare function convertFormat(buffer: Buffer, format: 'webp' | 'avif', quality: number): Promise<{
12
33
  buffer: Buffer;
13
34
  width: number;
@@ -27,6 +27,35 @@ export async function generateThumbHash(buffer) {
27
27
  const thumbHash = rgbaToThumbHash(info.width, info.height, data);
28
28
  return Buffer.from(thumbHash).toString('base64');
29
29
  }
30
+ /**
31
+ * Single-pipeline image optimization: resize + metadata strip + optional format conversion.
32
+ * Skips .rotate() because Payload's generateFileData() already auto-rotates before hooks run.
33
+ */ export async function optimizeImage(buffer, options) {
34
+ let pipeline = sharp(buffer).resize(options.maxDimensions.width, options.maxDimensions.height, {
35
+ fit: 'inside',
36
+ withoutEnlargement: true
37
+ });
38
+ if (!options.stripMetadata) {
39
+ pipeline = pipeline.keepMetadata();
40
+ }
41
+ if (options.format) {
42
+ pipeline = pipeline.toFormat(options.format.format, {
43
+ quality: options.format.quality
44
+ });
45
+ }
46
+ const { data, info } = await pipeline.toBuffer({
47
+ resolveWithObject: true
48
+ });
49
+ return {
50
+ buffer: data,
51
+ width: info.width,
52
+ height: info.height,
53
+ size: info.size,
54
+ ...options.format && {
55
+ mimeType: options.format.format === 'webp' ? 'image/webp' : 'image/avif'
56
+ }
57
+ };
58
+ }
30
59
  export async function convertFormat(buffer, format, quality) {
31
60
  const { data, info } = await sharp(buffer).toFormat(format, {
32
61
  quality
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/processing/index.ts"],"sourcesContent":["import sharp from 'sharp'\nimport { rgbaToThumbHash } from 'thumbhash'\n\nexport async function stripAndResize(\n buffer: Buffer,\n maxDimensions: { width: number; height: number },\n stripMetadata: boolean,\n): Promise<{ buffer: Buffer; width: number; height: number; size: number }> {\n let pipeline = sharp(buffer)\n .rotate()\n .resize(maxDimensions.width, maxDimensions.height, {\n fit: 'inside',\n withoutEnlargement: true,\n })\n\n if (!stripMetadata) {\n pipeline = pipeline.keepMetadata()\n }\n\n const { data, info } = await pipeline.toBuffer({ resolveWithObject: true })\n\n return {\n buffer: data,\n width: info.width,\n height: info.height,\n size: info.size,\n }\n}\n\nexport async function generateThumbHash(buffer: Buffer): Promise<string> {\n const { data, info } = await sharp(buffer)\n .resize(100, 100, { fit: 'inside' })\n .raw()\n .ensureAlpha()\n .toBuffer({ resolveWithObject: true })\n\n const thumbHash = rgbaToThumbHash(info.width, info.height, data)\n return Buffer.from(thumbHash).toString('base64')\n}\n\nexport async function convertFormat(\n buffer: Buffer,\n format: 'webp' | 'avif',\n quality: number,\n): Promise<{ buffer: Buffer; width: number; height: number; size: number; mimeType: string }> {\n const { data, info } = await sharp(buffer)\n .toFormat(format, { quality })\n .toBuffer({ resolveWithObject: true })\n\n const mimeType = format === 'webp' ? 
'image/webp' : 'image/avif'\n\n return {\n buffer: data,\n width: info.width,\n height: info.height,\n size: info.size,\n mimeType,\n }\n}\n"],"names":["sharp","rgbaToThumbHash","stripAndResize","buffer","maxDimensions","stripMetadata","pipeline","rotate","resize","width","height","fit","withoutEnlargement","keepMetadata","data","info","toBuffer","resolveWithObject","size","generateThumbHash","raw","ensureAlpha","thumbHash","Buffer","from","toString","convertFormat","format","quality","toFormat","mimeType"],"mappings":"AAAA,OAAOA,WAAW,QAAO;AACzB,SAASC,eAAe,QAAQ,YAAW;AAE3C,OAAO,eAAeC,eACpBC,MAAc,EACdC,aAAgD,EAChDC,aAAsB;IAEtB,IAAIC,WAAWN,MAAMG,QAClBI,MAAM,GACNC,MAAM,CAACJ,cAAcK,KAAK,EAAEL,cAAcM,MAAM,EAAE;QACjDC,KAAK;QACLC,oBAAoB;IACtB;IAEF,IAAI,CAACP,eAAe;QAClBC,WAAWA,SAASO,YAAY;IAClC;IAEA,MAAM,EAAEC,IAAI,EAAEC,IAAI,EAAE,GAAG,MAAMT,SAASU,QAAQ,CAAC;QAAEC,mBAAmB;IAAK;IAEzE,OAAO;QACLd,QAAQW;QACRL,OAAOM,KAAKN,KAAK;QACjBC,QAAQK,KAAKL,MAAM;QACnBQ,MAAMH,KAAKG,IAAI;IACjB;AACF;AAEA,OAAO,eAAeC,kBAAkBhB,MAAc;IACpD,MAAM,EAAEW,IAAI,EAAEC,IAAI,EAAE,GAAG,MAAMf,MAAMG,QAChCK,MAAM,CAAC,KAAK,KAAK;QAAEG,KAAK;IAAS,GACjCS,GAAG,GACHC,WAAW,GACXL,QAAQ,CAAC;QAAEC,mBAAmB;IAAK;IAEtC,MAAMK,YAAYrB,gBAAgBc,KAAKN,KAAK,EAAEM,KAAKL,MAAM,EAAEI;IAC3D,OAAOS,OAAOC,IAAI,CAACF,WAAWG,QAAQ,CAAC;AACzC;AAEA,OAAO,eAAeC,cACpBvB,MAAc,EACdwB,MAAuB,EACvBC,OAAe;IAEf,MAAM,EAAEd,IAAI,EAAEC,IAAI,EAAE,GAAG,MAAMf,MAAMG,QAChC0B,QAAQ,CAACF,QAAQ;QAAEC;IAAQ,GAC3BZ,QAAQ,CAAC;QAAEC,mBAAmB;IAAK;IAEtC,MAAMa,WAAWH,WAAW,SAAS,eAAe;IAEpD,OAAO;QACLxB,QAAQW;QACRL,OAAOM,KAAKN,KAAK;QACjBC,QAAQK,KAAKL,MAAM;QACnBQ,MAAMH,KAAKG,IAAI;QACfY;IACF;AACF"}
1
+ {"version":3,"sources":["../../src/processing/index.ts"],"sourcesContent":["import sharp from 'sharp'\nimport { rgbaToThumbHash } from 'thumbhash'\n\nexport async function stripAndResize(\n buffer: Buffer,\n maxDimensions: { width: number; height: number },\n stripMetadata: boolean,\n): Promise<{ buffer: Buffer; width: number; height: number; size: number }> {\n let pipeline = sharp(buffer)\n .rotate()\n .resize(maxDimensions.width, maxDimensions.height, {\n fit: 'inside',\n withoutEnlargement: true,\n })\n\n if (!stripMetadata) {\n pipeline = pipeline.keepMetadata()\n }\n\n const { data, info } = await pipeline.toBuffer({ resolveWithObject: true })\n\n return {\n buffer: data,\n width: info.width,\n height: info.height,\n size: info.size,\n }\n}\n\nexport async function generateThumbHash(buffer: Buffer): Promise<string> {\n const { data, info } = await sharp(buffer)\n .resize(100, 100, { fit: 'inside' })\n .raw()\n .ensureAlpha()\n .toBuffer({ resolveWithObject: true })\n\n const thumbHash = rgbaToThumbHash(info.width, info.height, data)\n return Buffer.from(thumbHash).toString('base64')\n}\n\n/**\n * Single-pipeline image optimization: resize + metadata strip + optional format conversion.\n * Skips .rotate() because Payload's generateFileData() already auto-rotates before hooks run.\n */\nexport async function optimizeImage(\n buffer: Buffer,\n options: {\n maxDimensions: { width: number; height: number }\n stripMetadata: boolean\n format?: { format: 'webp' | 'avif'; quality: number }\n },\n): Promise<{ buffer: Buffer; width: number; height: number; size: number; mimeType?: string }> {\n let pipeline = sharp(buffer)\n .resize(options.maxDimensions.width, options.maxDimensions.height, {\n fit: 'inside',\n withoutEnlargement: true,\n })\n\n if (!options.stripMetadata) {\n pipeline = pipeline.keepMetadata()\n }\n\n if (options.format) {\n pipeline = pipeline.toFormat(options.format.format, { quality: options.format.quality })\n }\n\n const { data, info } = await 
pipeline.toBuffer({ resolveWithObject: true })\n\n return {\n buffer: data,\n width: info.width,\n height: info.height,\n size: info.size,\n ...(options.format && {\n mimeType: options.format.format === 'webp' ? 'image/webp' : 'image/avif',\n }),\n }\n}\n\nexport async function convertFormat(\n buffer: Buffer,\n format: 'webp' | 'avif',\n quality: number,\n): Promise<{ buffer: Buffer; width: number; height: number; size: number; mimeType: string }> {\n const { data, info } = await sharp(buffer)\n .toFormat(format, { quality })\n .toBuffer({ resolveWithObject: true })\n\n const mimeType = format === 'webp' ? 'image/webp' : 'image/avif'\n\n return {\n buffer: data,\n width: info.width,\n height: info.height,\n size: info.size,\n mimeType,\n }\n}\n"],"names":["sharp","rgbaToThumbHash","stripAndResize","buffer","maxDimensions","stripMetadata","pipeline","rotate","resize","width","height","fit","withoutEnlargement","keepMetadata","data","info","toBuffer","resolveWithObject","size","generateThumbHash","raw","ensureAlpha","thumbHash","Buffer","from","toString","optimizeImage","options","format","toFormat","quality","mimeType","convertFormat"],"mappings":"AAAA,OAAOA,WAAW,QAAO;AACzB,SAASC,eAAe,QAAQ,YAAW;AAE3C,OAAO,eAAeC,eACpBC,MAAc,EACdC,aAAgD,EAChDC,aAAsB;IAEtB,IAAIC,WAAWN,MAAMG,QAClBI,MAAM,GACNC,MAAM,CAACJ,cAAcK,KAAK,EAAEL,cAAcM,MAAM,EAAE;QACjDC,KAAK;QACLC,oBAAoB;IACtB;IAEF,IAAI,CAACP,eAAe;QAClBC,WAAWA,SAASO,YAAY;IAClC;IAEA,MAAM,EAAEC,IAAI,EAAEC,IAAI,EAAE,GAAG,MAAMT,SAASU,QAAQ,CAAC;QAAEC,mBAAmB;IAAK;IAEzE,OAAO;QACLd,QAAQW;QACRL,OAAOM,KAAKN,KAAK;QACjBC,QAAQK,KAAKL,MAAM;QACnBQ,MAAMH,KAAKG,IAAI;IACjB;AACF;AAEA,OAAO,eAAeC,kBAAkBhB,MAAc;IACpD,MAAM,EAAEW,IAAI,EAAEC,IAAI,EAAE,GAAG,MAAMf,MAAMG,QAChCK,MAAM,CAAC,KAAK,KAAK;QAAEG,KAAK;IAAS,GACjCS,GAAG,GACHC,WAAW,GACXL,QAAQ,CAAC;QAAEC,mBAAmB;IAAK;IAEtC,MAAMK,YAAYrB,gBAAgBc,KAAKN,KAAK,EAAEM,KAAKL,MAAM,EAAEI;IAC3D,OAAOS,OAAOC,IAAI,CAACF,WAAWG,QAAQ,CAAC;AACzC;AAEA;;;CAGC,GACD,OAAO,eAAeC,cACpBvB,MAAc,EACdwB,OAIC;IAED,IAAIrB,WAAWN,MAAMG,QAC
lBK,MAAM,CAACmB,QAAQvB,aAAa,CAACK,KAAK,EAAEkB,QAAQvB,aAAa,CAACM,MAAM,EAAE;QACjEC,KAAK;QACLC,oBAAoB;IACtB;IAEF,IAAI,CAACe,QAAQtB,aAAa,EAAE;QAC1BC,WAAWA,SAASO,YAAY;IAClC;IAEA,IAAIc,QAAQC,MAAM,EAAE;QAClBtB,WAAWA,SAASuB,QAAQ,CAACF,QAAQC,MAAM,CAACA,MAAM,EAAE;YAAEE,SAASH,QAAQC,MAAM,CAACE,OAAO;QAAC;IACxF;IAEA,MAAM,EAAEhB,IAAI,EAAEC,IAAI,EAAE,GAAG,MAAMT,SAASU,QAAQ,CAAC;QAAEC,mBAAmB;IAAK;IAEzE,OAAO;QACLd,QAAQW;QACRL,OAAOM,KAAKN,KAAK;QACjBC,QAAQK,KAAKL,MAAM;QACnBQ,MAAMH,KAAKG,IAAI;QACf,GAAIS,QAAQC,MAAM,IAAI;YACpBG,UAAUJ,QAAQC,MAAM,CAACA,MAAM,KAAK,SAAS,eAAe;QAC9D,CAAC;IACH;AACF;AAEA,OAAO,eAAeI,cACpB7B,MAAc,EACdyB,MAAuB,EACvBE,OAAe;IAEf,MAAM,EAAEhB,IAAI,EAAEC,IAAI,EAAE,GAAG,MAAMf,MAAMG,QAChC0B,QAAQ,CAACD,QAAQ;QAAEE;IAAQ,GAC3Bd,QAAQ,CAAC;QAAEC,mBAAmB;IAAK;IAEtC,MAAMc,WAAWH,WAAW,SAAS,eAAe;IAEpD,OAAO;QACLzB,QAAQW;QACRL,OAAOM,KAAKN,KAAK;QACjBC,QAAQK,KAAKL,MAAM;QACnBQ,MAAMH,KAAKG,IAAI;QACfa;IACF;AACF"}
@@ -1,7 +1,7 @@
1
1
  import fs from 'fs/promises';
2
2
  import path from 'path';
3
3
  import { resolveCollectionConfig } from '../defaults.js';
4
- import { convertFormat } from '../processing/index.js';
4
+ import { convertFormat, generateThumbHash } from '../processing/index.js';
5
5
  import { resolveStaticDir } from '../utilities/resolveStaticDir.js';
6
6
  import { fetchFileBuffer, isCloudStorage } from '../utilities/storage.js';
7
7
  export const createConvertFormatsHandler = (resolvedConfig)=>{
@@ -48,11 +48,11 @@ export const createConvertFormatsHandler = (resolvedConfig)=>{
48
48
  // skip it and only generate variants for the remaining formats.
49
49
  const formatsToGenerate = perCollectionConfig.replaceOriginal && perCollectionConfig.formats.length > 0 ? perCollectionConfig.formats.slice(1) : perCollectionConfig.formats;
50
50
  const safeFilename = path.basename(doc.filename);
51
- for (const format of formatsToGenerate){
51
+ const variantResults = await Promise.all(formatsToGenerate.map(async (format)=>{
52
52
  const result = await convertFormat(fileBuffer, format.format, format.quality);
53
53
  const variantFilename = `${path.parse(safeFilename).name}-optimized.${format.format}`;
54
54
  await fs.writeFile(path.join(staticDir, variantFilename), result.buffer);
55
- variants.push({
55
+ return {
56
56
  format: format.format,
57
57
  filename: variantFilename,
58
58
  filesize: result.size,
@@ -60,7 +60,13 @@ export const createConvertFormatsHandler = (resolvedConfig)=>{
60
60
  height: result.height,
61
61
  mimeType: result.mimeType,
62
62
  url: `/api/${input.collectionSlug}/file/${variantFilename}`
63
- });
63
+ };
64
+ }));
65
+ variants.push(...variantResults);
66
+ // Compute ThumbHash in the background job to avoid blocking the sync save path
67
+ let thumbHash;
68
+ if (resolvedConfig.generateThumbHash) {
69
+ thumbHash = await generateThumbHash(fileBuffer);
64
70
  }
65
71
  await req.payload.update({
66
72
  collection: input.collectionSlug,
@@ -70,6 +76,7 @@ export const createConvertFormatsHandler = (resolvedConfig)=>{
70
76
  ...doc.imageOptimizer,
71
77
  status: 'complete',
72
78
  variants,
79
+ thumbHash,
73
80
  error: null
74
81
  }
75
82
  },
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/tasks/convertFormats.ts"],"sourcesContent":["import fs from 'fs/promises'\nimport path from 'path'\n\nimport type { CollectionSlug } from 'payload'\n\nimport type { ResolvedImageOptimizerConfig } from '../types.js'\nimport { resolveCollectionConfig } from '../defaults.js'\nimport { convertFormat } from '../processing/index.js'\nimport { resolveStaticDir } from '../utilities/resolveStaticDir.js'\nimport { fetchFileBuffer, isCloudStorage } from '../utilities/storage.js'\n\nexport const createConvertFormatsHandler = (resolvedConfig: ResolvedImageOptimizerConfig) => {\n return async ({ input, req }: { input: { collectionSlug: string; docId: string }; req: any }) => {\n try {\n const doc = await req.payload.findByID({\n collection: input.collectionSlug as CollectionSlug,\n id: input.docId,\n })\n\n const collectionConfig = req.payload.collections[input.collectionSlug as keyof typeof req.payload.collections].config\n const cloudStorage = isCloudStorage(collectionConfig)\n\n // Cloud storage: variant files cannot be uploaded without direct adapter access.\n // Mark as complete — CDN-level image optimization handles format conversion.\n if (cloudStorage) {\n await req.payload.update({\n collection: input.collectionSlug as CollectionSlug,\n id: input.docId,\n data: {\n imageOptimizer: {\n ...doc.imageOptimizer,\n status: 'complete',\n variants: [],\n error: null,\n },\n },\n context: { imageOptimizer_skip: true },\n })\n return { output: { variantsGenerated: 0 } }\n }\n\n const staticDir = resolveStaticDir(collectionConfig)\n if (!staticDir) {\n throw new Error(`No staticDir configured for collection \"${input.collectionSlug}\"`)\n }\n\n const fileBuffer = await fetchFileBuffer(doc, collectionConfig)\n\n const variants: Array<{\n filename: string\n filesize: number\n format: string\n height: number\n mimeType: string\n url: string\n width: number\n }> = []\n\n const perCollectionConfig = resolveCollectionConfig(resolvedConfig, 
input.collectionSlug)\n\n // When replaceOriginal is on, the main file is already in the primary format —\n // skip it and only generate variants for the remaining formats.\n const formatsToGenerate = perCollectionConfig.replaceOriginal && perCollectionConfig.formats.length > 0\n ? perCollectionConfig.formats.slice(1)\n : perCollectionConfig.formats\n\n const safeFilename = path.basename(doc.filename)\n\n for (const format of formatsToGenerate) {\n const result = await convertFormat(fileBuffer, format.format, format.quality)\n const variantFilename = `${path.parse(safeFilename).name}-optimized.${format.format}`\n\n await fs.writeFile(path.join(staticDir, variantFilename), result.buffer)\n\n variants.push({\n format: format.format,\n filename: variantFilename,\n filesize: result.size,\n width: result.width,\n height: result.height,\n mimeType: result.mimeType,\n url: `/api/${input.collectionSlug}/file/${variantFilename}`,\n })\n }\n\n await req.payload.update({\n collection: input.collectionSlug as CollectionSlug,\n id: input.docId,\n data: {\n imageOptimizer: {\n ...doc.imageOptimizer,\n status: 'complete',\n variants,\n error: null,\n },\n },\n context: { imageOptimizer_skip: true },\n })\n\n return { output: { variantsGenerated: variants.length } }\n } catch (err) {\n const errorMessage = err instanceof Error ? 
err.message : String(err)\n\n try {\n await req.payload.update({\n collection: input.collectionSlug as CollectionSlug,\n id: input.docId,\n data: {\n imageOptimizer: {\n status: 'error',\n error: errorMessage,\n },\n },\n context: { imageOptimizer_skip: true },\n })\n } catch (updateErr) {\n req.payload.logger.error(\n { err: updateErr, docId: input.docId, collectionSlug: input.collectionSlug },\n 'Failed to persist error status for image optimizer',\n )\n }\n\n throw err\n }\n }\n}\n"],"names":["fs","path","resolveCollectionConfig","convertFormat","resolveStaticDir","fetchFileBuffer","isCloudStorage","createConvertFormatsHandler","resolvedConfig","input","req","doc","payload","findByID","collection","collectionSlug","id","docId","collectionConfig","collections","config","cloudStorage","update","data","imageOptimizer","status","variants","error","context","imageOptimizer_skip","output","variantsGenerated","staticDir","Error","fileBuffer","perCollectionConfig","formatsToGenerate","replaceOriginal","formats","length","slice","safeFilename","basename","filename","format","result","quality","variantFilename","parse","name","writeFile","join","buffer","push","filesize","size","width","height","mimeType","url","err","errorMessage","message","String","updateErr","logger"],"mappings":"AAAA,OAAOA,QAAQ,cAAa;AAC5B,OAAOC,UAAU,OAAM;AAKvB,SAASC,uBAAuB,QAAQ,iBAAgB;AACxD,SAASC,aAAa,QAAQ,yBAAwB;AACtD,SAASC,gBAAgB,QAAQ,mCAAkC;AACnE,SAASC,eAAe,EAAEC,cAAc,QAAQ,0BAAyB;AAEzE,OAAO,MAAMC,8BAA8B,CAACC;IAC1C,OAAO,OAAO,EAAEC,KAAK,EAAEC,GAAG,EAAkE;QAC1F,IAAI;YACF,MAAMC,MAAM,MAAMD,IAAIE,OAAO,CAACC,QAAQ,CAAC;gBACrCC,YAAYL,MAAMM,cAAc;gBAChCC,IAAIP,MAAMQ,KAAK;YACjB;YAEA,MAAMC,mBAAmBR,IAAIE,OAAO,CAACO,WAAW,CAACV,MAAMM,cAAc,CAAyC,CAACK,MAAM;YACrH,MAAMC,eAAef,eAAeY;YAEpC,iFAAiF;YACjF,6EAA6E;YAC7E,IAAIG,cAAc;gBAChB,MAAMX,IAAIE,OAAO,CAACU,MAAM,CAAC;oBACvBR,YAAYL,MAAMM,cAAc;oBAChCC,IAAIP,MAAMQ,KAAK;oBACfM,MAAM;wBACJC,gBAAgB;4BACd,GAAGb,IAAIa,cAAc;4BACrBC,QAAQ;4BACRC,UAAU,EAAE;4BACZC,OAAO;wBACT;oBACF;oBA
CAC,SAAS;wBAAEC,qBAAqB;oBAAK;gBACvC;gBACA,OAAO;oBAAEC,QAAQ;wBAAEC,mBAAmB;oBAAE;gBAAE;YAC5C;YAEA,MAAMC,YAAY5B,iBAAiBc;YACnC,IAAI,CAACc,WAAW;gBACd,MAAM,IAAIC,MAAM,CAAC,wCAAwC,EAAExB,MAAMM,cAAc,CAAC,CAAC,CAAC;YACpF;YAEA,MAAMmB,aAAa,MAAM7B,gBAAgBM,KAAKO;YAE9C,MAAMQ,WAQD,EAAE;YAEP,MAAMS,sBAAsBjC,wBAAwBM,gBAAgBC,MAAMM,cAAc;YAExF,+EAA+E;YAC/E,gEAAgE;YAChE,MAAMqB,oBAAoBD,oBAAoBE,eAAe,IAAIF,oBAAoBG,OAAO,CAACC,MAAM,GAAG,IAClGJ,oBAAoBG,OAAO,CAACE,KAAK,CAAC,KAClCL,oBAAoBG,OAAO;YAE/B,MAAMG,eAAexC,KAAKyC,QAAQ,CAAC/B,IAAIgC,QAAQ;YAE/C,KAAK,MAAMC,UAAUR,kBAAmB;gBACtC,MAAMS,SAAS,MAAM1C,cAAc+B,YAAYU,OAAOA,MAAM,EAAEA,OAAOE,OAAO;gBAC5E,MAAMC,kBAAkB,GAAG9C,KAAK+C,KAAK,CAACP,cAAcQ,IAAI,CAAC,WAAW,EAAEL,OAAOA,MAAM,EAAE;gBAErF,MAAM5C,GAAGkD,SAAS,CAACjD,KAAKkD,IAAI,CAACnB,WAAWe,kBAAkBF,OAAOO,MAAM;gBAEvE1B,SAAS2B,IAAI,CAAC;oBACZT,QAAQA,OAAOA,MAAM;oBACrBD,UAAUI;oBACVO,UAAUT,OAAOU,IAAI;oBACrBC,OAAOX,OAAOW,KAAK;oBACnBC,QAAQZ,OAAOY,MAAM;oBACrBC,UAAUb,OAAOa,QAAQ;oBACzBC,KAAK,CAAC,KAAK,EAAElD,MAAMM,cAAc,CAAC,MAAM,EAAEgC,iBAAiB;gBAC7D;YACF;YAEA,MAAMrC,IAAIE,OAAO,CAACU,MAAM,CAAC;gBACvBR,YAAYL,MAAMM,cAAc;gBAChCC,IAAIP,MAAMQ,KAAK;gBACfM,MAAM;oBACJC,gBAAgB;wBACd,GAAGb,IAAIa,cAAc;wBACrBC,QAAQ;wBACRC;wBACAC,OAAO;oBACT;gBACF;gBACAC,SAAS;oBAAEC,qBAAqB;gBAAK;YACvC;YAEA,OAAO;gBAAEC,QAAQ;oBAAEC,mBAAmBL,SAASa,MAAM;gBAAC;YAAE;QAC1D,EAAE,OAAOqB,KAAK;YACZ,MAAMC,eAAeD,eAAe3B,QAAQ2B,IAAIE,OAAO,GAAGC,OAAOH;YAEjE,IAAI;gBACF,MAAMlD,IAAIE,OAAO,CAACU,MAAM,CAAC;oBACvBR,YAAYL,MAAMM,cAAc;oBAChCC,IAAIP,MAAMQ,KAAK;oBACfM,MAAM;wBACJC,gBAAgB;4BACdC,QAAQ;4BACRE,OAAOkC;wBACT;oBACF;oBACAjC,SAAS;wBAAEC,qBAAqB;oBAAK;gBACvC;YACF,EAAE,OAAOmC,WAAW;gBAClBtD,IAAIE,OAAO,CAACqD,MAAM,CAACtC,KAAK,CACtB;oBAAEiC,KAAKI;oBAAW/C,OAAOR,MAAMQ,KAAK;oBAAEF,gBAAgBN,MAAMM,cAAc;gBAAC,GAC3E;YAEJ;YAEA,MAAM6C;QACR;IACF;AACF,EAAC"}
1
+ {"version":3,"sources":["../../src/tasks/convertFormats.ts"],"sourcesContent":["import fs from 'fs/promises'\nimport path from 'path'\n\nimport type { CollectionSlug } from 'payload'\n\nimport type { ResolvedImageOptimizerConfig } from '../types.js'\nimport { resolveCollectionConfig } from '../defaults.js'\nimport { convertFormat, generateThumbHash } from '../processing/index.js'\nimport { resolveStaticDir } from '../utilities/resolveStaticDir.js'\nimport { fetchFileBuffer, isCloudStorage } from '../utilities/storage.js'\n\nexport const createConvertFormatsHandler = (resolvedConfig: ResolvedImageOptimizerConfig) => {\n return async ({ input, req }: { input: { collectionSlug: string; docId: string }; req: any }) => {\n try {\n const doc = await req.payload.findByID({\n collection: input.collectionSlug as CollectionSlug,\n id: input.docId,\n })\n\n const collectionConfig = req.payload.collections[input.collectionSlug as keyof typeof req.payload.collections].config\n const cloudStorage = isCloudStorage(collectionConfig)\n\n // Cloud storage: variant files cannot be uploaded without direct adapter access.\n // Mark as complete — CDN-level image optimization handles format conversion.\n if (cloudStorage) {\n await req.payload.update({\n collection: input.collectionSlug as CollectionSlug,\n id: input.docId,\n data: {\n imageOptimizer: {\n ...doc.imageOptimizer,\n status: 'complete',\n variants: [],\n error: null,\n },\n },\n context: { imageOptimizer_skip: true },\n })\n return { output: { variantsGenerated: 0 } }\n }\n\n const staticDir = resolveStaticDir(collectionConfig)\n if (!staticDir) {\n throw new Error(`No staticDir configured for collection \"${input.collectionSlug}\"`)\n }\n\n const fileBuffer = await fetchFileBuffer(doc, collectionConfig)\n\n const variants: Array<{\n filename: string\n filesize: number\n format: string\n height: number\n mimeType: string\n url: string\n width: number\n }> = []\n\n const perCollectionConfig = 
resolveCollectionConfig(resolvedConfig, input.collectionSlug)\n\n // When replaceOriginal is on, the main file is already in the primary format —\n // skip it and only generate variants for the remaining formats.\n const formatsToGenerate = perCollectionConfig.replaceOriginal && perCollectionConfig.formats.length > 0\n ? perCollectionConfig.formats.slice(1)\n : perCollectionConfig.formats\n\n const safeFilename = path.basename(doc.filename)\n\n const variantResults = await Promise.all(\n formatsToGenerate.map(async (format) => {\n const result = await convertFormat(fileBuffer, format.format, format.quality)\n const variantFilename = `${path.parse(safeFilename).name}-optimized.${format.format}`\n await fs.writeFile(path.join(staticDir, variantFilename), result.buffer)\n return {\n format: format.format,\n filename: variantFilename,\n filesize: result.size,\n width: result.width,\n height: result.height,\n mimeType: result.mimeType,\n url: `/api/${input.collectionSlug}/file/${variantFilename}`,\n }\n }),\n )\n variants.push(...variantResults)\n\n // Compute ThumbHash in the background job to avoid blocking the sync save path\n let thumbHash: string | undefined\n if (resolvedConfig.generateThumbHash) {\n thumbHash = await generateThumbHash(fileBuffer)\n }\n\n await req.payload.update({\n collection: input.collectionSlug as CollectionSlug,\n id: input.docId,\n data: {\n imageOptimizer: {\n ...doc.imageOptimizer,\n status: 'complete',\n variants,\n thumbHash,\n error: null,\n },\n },\n context: { imageOptimizer_skip: true },\n })\n\n return { output: { variantsGenerated: variants.length } }\n } catch (err) {\n const errorMessage = err instanceof Error ? 
err.message : String(err)\n\n try {\n await req.payload.update({\n collection: input.collectionSlug as CollectionSlug,\n id: input.docId,\n data: {\n imageOptimizer: {\n status: 'error',\n error: errorMessage,\n },\n },\n context: { imageOptimizer_skip: true },\n })\n } catch (updateErr) {\n req.payload.logger.error(\n { err: updateErr, docId: input.docId, collectionSlug: input.collectionSlug },\n 'Failed to persist error status for image optimizer',\n )\n }\n\n throw err\n }\n }\n}\n"],"names":["fs","path","resolveCollectionConfig","convertFormat","generateThumbHash","resolveStaticDir","fetchFileBuffer","isCloudStorage","createConvertFormatsHandler","resolvedConfig","input","req","doc","payload","findByID","collection","collectionSlug","id","docId","collectionConfig","collections","config","cloudStorage","update","data","imageOptimizer","status","variants","error","context","imageOptimizer_skip","output","variantsGenerated","staticDir","Error","fileBuffer","perCollectionConfig","formatsToGenerate","replaceOriginal","formats","length","slice","safeFilename","basename","filename","variantResults","Promise","all","map","format","result","quality","variantFilename","parse","name","writeFile","join","buffer","filesize","size","width","height","mimeType","url","push","thumbHash","err","errorMessage","message","String","updateErr","logger"],"mappings":"AAAA,OAAOA,QAAQ,cAAa;AAC5B,OAAOC,UAAU,OAAM;AAKvB,SAASC,uBAAuB,QAAQ,iBAAgB;AACxD,SAASC,aAAa,EAAEC,iBAAiB,QAAQ,yBAAwB;AACzE,SAASC,gBAAgB,QAAQ,mCAAkC;AACnE,SAASC,eAAe,EAAEC,cAAc,QAAQ,0BAAyB;AAEzE,OAAO,MAAMC,8BAA8B,CAACC;IAC1C,OAAO,OAAO,EAAEC,KAAK,EAAEC,GAAG,EAAkE;QAC1F,IAAI;YACF,MAAMC,MAAM,MAAMD,IAAIE,OAAO,CAACC,QAAQ,CAAC;gBACrCC,YAAYL,MAAMM,cAAc;gBAChCC,IAAIP,MAAMQ,KAAK;YACjB;YAEA,MAAMC,mBAAmBR,IAAIE,OAAO,CAACO,WAAW,CAACV,MAAMM,cAAc,CAAyC,CAACK,MAAM;YACrH,MAAMC,eAAef,eAAeY;YAEpC,iFAAiF;YACjF,6EAA6E;YAC7E,IAAIG,cAAc;gBAChB,MAAMX,IAAIE,OAAO,CAACU,MAAM,CAAC;oBACvBR,YAAYL,MAAMM,cAAc;oBAChCC,IAAIP,MAAMQ,KAAK;oBACfM,MAAM;wBACJC,gBA
AgB;4BACd,GAAGb,IAAIa,cAAc;4BACrBC,QAAQ;4BACRC,UAAU,EAAE;4BACZC,OAAO;wBACT;oBACF;oBACAC,SAAS;wBAAEC,qBAAqB;oBAAK;gBACvC;gBACA,OAAO;oBAAEC,QAAQ;wBAAEC,mBAAmB;oBAAE;gBAAE;YAC5C;YAEA,MAAMC,YAAY5B,iBAAiBc;YACnC,IAAI,CAACc,WAAW;gBACd,MAAM,IAAIC,MAAM,CAAC,wCAAwC,EAAExB,MAAMM,cAAc,CAAC,CAAC,CAAC;YACpF;YAEA,MAAMmB,aAAa,MAAM7B,gBAAgBM,KAAKO;YAE9C,MAAMQ,WAQD,EAAE;YAEP,MAAMS,sBAAsBlC,wBAAwBO,gBAAgBC,MAAMM,cAAc;YAExF,+EAA+E;YAC/E,gEAAgE;YAChE,MAAMqB,oBAAoBD,oBAAoBE,eAAe,IAAIF,oBAAoBG,OAAO,CAACC,MAAM,GAAG,IAClGJ,oBAAoBG,OAAO,CAACE,KAAK,CAAC,KAClCL,oBAAoBG,OAAO;YAE/B,MAAMG,eAAezC,KAAK0C,QAAQ,CAAC/B,IAAIgC,QAAQ;YAE/C,MAAMC,iBAAiB,MAAMC,QAAQC,GAAG,CACtCV,kBAAkBW,GAAG,CAAC,OAAOC;gBAC3B,MAAMC,SAAS,MAAM/C,cAAcgC,YAAYc,OAAOA,MAAM,EAAEA,OAAOE,OAAO;gBAC5E,MAAMC,kBAAkB,GAAGnD,KAAKoD,KAAK,CAACX,cAAcY,IAAI,CAAC,WAAW,EAAEL,OAAOA,MAAM,EAAE;gBACrF,MAAMjD,GAAGuD,SAAS,CAACtD,KAAKuD,IAAI,CAACvB,WAAWmB,kBAAkBF,OAAOO,MAAM;gBACvE,OAAO;oBACLR,QAAQA,OAAOA,MAAM;oBACrBL,UAAUQ;oBACVM,UAAUR,OAAOS,IAAI;oBACrBC,OAAOV,OAAOU,KAAK;oBACnBC,QAAQX,OAAOW,MAAM;oBACrBC,UAAUZ,OAAOY,QAAQ;oBACzBC,KAAK,CAAC,KAAK,EAAErD,MAAMM,cAAc,CAAC,MAAM,EAAEoC,iBAAiB;gBAC7D;YACF;YAEFzB,SAASqC,IAAI,IAAInB;YAEjB,+EAA+E;YAC/E,IAAIoB;YACJ,IAAIxD,eAAeL,iBAAiB,EAAE;gBACpC6D,YAAY,MAAM7D,kBAAkB+B;YACtC;YAEA,MAAMxB,IAAIE,OAAO,CAACU,MAAM,CAAC;gBACvBR,YAAYL,MAAMM,cAAc;gBAChCC,IAAIP,MAAMQ,KAAK;gBACfM,MAAM;oBACJC,gBAAgB;wBACd,GAAGb,IAAIa,cAAc;wBACrBC,QAAQ;wBACRC;wBACAsC;wBACArC,OAAO;oBACT;gBACF;gBACAC,SAAS;oBAAEC,qBAAqB;gBAAK;YACvC;YAEA,OAAO;gBAAEC,QAAQ;oBAAEC,mBAAmBL,SAASa,MAAM;gBAAC;YAAE;QAC1D,EAAE,OAAO0B,KAAK;YACZ,MAAMC,eAAeD,eAAehC,QAAQgC,IAAIE,OAAO,GAAGC,OAAOH;YAEjE,IAAI;gBACF,MAAMvD,IAAIE,OAAO,CAACU,MAAM,CAAC;oBACvBR,YAAYL,MAAMM,cAAc;oBAChCC,IAAIP,MAAMQ,KAAK;oBACfM,MAAM;wBACJC,gBAAgB;4BACdC,QAAQ;4BACRE,OAAOuC;wBACT;oBACF;oBACAtC,SAAS;wBAAEC,qBAAqB;oBAAK;gBACvC;YACF,EAAE,OAAOwC,WAAW;gBAClB3D,IAAIE,OAAO,CAAC0D,MAAM,CAAC3C,KAAK,CACtB;oBAAEsC,KAAKI;oBAAWpD,OAAOR,MAAMQ,KAAK;oBAAEF,gBAAgBN,MAAMM,cAAc;gBAAC,GAC3E;YAEJ;YAEA,MAAMkD;QACR;IACF;AACF,EA
AC"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@inoo-ch/payload-image-optimizer",
3
- "version": "1.9.0",
3
+ "version": "1.10.0",
4
4
  "description": "Payload CMS plugin for automatic image optimization — WebP/AVIF conversion, resize, EXIF strip, ThumbHash placeholders, and bulk regeneration",
5
5
  "license": "MIT",
6
6
  "keywords": [
@@ -14,6 +14,10 @@ export const createAfterChangeHook = (
14
14
  return async ({ context, doc, req }) => {
15
15
  if (context?.imageOptimizer_skip) return doc
16
16
 
17
+ // Native re-uploads (focal point/crop changes): optimization was skipped in beforeChange.
18
+ // Payload's native image-size regeneration handles everything.
19
+ if (context?.imageOptimizer_nativeReupload) return doc
20
+
17
21
  // Use context flag from beforeChange instead of checking req.file.data directly.
18
22
  // Cloud storage adapters may consume req.file.data in their own afterChange hook
19
23
  // before ours runs, which would cause this guard to bail out and leave status as 'pending'.
@@ -3,7 +3,7 @@ import type { CollectionBeforeChangeHook } from 'payload'
3
3
 
4
4
  import type { ResolvedImageOptimizerConfig } from '../types.js'
5
5
  import { resolveCollectionConfig } from '../defaults.js'
6
- import { convertFormat, generateThumbHash, stripAndResize } from '../processing/index.js'
6
+ import { generateThumbHash, optimizeImage } from '../processing/index.js'
7
7
  import { isCloudStorage } from '../utilities/storage.js'
8
8
 
9
9
  export const createBeforeChangeHook = (
@@ -15,6 +15,25 @@ export const createBeforeChangeHook = (
15
15
 
16
16
  if (!req.file || !req.file.data || !req.file.mimetype?.startsWith('image/')) return data
17
17
 
18
+ // Detect re-upload triggered by Payload's shouldReupload() — focal point or crop change.
19
+ // shouldReupload re-fetches the stored (already-optimized) file and sets req.file.
20
+ // When re-fetching, Payload sets req.file.name to the stored filename verbatim
21
+ // (via getFileByPath or getExternalFile). For genuine user uploads, req.file.name
22
+ // comes from the user's filesystem and will differ from the stored filename.
23
+ // Skip redundant optimization; let Payload's native image-size regeneration handle cropping.
24
+ if (originalDoc) {
25
+ const existingFilename = (originalDoc as Record<string, unknown>).filename as string | undefined
26
+
27
+ if (existingFilename && req.file.name === existingFilename) {
28
+ const existingOptimizer = (originalDoc as Record<string, unknown>).imageOptimizer
29
+ if (existingOptimizer) {
30
+ data.imageOptimizer = existingOptimizer as typeof data.imageOptimizer
31
+ }
32
+ context.imageOptimizer_nativeReupload = true
33
+ return data
34
+ }
35
+ }
36
+
18
37
  // Apply custom filename strategy (seoFilename, uuidFilename, or user-provided).
19
38
  // The callback returns a stem (no extension) — we append the original extension here,
20
39
  // and replaceOriginal may swap it to the target format extension later.
@@ -37,30 +56,28 @@ export const createBeforeChangeHook = (
37
56
 
38
57
  const perCollectionConfig = resolveCollectionConfig(resolvedConfig, collectionSlug)
39
58
 
40
- // Process in memory: strip EXIF, resize, generate blur
41
- const processed = await stripAndResize(
42
- req.file.data,
43
- perCollectionConfig.maxDimensions,
44
- resolvedConfig.stripMetadata,
45
- )
59
+ // Single-pipeline optimization: resize + strip metadata + optional format conversion.
60
+ // Skips .rotate() Payload's generateFileData() already auto-rotated before hooks run.
61
+ const primaryFormat = perCollectionConfig.replaceOriginal && perCollectionConfig.formats.length > 0
62
+ ? perCollectionConfig.formats[0]
63
+ : undefined
64
+
65
+ const processed = await optimizeImage(req.file.data, {
66
+ maxDimensions: perCollectionConfig.maxDimensions,
67
+ stripMetadata: resolvedConfig.stripMetadata,
68
+ format: primaryFormat,
69
+ })
46
70
 
47
71
  let finalBuffer = processed.buffer
48
72
  let finalSize = processed.size
49
73
 
50
- if (perCollectionConfig.replaceOriginal && perCollectionConfig.formats.length > 0) {
51
- // Convert to primary format (first in the formats array)
52
- const primaryFormat = perCollectionConfig.formats[0]
53
- const converted = await convertFormat(processed.buffer, primaryFormat.format, primaryFormat.quality)
54
-
55
- finalBuffer = converted.buffer
56
- finalSize = converted.size
57
-
74
+ if (primaryFormat && processed.mimeType) {
58
75
  // Update filename and mimeType so Payload stores the correct metadata
59
76
  const originalFilename = data.filename || req.file.name || ''
60
77
  const newFilename = `${path.parse(originalFilename).name}.${primaryFormat.format}`
61
78
  context.imageOptimizer_originalFilename = originalFilename
62
79
  data.filename = newFilename
63
- data.mimeType = converted.mimeType
80
+ data.mimeType = processed.mimeType
64
81
  data.filesize = finalSize
65
82
  }
66
83
 
@@ -84,7 +101,11 @@ export const createBeforeChangeHook = (
84
101
  context.imageOptimizer_statusResolved = true
85
102
  }
86
103
 
87
- if (resolvedConfig.generateThumbHash) {
104
+ // When no async job will run, compute ThumbHash now so it's included in the
105
+ // initial DB write. This avoids a separate update() call that would fail with
106
+ // 404 on MongoDB due to transaction isolation. When a job WILL run, the
107
+ // convertFormats task computes ThumbHash in the background instead.
108
+ if (resolvedConfig.generateThumbHash && !needsAsyncJob) {
88
109
  data.imageOptimizer.thumbHash = await generateThumbHash(finalBuffer)
89
110
  }
90
111
 
@@ -38,6 +38,45 @@ export async function generateThumbHash(buffer: Buffer): Promise<string> {
38
38
  return Buffer.from(thumbHash).toString('base64')
39
39
  }
40
40
 
41
+ /**
42
+ * Single-pipeline image optimization: resize + metadata strip + optional format conversion.
43
+ * Skips .rotate() because Payload's generateFileData() already auto-rotates before hooks run.
44
+ */
45
+ export async function optimizeImage(
46
+ buffer: Buffer,
47
+ options: {
48
+ maxDimensions: { width: number; height: number }
49
+ stripMetadata: boolean
50
+ format?: { format: 'webp' | 'avif'; quality: number }
51
+ },
52
+ ): Promise<{ buffer: Buffer; width: number; height: number; size: number; mimeType?: string }> {
53
+ let pipeline = sharp(buffer)
54
+ .resize(options.maxDimensions.width, options.maxDimensions.height, {
55
+ fit: 'inside',
56
+ withoutEnlargement: true,
57
+ })
58
+
59
+ if (!options.stripMetadata) {
60
+ pipeline = pipeline.keepMetadata()
61
+ }
62
+
63
+ if (options.format) {
64
+ pipeline = pipeline.toFormat(options.format.format, { quality: options.format.quality })
65
+ }
66
+
67
+ const { data, info } = await pipeline.toBuffer({ resolveWithObject: true })
68
+
69
+ return {
70
+ buffer: data,
71
+ width: info.width,
72
+ height: info.height,
73
+ size: info.size,
74
+ ...(options.format && {
75
+ mimeType: options.format.format === 'webp' ? 'image/webp' : 'image/avif',
76
+ }),
77
+ }
78
+ }
79
+
41
80
  export async function convertFormat(
42
81
  buffer: Buffer,
43
82
  format: 'webp' | 'avif',
@@ -5,7 +5,7 @@ import type { CollectionSlug } from 'payload'
5
5
 
6
6
  import type { ResolvedImageOptimizerConfig } from '../types.js'
7
7
  import { resolveCollectionConfig } from '../defaults.js'
8
- import { convertFormat } from '../processing/index.js'
8
+ import { convertFormat, generateThumbHash } from '../processing/index.js'
9
9
  import { resolveStaticDir } from '../utilities/resolveStaticDir.js'
10
10
  import { fetchFileBuffer, isCloudStorage } from '../utilities/storage.js'
11
11
 
@@ -66,21 +66,28 @@ export const createConvertFormatsHandler = (resolvedConfig: ResolvedImageOptimiz
66
66
 
67
67
  const safeFilename = path.basename(doc.filename)
68
68
 
69
- for (const format of formatsToGenerate) {
70
- const result = await convertFormat(fileBuffer, format.format, format.quality)
71
- const variantFilename = `${path.parse(safeFilename).name}-optimized.${format.format}`
72
-
73
- await fs.writeFile(path.join(staticDir, variantFilename), result.buffer)
74
-
75
- variants.push({
76
- format: format.format,
77
- filename: variantFilename,
78
- filesize: result.size,
79
- width: result.width,
80
- height: result.height,
81
- mimeType: result.mimeType,
82
- url: `/api/${input.collectionSlug}/file/${variantFilename}`,
83
- })
69
+ const variantResults = await Promise.all(
70
+ formatsToGenerate.map(async (format) => {
71
+ const result = await convertFormat(fileBuffer, format.format, format.quality)
72
+ const variantFilename = `${path.parse(safeFilename).name}-optimized.${format.format}`
73
+ await fs.writeFile(path.join(staticDir, variantFilename), result.buffer)
74
+ return {
75
+ format: format.format,
76
+ filename: variantFilename,
77
+ filesize: result.size,
78
+ width: result.width,
79
+ height: result.height,
80
+ mimeType: result.mimeType,
81
+ url: `/api/${input.collectionSlug}/file/${variantFilename}`,
82
+ }
83
+ }),
84
+ )
85
+ variants.push(...variantResults)
86
+
87
+ // Compute ThumbHash in the background job to avoid blocking the sync save path
88
+ let thumbHash: string | undefined
89
+ if (resolvedConfig.generateThumbHash) {
90
+ thumbHash = await generateThumbHash(fileBuffer)
84
91
  }
85
92
 
86
93
  await req.payload.update({
@@ -91,6 +98,7 @@ export const createConvertFormatsHandler = (resolvedConfig: ResolvedImageOptimiz
91
98
  ...doc.imageOptimizer,
92
99
  status: 'complete',
93
100
  variants,
101
+ thumbHash,
94
102
  error: null,
95
103
  },
96
104
  },