simdra 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1751 @@
1
+ // =============================================================================
2
+ // microsharp — sharp-shaped fluent image-processing surface
3
+ // =============================================================================
4
+ //
5
+ // Second binding on top of the same Zig core (`zig/simdra/`) that backs
6
+ // the Canvas2D binding in `src/index.ts`. The two are independent: this
7
+ // file does not reach into Canvas2D state — it talks to `SmBitmap` directly.
8
+ //
9
+ // ## API
10
+ //
11
+ // import { microsharp } from 'simdra';
12
+ //
13
+ // const out = await microsharp(input)
14
+ // .resize(800, 600, { fit: 'cover', kernel: 'lanczos3' })
15
+ // .jpeg(0.9)
16
+ // .toBuffer();
17
+ //
18
+ // `input` is a Web-standard byte source: `Uint8Array`, `ArrayBuffer`,
19
+ // `Blob`, `ReadableStream<Uint8Array>`, or `Response`. The Workers idiom
20
+ // `microsharp(req.body).jpeg(0.8).toBuffer()` works directly.
21
+ //
22
+ // ## v0 scope
23
+ //
24
+ // - decode → encode round-trip works (PNG / JPEG / BMP / GIF first frame
25
+ // in; PNG / JPEG / BMP / raw RGBA out)
26
+ // - `metadata()` reads the **header only** via stb_image's
27
+ // `stbi_info_from_memory` + `stbi_is_16_bit_from_memory` — no pixel
28
+ // decode, no allocation — and surfaces only fields stb's public API
29
+ // exposes (format / width / height / source channel count / bits /
30
+ // size + a derived `hasAlpha`). The libvips-only fields sharp returns
31
+ // (ICC, EXIF, density, orientation, pages, isProgressive, …) are not
32
+ // populated.
33
+ // - format options are scoped to what stb_image_write actually supports:
34
+ // PNG → `compressionLevel` (0–9, mutex-guarded around stb's process
35
+ // global)
36
+ // JPEG → `quality` (HTML5 0.0–1.0 → stb's 1–100 internally)
37
+ // BMP → no options (stb writes 32-bit V4 with alpha mask)
38
+ // raw → no options (defensive copy of decoded RGBA pixels)
39
+ // WebP / AVIF / GIF / JP2 / TIFF / HEIF / JXL are NOT supported because
40
+ // stb_image_write doesn't encode them. Sharp's metadata methods
41
+ // (`keepExif`, `withExif`, `keepIccProfile`, `withIccProfile`,
42
+ // `keepXmp`, `withXmp`, `keepMetadata`, `withMetadata`) are NOT
43
+ // supported because stb_image doesn't read or write EXIF/ICC/XMP/IPTC.
44
+ // `tile()`, `toFile()`, `timeout()` are also out of scope (libvips DZI,
45
+ // Node-only fs, no cancellable underlying work).
46
+ // - `resize` / `extend` / `extract` / `trim` / `composite` are
47
+ // implemented on top of bitmap-direct primitives in
48
+ // `zig/simdra/effects/`:
49
+ // * `SmResampler.zig` — eight separable filter kernels: nearest,
50
+ // linear, cubic (Catmull-Rom), mitchell, lanczos2, lanczos3
51
+ // (sharp's default), mks2013, mks2021 (Costella's Magic
52
+ // Kernel Sharp variants). Bit-level fidelity to libvips's MKS
53
+ // is not guaranteed; the kernel shape is faithful.
54
+ // * `SmTrim.zig` — bbox scan for `.trim()` and content-aware
55
+ // entropy / attention crop strategies (sharp's
56
+ // `position: 'entropy' | 'attention'`). `attention` does NOT
57
+ // apply sharp's skin-tone bias — it's a saliency proxy from
58
+ // local-luma gradient + saturation magnitude.
59
+ // * `SmComposite.zig` — overlay one bitmap onto another with a
60
+ // blend mode (27 kernels: Porter-Duff + W3C separable +
61
+ // non-separable). Sharp's libvips/cairo blend names are
62
+ // mapped to simdra's enum at the JS layer; `clear` and
63
+ // `saturate` throw `RangeError` (no equivalent kernel).
64
+ // - `rotate` / `flip` / `flop` / `flatten` / colour ops / channel
65
+ // ops are NOT implemented yet.
66
+ //
67
+ // ## Memory
68
+ //
69
+ // Each terminal (`toBuffer` / `metadata`) decodes a fresh `SmBitmap`,
70
+ // runs the recorded ops, encodes, and frees the bitmap explicitly via
71
+ // `SmBitmap.release` — no FinalizationRegistry needed for these
72
+ // short-lived intermediates. Returned `Uint8Array`s are JS-owned
73
+ // defensive copies (safe to retain past the next call into Zig).
74
+ //
75
+ // Stream / Blob / Response inputs are materialized **once** on first
76
+ // terminal call and cached on the pipeline, so `.toBuffer()` followed by
77
+ // `.metadata()` works on a `ReadableStream`-backed pipeline (a stream
78
+ // would otherwise be locked after the first read).
79
+ import { SmBitmap, parseCssColor } from '../zig.js';
80
// libvips interpolator names accepted by `affine()`, mapped onto the two
// sampling kernels simdra actually implements. The high-precision libvips
// kernels (nohalo / lbb / vsqbs) and bicubic all collapse to bilinear.
const AFFINE_INTERP_MAP = {
    'nearest': 'nearest',
    'bilinear': 'bilinear',
    'bicubic': 'bilinear',
    'nohalo': 'bilinear',
    'lbb': 'bilinear',
    'vsqbs': 'bilinear',
};
/** Resolve a sharp/libvips interpolator name (case-insensitive) to
 * simdra's kernel. `undefined` → `'bilinear'` (the default); any name
 * outside AFFINE_INTERP_MAP throws `RangeError`. */
function resolveAffineInterpolator(name) {
    if (name === undefined)
        return 'bilinear';
    const kernel = AFFINE_INTERP_MAP[String(name).toLowerCase()];
    if (kernel !== undefined)
        return kernel;
    throw new RangeError(`microsharp: affine interpolator '${name}' not supported; ` +
        `expected one of nearest, bilinear, bicubic, nohalo, lbb, vsqbs`);
}
99
/** libvips colourspace vocabulary (`VipsInterpretation`) sharp accepts on
 * `pipelineColourspace` / `toColourspace`. simdra is fixed at RGBA8 sRGB,
 * so the only values that change pixel output are `b-w` / `grey16`
 * (treated as greyscale); the rest are accepted-but-no-op passthroughs.
 * Unrecognised strings throw `RangeError`. */
const COLOURSPACES = new Set([
    'multiband', 'b-w', 'histogram', 'xyz', 'lab', 'cmyk', 'labq', 'rgb',
    'cmc', 'lch', 'labs', 'srgb', 'yxy', 'fourier', 'rgb16', 'grey16',
    'matrix', 'scrgb', 'hsv', 'last',
]);
/** Subset that maps to a greyscale step in our 8-bit pipeline. */
const GREY_COLOURSPACES = new Set(['b-w', 'grey16']);
// Working-precision names sharp's blur() accepts.
const BLUR_PRECISIONS = new Set(['integer', 'float', 'approximate']);
/** Lower-case and validate a blur precision; unknown values throw. */
function resolveBlurPrecision(p) {
    const norm = String(p).toLowerCase();
    if (BLUR_PRECISIONS.has(norm))
        return norm;
    throw new RangeError(`microsharp: blur({ precision: '${p}' }) — expected 'integer' | 'float' | 'approximate'`);
}
/** Lower-case, trim, and validate a colourspace name. `undefined`
 * passes through (the knob stays unset); unknown names throw with the
 * caller-supplied `label` identifying which API was misused. */
function normaliseColourspace(cs, label) {
    if (cs === undefined)
        return undefined;
    const norm = String(cs).toLowerCase().trim();
    if (COLOURSPACES.has(norm))
        return norm;
    throw new RangeError(`microsharp: ${label}('${cs}') — not a recognised libvips colourspace; ` +
        `expected one of ${[...COLOURSPACES].join(', ')}`);
}
129
export class MicroSharpPipeline {
    // Primary input exactly as handed to `microsharp()`: Uint8Array |
    // ArrayBuffer | Blob | ReadableStream<Uint8Array> | Response.
    input;
    // Input bytes cached after first materialisation, so a stream/blob
    // input can back multiple terminal calls (see the header notes on
    // stream locking).
    materialized = null;
    // Output encoder selected by png()/jpeg()/bmp()/raw()/toFormat().
    outputFormat = 'png';
    // stb JPEG quality 1–100; 92 ≈ the HTML5/Chromium default of 0.92
    // (see jpeg() below, which maps the 0.0–1.0 range to 1–100).
    jpegQuality = 92;
    // PNG zlib compression level 0–9, or null for the encoder default.
    pngCompressionLevel = null;
    // Recorded operations, replayed in order against the decoded bitmap
    // at terminal time (toBuffer / metadata).
    ops = [];
    /** Recorded `pipelineColourspace`. `'b-w'`/`'grey16'` injects a leading
     * greyscale at apply time; other recognised values are no-ops on our
     * RGBA8 sRGB pipeline (documented in COMPATIBILITY.md). */
    pipelineColourspaceSetting;
    /** Recorded `toColourspace`. Same semantics as the pipeline knob, but
     * the greyscale (when triggered) runs *after* all queued ops. */
    toColourspaceSetting;
    constructor(input) {
        this.input = input;
    }
146
+ /**
147
+ * Sharp-shaped resize. Three call forms:
148
+ * .resize(width, height, opts?)
149
+ * .resize(width, opts?) — auto-scales height from source aspect
150
+ * .resize({ width, height, ...opts })
151
+ *
152
+ * Per sharp: only one resize op survives per pipeline; subsequent
153
+ * `.resize()` calls replace the recorded op rather than appending.
154
+ */
155
+ resize(widthOrOpts, heightOrOpts, opts) {
156
+ let width;
157
+ let height;
158
+ let resolved;
159
+ if (widthOrOpts != null && typeof widthOrOpts === 'object') {
160
+ width = widthOrOpts.width;
161
+ height = widthOrOpts.height;
162
+ resolved = widthOrOpts;
163
+ }
164
+ else {
165
+ width = widthOrOpts ?? undefined;
166
+ if (typeof heightOrOpts === 'number') {
167
+ height = heightOrOpts;
168
+ resolved = opts;
169
+ }
170
+ else if (heightOrOpts != null && typeof heightOrOpts === 'object') {
171
+ resolved = heightOrOpts;
172
+ }
173
+ }
174
+ // Replace any previously-recorded resize. Sharp behaves the same.
175
+ const idx = this.ops.findIndex((o) => o.kind === 'resize');
176
+ const op = { kind: 'resize', width, height, opts: resolved };
177
+ if (idx >= 0)
178
+ this.ops[idx] = op;
179
+ else
180
+ this.ops.push(op);
181
+ return this;
182
+ }
183
+ /** Pad / extrude the image. Sharp accepts a number (all four edges)
184
+ * or a per-edge object with optional `extendWith` and `background`. */
185
+ extend(opts) {
186
+ this.ops.push({ kind: 'extend', opts });
187
+ return this;
188
+ }
189
+ /** Crop a sub-rectangle. Validated against the *current* bitmap at
190
+ * apply time, not at queue time. */
191
+ extract(region) {
192
+ if (!Number.isInteger(region.left) || region.left < 0 ||
193
+ !Number.isInteger(region.top) || region.top < 0 ||
194
+ !Number.isInteger(region.width) || region.width < 1 ||
195
+ !Number.isInteger(region.height) || region.height < 1) {
196
+ throw new RangeError('microsharp: extract() requires non-negative integer left/top and positive integer width/height');
197
+ }
198
+ this.ops.push({ kind: 'extract', region });
199
+ return this;
200
+ }
201
+ /** Trim background-coloured edges. Default background = top-left
202
+ * pixel of the working bitmap (sharp parity); default threshold = 10. */
203
+ trim(opts) {
204
+ this.ops.push({ kind: 'trim', opts });
205
+ return this;
206
+ }
207
+ /** Composite one or more overlays onto the working bitmap. Each
208
+ * entry's `input` is materialized at apply time (so streams, blobs,
209
+ * and responses work for overlays just like they do for the
210
+ * pipeline's primary input). Overlays are drawn in array order;
211
+ * later entries can blend over earlier ones. */
212
+ composite(images) {
213
+ if (!Array.isArray(images)) {
214
+ throw new TypeError('microsharp: composite() expects an array of images');
215
+ }
216
+ this.ops.push({ kind: 'composite', images });
217
+ return this;
218
+ }
219
+ /** Strip alpha — sets α=255 on every pixel. Sharp's docs describe
220
+ * this as "the output image is a 3 channel image without an alpha
221
+ * channel"; in microsharp the buffer remains 4-channel for
222
+ * pipeline-shape invariance, but the result is visibly identical
223
+ * (all pixels fully opaque). */
224
+ removeAlpha() {
225
+ this.ops.push({ kind: 'removeAlpha' });
226
+ return this;
227
+ }
228
+ /** Sharp's `ensureAlpha([alpha])`. Microsharp bitmaps always carry
229
+ * an alpha channel, so this is a no-op without an argument. With
230
+ * an explicit `alpha` (0..1) the channel is set to that constant
231
+ * level — useful right after `removeAlpha` to set a non-opaque
232
+ * uniform alpha, or to force a known transparency level on a
233
+ * decoded source. */
234
+ ensureAlpha(alpha) {
235
+ if (alpha !== undefined) {
236
+ if (!Number.isFinite(alpha) || alpha < 0 || alpha > 1) {
237
+ throw new RangeError('microsharp: ensureAlpha(α) expects α in [0, 1]');
238
+ }
239
+ }
240
+ this.ops.push({ kind: 'ensureAlpha', alpha });
241
+ return this;
242
+ }
243
+ /** Extract a single channel as a greyscale image. `channel` accepts
244
+ * the integer index 0/1/2/3 or sharp's string names
245
+ * 'red'/'green'/'blue'/'alpha'. The result is RGB = chosen channel,
246
+ * α = 255. */
247
+ extractChannel(channel) {
248
+ const idx = resolveChannel(channel);
249
+ this.ops.push({ kind: 'extractChannel', channel: idx });
250
+ return this;
251
+ }
252
+ /** Per-pixel bitwise op across R, G, B channels — produces a
253
+ * greyscale image where each pixel is `(R op G op B)` broadcast.
254
+ * Accepts sharp's `'and'` / `'or'` / `'eor'` (libvips name for
255
+ * XOR); plain `'xor'` is also accepted. */
256
+ bandbool(op) {
257
+ const norm = op === 'xor' ? 'eor' : op;
258
+ if (norm !== 'and' && norm !== 'or' && norm !== 'eor') {
259
+ throw new RangeError(`microsharp: bandbool() expects 'and' | 'or' | 'eor' (got ${String(op)})`);
260
+ }
261
+ this.ops.push({ kind: 'bandbool', op: norm });
262
+ return this;
263
+ }
264
+ /** Sharp's `tint(colour)` — recolour using the given RGB tint while
265
+ * preserving the per-pixel luminance pattern. Alpha is unchanged
266
+ * (sharp spec). The colour can be a CSS string or
267
+ * `{ r, g, b, alpha? }` object; the alpha component is parsed for
268
+ * compatibility but ignored — the tint operation is RGB-only. */
269
+ tint(colour) {
270
+ const [r, g, b] = parseBackground(colour, [0, 0, 0, 255]);
271
+ this.ops.push({ kind: 'tint', r, g, b });
272
+ return this;
273
+ }
274
+ /** Sharp's `rotate([angle], [opts])`. With no arguments this is the
275
+ * back-compat alias for `autoOrient()`. With a finite angle it
276
+ * rotates by that many degrees clockwise, padding with `background`
277
+ * (default opaque black). Multiples of 90° are byte-exact (lossless
278
+ * index permutation); other angles sample through bilinear
279
+ * interpolation against the source-bbox AABB. Sharp parity:
280
+ * multi-page input is not supported — simdra decodes one frame. */
281
+ rotate(angle, opts) {
282
+ if (angle === undefined) {
283
+ this.ops.push({ kind: 'autoOrient' });
284
+ return this;
285
+ }
286
+ if (!Number.isFinite(angle)) {
287
+ throw new RangeError('microsharp: rotate(angle) must be a finite number');
288
+ }
289
+ // Normalise to [0, 360). Sharp does the same: -450 → 270.
290
+ let a = angle % 360;
291
+ if (a < 0)
292
+ a += 360;
293
+ const bg = parseBackground(opts?.background, [0, 0, 0, 255]);
294
+ this.ops.push({ kind: 'rotate', angle: a, bg });
295
+ return this;
296
+ }
297
+ /** Sharp's `autoOrient()` — read the EXIF Orientation tag from the
298
+ * input bytes and apply the corresponding rotation / mirror. simdra
299
+ * parses Orientation only (no full EXIF library); the tag is read
300
+ * by `SmBitmap.peekOrientation` against the materialised input
301
+ * bytes at apply time. Missing / malformed EXIF → no-op. */
302
+ autoOrient() {
303
+ this.ops.push({ kind: 'autoOrient' });
304
+ return this;
305
+ }
306
+ /** Sharp's `flip([on])` — mirror vertically (top↔bottom). */
307
+ flip(on = true) {
308
+ if (on)
309
+ this.ops.push({ kind: 'flip' });
310
+ return this;
311
+ }
312
+ /** Sharp's `flop([on])` — mirror horizontally (left↔right). */
313
+ flop(on = true) {
314
+ if (on)
315
+ this.ops.push({ kind: 'flop' });
316
+ return this;
317
+ }
318
    /** Sharp's `affine(matrix, [opts])`. `matrix` is `[a, b, c, d]`
     * (`[[a, b], [c, d]]`) — the linear part of `F(x, y) = M·(x+idx,
     * y+idy) + (odx, ody)`. Output dims = forward bbox of the input
     * rectangle; the gap is padded with `background` (default opaque
     * black). `interpolator` accepts sharp/libvips kernel names; the
     * three high-precision kernels libvips ships (`nohalo`/`lbb`/
     * `vsqbs`) collapse to `bilinear` here — see COMPATIBILITY.md. */
    affine(matrix, opts) {
        // Accepts both flat and nested 2×2 matrix spellings.
        const m = flattenAffineMatrix(matrix);
        const bg = parseBackground(opts?.background, [0, 0, 0, 255]);
        // Throws RangeError on interpolator names outside the map.
        const interp = resolveAffineInterpolator(opts?.interpolator);
        this.ops.push({
            kind: 'affine',
            m00: m[0], m01: m[1], m10: m[2], m11: m[3],
            // Input/output translation offsets all default to 0.
            idx: opts?.idx ?? 0,
            idy: opts?.idy ?? 0,
            odx: opts?.odx ?? 0,
            ody: opts?.ody ?? 0,
            bg,
            interp,
        });
        return this;
    }
341
    /** Sharp's `blur([opts])`.
     * - No args / `true`: fast 3×3 box blur.
     * - `false`: no-op (records nothing).
     * - bare `sigma` number: Gaussian blur with the chosen sigma.
     * - `{ sigma, precision, minAmplitude }`: same with explicit
     *   working-precision and kernel-amplitude cutoff.
     * `precision` defaults to `'integer'`; `minAmplitude` to `0.2`
     * (sharp's defaults). Sigma must be in [0.3, 1000]. */
    blur(opts) {
        if (opts === false)
            return this;
        // No sigma recorded → the op means "fast box blur" downstream.
        if (opts === undefined || opts === true) {
            this.ops.push({ kind: 'blur', precision: 'integer', minAmplitude: 0.2 });
            return this;
        }
        let sigma;
        let precision = 'integer';
        let minAmplitude = 0.2;
        if (typeof opts === 'number') {
            sigma = opts;
        }
        else {
            // Object form: sigma may still be omitted (box blur with
            // explicit precision / minAmplitude settings).
            sigma = opts.sigma;
            if (opts.precision !== undefined)
                precision = resolveBlurPrecision(opts.precision);
            if (opts.minAmplitude !== undefined) {
                // Open interval — 0 and 1 themselves are both rejected.
                if (!Number.isFinite(opts.minAmplitude) || opts.minAmplitude <= 0 || opts.minAmplitude >= 1) {
                    throw new RangeError('microsharp: blur({ minAmplitude }) must be in (0, 1)');
                }
                minAmplitude = opts.minAmplitude;
            }
        }
        if (sigma !== undefined) {
            if (!Number.isFinite(sigma) || sigma < 0.3 || sigma > 1000) {
                throw new RangeError('microsharp: blur(sigma) must be in [0.3, 1000]');
            }
        }
        this.ops.push({ kind: 'blur', sigma, precision, minAmplitude });
        return this;
    }
381
    /** Sharp's `sharpen([opts], [flat], [jagged])`.
     * - No args: fast 3×3 unsharp kernel `[[0,-1,0],[-1,5,-1],[0,-1,0]]`.
     * - `{ sigma, m1, m2, x1, y2, y3 }`: libvips USM with the flat /
     *   jagged piecewise gain. Per-channel in 8-bit sRGB (sharp's
     *   LAB-L pipeline isn't available — documented 🟡 in COMPATIBILITY).
     * - Deprecated 2-positional form `sharpen(sigma, flat, jagged)`:
     *   surfaces with `flat = m1`, `jagged = m2`. Sharp parity. */
    sharpen(opts, flat, jagged) {
        let sigma;
        // Defaults per sharp's sharpen() documentation.
        let m1 = 1.0;
        let m2 = 2.0;
        let x1 = 2.0;
        let y2 = 10.0;
        let y3 = 20.0;
        if (typeof opts === 'number') {
            // Deprecated positional form: sharpen(sigma, flat, jagged).
            sigma = opts;
            if (typeof flat === 'number')
                m1 = flat;
            if (typeof jagged === 'number')
                m2 = jagged;
        }
        else if (opts !== undefined) {
            sigma = opts.sigma;
            if (opts.m1 !== undefined)
                m1 = opts.m1;
            if (opts.m2 !== undefined)
                m2 = opts.m2;
            if (opts.x1 !== undefined)
                x1 = opts.x1;
            if (opts.y2 !== undefined)
                y2 = opts.y2;
            if (opts.y3 !== undefined)
                y3 = opts.y3;
        }
        // Sharp's published bounds: sigma ∈ [0.000001, 10] when set.
        // Leaving sigma undefined selects the fast 3×3 kernel (see doc).
        if (sigma !== undefined) {
            if (!Number.isFinite(sigma) || sigma < 0.000001 || sigma > 10) {
                throw new RangeError('microsharp: sharpen({ sigma }) must be in [0.000001, 10]');
            }
        }
        // All five gain/threshold knobs share the same validation range.
        for (const [name, v] of [
            ['m1', m1], ['m2', m2], ['x1', x1], ['y2', y2], ['y3', y3],
        ]) {
            if (!Number.isFinite(v) || v < 0 || v > 1_000_000) {
                throw new RangeError(`microsharp: sharpen({ ${name} }) must be in [0, 1000000]`);
            }
        }
        this.ops.push({ kind: 'sharpen', sigma, m1, m2, x1, y2, y3 });
        return this;
    }
431
+ /** Sharp's `median([size])`. Square `size × size` window per RGB
432
+ * channel; α preserved. `size` defaults to 3 and must be odd. */
433
+ median(size = 3) {
434
+ if (!Number.isInteger(size) || size < 1 || (size & 1) === 0 || size > 99) {
435
+ throw new RangeError('microsharp: median(size) must be an odd integer in [1, 99]');
436
+ }
437
+ this.ops.push({ kind: 'median', size });
438
+ return this;
439
+ }
440
+ /** Sharp's `dilate([width])`. Foreground expansion by a separable
441
+ * `(2·width+1)`-square max-window. `width` defaults to 1. */
442
+ dilate(width = 1) {
443
+ if (!Number.isInteger(width) || width < 0 || width > 250) {
444
+ throw new RangeError('microsharp: dilate(width) must be an integer in [0, 250]');
445
+ }
446
+ this.ops.push({ kind: 'dilate', width });
447
+ return this;
448
+ }
449
+ /** Sharp's `erode([width])`. Foreground shrinking — same shape as
450
+ * `dilate`, opposite kernel direction. */
451
+ erode(width = 1) {
452
+ if (!Number.isInteger(width) || width < 0 || width > 250) {
453
+ throw new RangeError('microsharp: erode(width) must be an integer in [0, 250]');
454
+ }
455
+ this.ops.push({ kind: 'erode', width });
456
+ return this;
457
+ }
458
    /** Sharp's `convolve(kernel)`. `kernel.kernel` length must equal
     * `width · height`; both dims must be odd. `scale` defaults to the
     * sum of kernel values; `offset` defaults to 0. Edge mode is
     * clamp (libvips's default). */
    convolve(spec) {
        if (!Number.isInteger(spec.width) || spec.width < 1 || (spec.width & 1) === 0) {
            throw new RangeError('microsharp: convolve.width must be an odd positive integer');
        }
        if (!Number.isInteger(spec.height) || spec.height < 1 || (spec.height & 1) === 0) {
            throw new RangeError('microsharp: convolve.height must be an odd positive integer');
        }
        const expected = spec.width * spec.height;
        if (spec.kernel.length !== expected) {
            throw new RangeError(`microsharp: convolve.kernel length ${spec.kernel.length} ≠ width·height (${expected})`);
        }
        // Defensive copy into a Float64Array: later mutation of the
        // caller's array cannot affect the recorded op. Entries are
        // coerced with Number() and validated as we go.
        const k = new Float64Array(expected);
        let sum = 0;
        for (let i = 0; i < expected; i++) {
            const v = Number(spec.kernel[i]);
            if (!Number.isFinite(v)) {
                throw new RangeError('microsharp: convolve.kernel entries must be finite numbers');
            }
            k[i] = v;
            sum += v;
        }
        let scale;
        if (spec.scale === undefined) {
            // Sharp's default: kernel sum. Fallback to 1 when sum is zero
            // (e.g. derivative kernels like Sobel).
            scale = sum === 0 ? 1 : sum;
        }
        else {
            if (!Number.isFinite(spec.scale) || spec.scale === 0) {
                throw new RangeError('microsharp: convolve.scale must be a finite non-zero number');
            }
            scale = spec.scale;
        }
        const offset = spec.offset ?? 0;
        if (!Number.isFinite(offset)) {
            throw new RangeError('microsharp: convolve.offset must be a finite number');
        }
        this.ops.push({ kind: 'convolve', width: spec.width, height: spec.height, kernel: k, scale, offset });
        return this;
    }
502
+ /** Sharp's `gamma([gamma], [gammaOut])`. Applies a single LUT
503
+ * `(in/255)^(gIn/gOut)·255` per RGB channel; α preserved. Sharp's
504
+ * pre-/post-resize coupling collapses to a single pass without an
505
+ * intervening resize — documented 🟡 in COMPATIBILITY.md. Both
506
+ * values must be in [1.0, 3.0]; `gOut` defaults to `gIn`. */
507
+ gamma(g = 2.2, gOut) {
508
+ if (!Number.isFinite(g) || g < 1.0 || g > 3.0) {
509
+ throw new RangeError('microsharp: gamma(g) must be in [1.0, 3.0]');
510
+ }
511
+ const out = gOut ?? g;
512
+ if (!Number.isFinite(out) || out < 1.0 || out > 3.0) {
513
+ throw new RangeError('microsharp: gamma(_, gOut) must be in [1.0, 3.0]');
514
+ }
515
+ this.ops.push({ kind: 'gamma', gIn: g, gOut: out });
516
+ return this;
517
+ }
518
+ /** Sharp's `negate([opts])`. RGB inverted; α negated when
519
+ * `opts.alpha !== false` (sharp default `true`). */
520
+ negate(opts) {
521
+ const alpha = opts?.alpha !== false;
522
+ this.ops.push({ kind: 'negate', alpha });
523
+ return this;
524
+ }
525
+ /** Sharp's `linear([a], [b])`. Per-channel `a·C + b` with output
526
+ * clipped to [0, 255]. Both arguments accept a single number (RGB
527
+ * broadcast, alpha untouched), a length-3 array (RGB), or a
528
+ * length-4 array (RGBA). Defaults: `a=1`, `b=0` per channel. */
529
+ linear(a, b) {
530
+ const aArr = expandLinearVec(a, 1, 'a');
531
+ const bArr = expandLinearVec(b, 0, 'b');
532
+ this.ops.push({ kind: 'linear', a: aArr, b: bArr });
533
+ return this;
534
+ }
535
+ /** Sharp's `threshold([t], [opts])`. `t` defaults to 128; sharp
536
+ * accepts 0..255. With `greyscale=true` (default), Rec.601 luma is
537
+ * computed first and broadcast. */
538
+ threshold(t = 128, opts) {
539
+ if (!Number.isInteger(t) || t < 0 || t > 255) {
540
+ throw new RangeError('microsharp: threshold(t) must be an integer in [0, 255]');
541
+ }
542
+ // Sharp accepts both spellings; greyscale wins if both set.
543
+ const grey = opts?.greyscale ?? opts?.grayscale ?? true;
544
+ this.ops.push({ kind: 'threshold', t, greyscale: grey });
545
+ return this;
546
+ }
547
+ /** Sharp's `recomb(matrix)`. 3×3 (RGB only, α preserved) or 4×4
548
+ * (full RGBA) row-major colour matrix. Accepts nested form
549
+ * `[[a,b,c],[d,e,f],[g,h,i]]` or flat `[a,b,c,d,e,f,g,h,i]`. */
550
+ recomb(matrix) {
551
+ const m = flattenRecombMatrix(matrix);
552
+ this.ops.push({ kind: 'recomb', matrix: m });
553
+ return this;
554
+ }
555
+ /** Sharp's `flatten([opts])`. Alpha-blend onto an opaque background
556
+ * and drop alpha. Buffer remains 4-channel for pipeline-shape
557
+ * invariance (α=255 across the result). */
558
+ flatten(opts) {
559
+ const bg = parseBackground(opts?.background, [0, 0, 0, 255]);
560
+ this.ops.push({ kind: 'flatten', bg });
561
+ return this;
562
+ }
563
+ /** Sharp's `unflatten()`. Every pixel where `R=G=B=255` becomes
564
+ * fully transparent (α=0); other pixels are unchanged. */
565
+ unflatten() {
566
+ this.ops.push({ kind: 'unflatten' });
567
+ return this;
568
+ }
569
+ /** Sharp's `boolean(operand, operator, [opts])`. Per-pixel bitwise
570
+ * `and` / `or` / `eor` (libvips name for XOR) across all four
571
+ * RGBA bands between this bitmap and `operand`. The operand is
572
+ * materialised at apply time (encoded image bytes by default;
573
+ * `opts.raw` for pre-decoded pixels — same shape as `joinChannel`). */
574
+ boolean(operand, operator, opts) {
575
+ const op = operator === 'xor' ? 'eor' : operator;
576
+ if (op !== 'and' && op !== 'or' && op !== 'eor') {
577
+ throw new RangeError(`microsharp: boolean(_, '${operator}') — expected 'and' | 'or' | 'eor' | 'xor'`);
578
+ }
579
+ this.ops.push({ kind: 'boolean', operand, raw: opts?.raw, op });
580
+ return this;
581
+ }
582
+ /** Sharp's `normalise([opts])`. Stretch the luma percentile range
583
+ * `[lower, upper]` to `[0, 255]` and broadcast the same affine map
584
+ * to all RGB channels. α preserved. Defaults `lower: 1`, `upper: 99`. */
585
+ normalise(opts) {
586
+ const lower = opts?.lower ?? 1;
587
+ const upper = opts?.upper ?? 99;
588
+ if (!Number.isFinite(lower) || !Number.isFinite(upper) ||
589
+ lower < 0 || upper > 100 || lower >= upper) {
590
+ throw new RangeError('microsharp: normalise({ lower, upper }) requires 0 ≤ lower < upper ≤ 100');
591
+ }
592
+ this.ops.push({ kind: 'normalise', lower, upper });
593
+ return this;
594
+ }
595
+ /** Alternative spelling of `normalise()` (sharp parity). */
596
+ normalize(opts) {
597
+ return this.normalise(opts);
598
+ }
599
+ /** Sharp's `clahe({ width, height, maxSlope? })`. Tile-based local
600
+ * histogram equalisation with bilinear interpolation between tile
601
+ * centres. `maxSlope` defaults to 3 (sharp parity); 0 disables the
602
+ * contrast clip and reduces to plain AHE. */
603
+ clahe(opts) {
604
+ if (!Number.isInteger(opts.width) || opts.width < 1 || opts.width > 10000) {
605
+ throw new RangeError('microsharp: clahe.width must be a positive integer');
606
+ }
607
+ if (!Number.isInteger(opts.height) || opts.height < 1 || opts.height > 10000) {
608
+ throw new RangeError('microsharp: clahe.height must be a positive integer');
609
+ }
610
+ const maxSlope = opts.maxSlope ?? 3;
611
+ if (!Number.isFinite(maxSlope) || maxSlope < 0 || maxSlope > 100) {
612
+ throw new RangeError('microsharp: clahe.maxSlope must be in [0, 100]');
613
+ }
614
+ this.ops.push({ kind: 'clahe', width: opts.width, height: opts.height, maxSlope });
615
+ return this;
616
+ }
617
    /** Sharp's `modulate({ brightness, saturation, hue, lightness })`.
     * Applied in HSV space (sharp uses LCh; documented 🟡 in
     * COMPATIBILITY.md). All four arguments are optional. α preserved.
     * NOTE(review): `hue` is only validated as a finite number here —
     * presumably degrees of rotation (sharp parity); confirm in the
     * effects layer. */
    modulate(opts) {
        // Multiplicative identities for brightness/saturation, additive
        // identities for hue/lightness.
        const brightness = opts?.brightness ?? 1;
        const saturation = opts?.saturation ?? 1;
        const hue = opts?.hue ?? 0;
        const lightness = opts?.lightness ?? 0;
        if (!Number.isFinite(brightness) || brightness < 0) {
            throw new RangeError('microsharp: modulate({ brightness }) must be ≥ 0');
        }
        if (!Number.isFinite(saturation) || saturation < 0) {
            throw new RangeError('microsharp: modulate({ saturation }) must be ≥ 0');
        }
        if (!Number.isFinite(hue)) {
            throw new RangeError('microsharp: modulate({ hue }) must be a finite number');
        }
        if (!Number.isFinite(lightness)) {
            throw new RangeError('microsharp: modulate({ lightness }) must be a finite number');
        }
        this.ops.push({ kind: 'modulate', brightness, saturation, hue, lightness });
        return this;
    }
640
+ /** Sharp's `greyscale([on])`. `on` defaults to `true`; passing `false`
641
+ * records nothing (sharp parity). Computes Rec.601 luma in 8-bit
642
+ * sRGB space — for a true linear-space conversion sharp recommends
643
+ * chaining a future `gamma()` op. */
644
+ greyscale(on = true) {
645
+ if (on)
646
+ this.ops.push({ kind: 'greyscale' });
647
+ return this;
648
+ }
649
+ /** Alternative spelling of `greyscale()` (sharp parity). */
650
+ grayscale(on = true) {
651
+ return this.greyscale(on);
652
+ }
653
+ /** Sharp's `pipelineColourspace([cs])`. Records the requested input
654
+ * colourspace; `b-w` / `grey16` triggers a leading greyscale so the
655
+ * rest of the pipeline runs on luma values. Other recognised libvips
656
+ * colourspace names are accepted as no-ops because simdra has no
657
+ * 16-bit / LAB / CMYK pipeline (documented in COMPATIBILITY.md).
658
+ * Unrecognised strings throw `RangeError`. */
659
+ pipelineColourspace(cs) {
660
+ this.pipelineColourspaceSetting = normaliseColourspace(cs, 'pipelineColourspace');
661
+ return this;
662
+ }
663
+ /** Alternative spelling of `pipelineColourspace()` (sharp parity). */
664
+ pipelineColorspace(cs) {
665
+ this.pipelineColourspaceSetting = normaliseColourspace(cs, 'pipelineColorspace');
666
+ return this;
667
+ }
668
+ /** Sharp's `toColourspace([cs])`. Records the requested output
669
+ * colourspace; `b-w` / `grey16` triggers a tail greyscale (buffer
670
+ * stays 4-channel for pipeline-shape invariance). All other
671
+ * recognised libvips colourspace names are 8-bit sRGB passthrough
672
+ * no-ops. Unrecognised strings throw `RangeError`. */
673
+ toColourspace(cs) {
674
+ this.toColourspaceSetting = normaliseColourspace(cs, 'toColourspace');
675
+ return this;
676
+ }
677
+ /** Alternative spelling of `toColourspace()` (sharp parity). */
678
+ toColorspace(cs) {
679
+ this.toColourspaceSetting = normaliseColourspace(cs, 'toColorspace');
680
+ return this;
681
+ }
682
+ /** Sharp's `joinChannel(image, options?)` — replace this bitmap's
683
+ * alpha channel with Rec.601 luma of the joined image's RGB.
684
+ *
685
+ * Microsharp's always-RGBA model can't grow beyond 4 channels, so
686
+ * we handle the common case (single mask image → new alpha) rather
687
+ * than libvips's full N-band append. Multi-image arrays and
688
+ * multi-band joins beyond RGBA aren't supported: pass a single
689
+ * Buffer/typed-array, or a `{ raw: { width, height, channels: 1|3|4 } }`
690
+ * options descriptor for pre-decoded mask pixels.
691
+ *
692
+ * Greyscale (1-channel) and grey+alpha (1- or 4-channel with
693
+ * R=G=B) masks round-trip exactly: luma collapses to R. RGB masks
694
+ * are converted via `0.299·R + 0.587·G + 0.114·B`. */
695
+ joinChannel(image, options) {
696
+ if (Array.isArray(image)) {
697
+ if (image.length !== 1) {
698
+ throw new RangeError('microsharp: joinChannel() accepts a single image (libvips multi-band join not supported in the always-RGBA model)');
699
+ }
700
+ image = image[0];
701
+ }
702
+ this.ops.push({ kind: 'joinChannel', image, raw: options?.raw });
703
+ return this;
704
+ }
705
+ png(opts) {
706
+ this.outputFormat = 'png';
707
+ if (opts?.compressionLevel !== undefined) {
708
+ const lvl = opts.compressionLevel;
709
+ if (!Number.isInteger(lvl) || lvl < 0 || lvl > 9) {
710
+ throw new RangeError('microsharp: png({ compressionLevel }) must be an integer in [0, 9]');
711
+ }
712
+ this.pngCompressionLevel = lvl;
713
+ }
714
+ else {
715
+ this.pngCompressionLevel = null;
716
+ }
717
+ return this;
718
+ }
719
+ /** `quality` is the HTML5 0.0–1.0 range; default 0.92 (Chromium default).
720
+ * Accepts either a bare number or sharp's `{ quality }` object form. */
721
+ jpeg(opts) {
722
+ this.outputFormat = 'jpeg';
723
+ const quality = typeof opts === 'number' ? opts : opts?.quality;
724
+ if (quality !== undefined) {
725
+ const f = Number.isFinite(quality) && quality >= 0 && quality <= 1 ? quality : 0.92;
726
+ this.jpegQuality = Math.max(1, Math.min(100, Math.round(f * 100)));
727
+ }
728
+ return this;
729
+ }
730
+ /** 32-bit BMP V4 (stb's `comp=4` path; preserves alpha via BI_BITFIELDS). */
731
+ bmp() {
732
+ this.outputFormat = 'bmp';
733
+ return this;
734
+ }
735
+ /** Raw RGBA pixel data. Channel ordering is RGBA, top-to-bottom, no
736
+ * padding (forced 4-channel by the stb_image decode path). */
737
+ raw() {
738
+ this.outputFormat = 'raw';
739
+ return this;
740
+ }
741
+ /** Force output format. Sharp accepts an object with an `id` attribute
742
+ * for libvips-specific format options; we accept the string only. */
743
+ toFormat(format) {
744
+ if (format !== 'png' && format !== 'jpeg' && format !== 'bmp' && format !== 'raw') {
745
+ throw new RangeError(`microsharp: toFormat() expects 'png' | 'jpeg' | 'bmp' | 'raw' (got ${String(format)})`);
746
+ }
747
+ this.outputFormat = format;
748
+ return this;
749
+ }
750
+ async toBuffer(opts) {
751
+ const bytes = await this.getInput();
752
+ let bitmap = SmBitmap.decode(bytes);
753
+ try {
754
+ bitmap = await this.applyOps(bitmap);
755
+ const data = this.encodeBitmap(bitmap);
756
+ if (opts?.resolveWithObject) {
757
+ const info = {
758
+ format: this.outputFormat,
759
+ size: data.byteLength,
760
+ width: bitmap.width,
761
+ height: bitmap.height,
762
+ channels: outputChannelsFor(this.outputFormat),
763
+ };
764
+ return { data, info };
765
+ }
766
+ return data;
767
+ }
768
+ finally {
769
+ SmBitmap.release(bitmap);
770
+ }
771
+ }
772
+ encodeBitmap(bitmap) {
773
+ switch (this.outputFormat) {
774
+ case 'png':
775
+ return zigBytesToCopy(this.pngCompressionLevel === null
776
+ ? bitmap.encodePng()
777
+ : bitmap.encodePngWithLevel(this.pngCompressionLevel));
778
+ case 'jpeg':
779
+ return zigBytesToCopy(bitmap.encodeJpeg(this.jpegQuality));
780
+ case 'bmp':
781
+ return zigBytesToCopy(bitmap.encodeBmp());
782
+ case 'raw':
783
+ return bitmapPixelsToCopy(bitmap);
784
+ }
785
+ }
786
+ async metadata() {
787
+ const bytes = await this.getInput();
788
+ const info = SmBitmap.peekInfo(bytes);
789
+ const channels = info.channels;
790
+ return {
791
+ format: detectFormat(bytes),
792
+ width: info.width,
793
+ height: info.height,
794
+ channels,
795
+ hasAlpha: channels === 2 || channels === 4,
796
+ bitsPerSample: info.bits_per_sample,
797
+ size: bytes.byteLength,
798
+ };
799
+ }
800
+ getInput() {
801
+ if (!this.materialized)
802
+ this.materialized = readToUint8Array(this.input);
803
+ return this.materialized;
804
+ }
805
/** Run each queued op in order. Each op consumes the previous bitmap
 * and returns a fresh one; the consumed bitmap is released
 * immediately to keep peak memory bounded. Async because
 * `composite` may need to await overlay materialization (decode
 * bytes, drain a stream, etc.). */
async applyOps(initial) {
    let bitmap = initial;
    // pipelineColourspace runs at the head — sharp's "input is converted
    // to the provided colourspace at the start of the pipeline".
    // Only colourspaces in GREY_COLOURSPACES do real work here; all
    // other recorded settings are passthrough.
    if (this.pipelineColourspaceSetting !== undefined &&
        GREY_COLOURSPACES.has(this.pipelineColourspaceSetting)) {
        const next = bitmap.greyscale();
        SmBitmap.release(bitmap);
        bitmap = next;
    }
    for (const op of this.ops) {
        const next = await this.runOp(op, bitmap);
        // Ops may hand back their input unchanged (e.g. a no-op resize);
        // only release the old bitmap when ownership moved to `next`.
        if (next !== bitmap) {
            SmBitmap.release(bitmap);
            bitmap = next;
        }
    }
    // toColourspace runs at the tail — sharp's "before converting to the
    // output colourspace, as defined by toColourspace".
    if (this.toColourspaceSetting !== undefined &&
        GREY_COLOURSPACES.has(this.toColourspaceSetting)) {
        const next = bitmap.greyscale();
        SmBitmap.release(bitmap);
        bitmap = next;
    }
    return bitmap;
}
837
/** Dispatch a single queued op against `bitmap`. Returns the result
 * bitmap (or, for the async helpers like composite/joinChannel/
 * boolean, a Promise of it — applyOps awaits the return value either
 * way). Per the applyOps contract, returning `bitmap` itself means
 * "no-op, don't release me"; returning a new bitmap transfers
 * ownership to the caller. */
runOp(op, bitmap) {
    switch (op.kind) {
        // -- geometry ops (delegated to helpers below) --
        case 'resize':
            return runResize(bitmap, op);
        case 'extend':
            return runExtend(bitmap, op);
        case 'extract':
            return runExtract(bitmap, op);
        case 'trim':
            return runTrim(bitmap, op);
        case 'composite':
            return runComposite(bitmap, op);
        // -- channel ops --
        case 'removeAlpha':
            return bitmap.removeAlpha();
        case 'ensureAlpha':
            if (op.alpha === undefined) {
                // Our bitmaps always have alpha. No-op; emit a fresh copy so
                // the applyOps release contract holds.
                return bitmap.extract(0, 0, bitmap.width, bitmap.height);
            }
            // op.alpha is the 0–1 sharp range; scale and clamp to a byte.
            return bitmap.setAlphaConstant(Math.max(0, Math.min(255, Math.round(op.alpha * 255))));
        case 'extractChannel':
            return bitmap.extractChannel(op.channel);
        case 'bandbool':
            return bitmap.bandbool(op.op);
        case 'joinChannel':
            return runJoinChannel(bitmap, op);
        case 'greyscale':
            return bitmap.greyscale();
        case 'tint':
            return bitmap.tint(op.r, op.g, op.b);
        // -- orientation ops --
        case 'rotate':
            return runRotate(bitmap, op);
        case 'autoOrient':
            // autoOrient needs the original encoded bytes (this.materialized)
            // alongside the decoded bitmap.
            return runAutoOrient(bitmap, this.materialized);
        case 'flip':
            return bitmap.flipV();
        case 'flop':
            return bitmap.flipH();
        case 'affine':
            return bitmap.affine(op.m00, op.m01, op.m10, op.m11, op.idx, op.idy, op.odx, op.ody, op.bg[0], op.bg[1], op.bg[2], op.bg[3], op.interp);
        // -- convolution / filter ops --
        case 'blur':
            // No sigma → fast 3×3 box blur (sharp's fast path).
            return op.sigma === undefined
                ? bitmap.blurBox3()
                : bitmap.blurGaussian(op.sigma, op.precision, op.minAmplitude);
        case 'sharpen':
            // No sigma → fast fixed sharpen; otherwise unsharp mask.
            return op.sigma === undefined
                ? bitmap.sharpenFast()
                : bitmap.sharpenUSM(op.sigma, op.m1, op.m2, op.x1, op.y2, op.y3);
        case 'convolve':
            return bitmap.convolve(op.width, op.height, op.kernel, op.scale, op.offset);
        case 'median':
            return bitmap.median(op.size);
        case 'dilate':
            return bitmap.dilate(op.width);
        case 'erode':
            return bitmap.erode(op.width);
        // -- point / colour ops --
        case 'gamma':
            return bitmap.gamma(op.gIn, op.gOut);
        case 'negate':
            return bitmap.negate(op.alpha);
        case 'linear':
            return bitmap.linear(op.a, op.b);
        case 'threshold':
            return bitmap.threshold(op.t, op.greyscale);
        case 'recomb':
            return bitmap.recomb(op.matrix);
        case 'flatten':
            return bitmap.flatten(op.bg[0], op.bg[1], op.bg[2]);
        case 'unflatten':
            return bitmap.unflatten();
        case 'boolean':
            return runBoolean(bitmap, op);
        case 'normalise':
            return bitmap.normalise(op.lower, op.upper);
        case 'clahe':
            return bitmap.clahe(op.width, op.height, op.maxSlope);
        case 'modulate':
            return bitmap.modulate(op.brightness, op.saturation, op.hue, op.lightness);
    }
}
918
+ }
919
/** Public entry point: build a fluent, sharp-shaped pipeline over
 * `input` (bytes, Blob, Response, or ReadableStream). */
export function microsharp(input) {
    const pipeline = new MicroSharpPipeline(input);
    return pipeline;
}
922
+ // ---- internal helpers -------------------------------------------------------
923
/**
 * Materialize any supported byte source into a `Uint8Array`.
 *
 * Accepted inputs: `Uint8Array` (returned as-is), any other
 * `ArrayBuffer` view (`Buffer`, `DataView`, other typed arrays —
 * re-wrapped over the same underlying bytes without copying),
 * `ArrayBuffer`, `Blob`, `Response`, and
 * `ReadableStream<Uint8Array>` (drained fully, chunks concatenated).
 *
 * @returns {Promise<Uint8Array>} the materialized bytes.
 * @throws {TypeError} for any other input type.
 */
async function readToUint8Array(input) {
    if (input instanceof Uint8Array)
        return input;
    // Generalization: accept every ArrayBuffer view (Node Buffer,
    // DataView, Float32Array, …) as a byte source, viewing the same
    // memory rather than copying.
    if (ArrayBuffer.isView(input))
        return new Uint8Array(input.buffer, input.byteOffset, input.byteLength);
    if (input instanceof ArrayBuffer)
        return new Uint8Array(input);
    if (typeof Blob !== 'undefined' && input instanceof Blob) {
        return new Uint8Array(await input.arrayBuffer());
    }
    if (typeof Response !== 'undefined' && input instanceof Response) {
        return new Uint8Array(await input.arrayBuffer());
    }
    if (typeof ReadableStream !== 'undefined' && input instanceof ReadableStream) {
        const reader = input.getReader();
        const chunks = [];
        let total = 0;
        for (;;) {
            const { done, value } = await reader.read();
            if (done)
                break;
            chunks.push(value);
            total += value.byteLength;
        }
        const out = new Uint8Array(total);
        let offset = 0;
        for (const c of chunks) {
            out.set(c, offset);
            offset += c.byteLength;
        }
        return out;
    }
    throw new TypeError('microsharp: input must be Uint8Array, ArrayBuffer, Blob, ReadableStream, or Response');
}
955
/** Sniff the container format from magic bytes. stb_image's public
 * API never reports which decoder ran, so we mirror its detection
 * set (PNG, JPEG, BMP, GIF87a/89a — see `decode/stb.zig`) and answer
 * 'unknown' for anything else. */
function detectFormat(bytes) {
    const startsWith = (sig) => bytes.length >= sig.length && sig.every((b, i) => bytes[i] === b);
    if (startsWith([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a])) {
        return 'png';
    }
    if (startsWith([0xff, 0xd8, 0xff])) {
        return 'jpeg';
    }
    if (startsWith([0x42, 0x4d])) {
        return 'bmp';
    }
    // 'GIF87a' or 'GIF89a'
    const gif87 = startsWith([0x47, 0x49, 0x46, 0x38, 0x37, 0x61]);
    const gif89 = startsWith([0x47, 0x49, 0x46, 0x38, 0x39, 0x61]);
    if (gif87 || gif89) {
        return 'gif';
    }
    return 'unknown';
}
977
// Resample kernels the Zig core implements; order is only cosmetic
// (used for the error message).
const VALID_KERNELS = [
    'nearest', 'linear', 'cubic', 'mitchell', 'lanczos2', 'lanczos3', 'mks2013', 'mks2021',
];
/** Validate a user-supplied resize kernel name; undefined falls back
 * to sharp's default ('lanczos3'). Unknown names throw `RangeError`. */
function resolveKernel(k) {
    if (k === undefined) {
        return 'lanczos3'; // sharp's default
    }
    if (VALID_KERNELS.includes(k)) {
        return k;
    }
    throw new RangeError(`microsharp: kernel must be one of ${VALID_KERNELS.join(', ')} (got ${String(k)})`);
}
988
/** Map sharp's position / gravity spellings onto an `{x, y}` anchor
 * pair. 'entropy' / 'attention' pass through as bare strings (the
 * content-aware crop strategies). Unknown spellings throw
 * `RangeError`; undefined defaults to centre. */
function parsePosition(p) {
    if (p === undefined) {
        return { x: 'centre', y: 'centre' };
    }
    const norm = String(p).toLowerCase().trim();
    if (norm === 'entropy' || norm === 'attention') {
        return norm;
    }
    // Null prototype so lookups can only hit our own keys (never
    // Object.prototype members like 'constructor').
    const anchors = {
        __proto__: null,
        centre: ['centre', 'centre'],
        center: ['centre', 'centre'],
        top: ['centre', 'top'],
        north: ['centre', 'top'],
        bottom: ['centre', 'bottom'],
        south: ['centre', 'bottom'],
        left: ['left', 'centre'],
        west: ['left', 'centre'],
        right: ['right', 'centre'],
        east: ['right', 'centre'],
        'top right': ['right', 'top'],
        'right top': ['right', 'top'],
        northeast: ['right', 'top'],
        'top left': ['left', 'top'],
        'left top': ['left', 'top'],
        northwest: ['left', 'top'],
        'bottom right': ['right', 'bottom'],
        'right bottom': ['right', 'bottom'],
        southeast: ['right', 'bottom'],
        'bottom left': ['left', 'bottom'],
        'left bottom': ['left', 'bottom'],
        southwest: ['left', 'bottom'],
    };
    const hit = anchors[norm];
    if (hit !== undefined) {
        return { x: hit[0], y: hit[1] };
    }
    throw new RangeError(`microsharp: unknown position '${String(p)}'`);
}
1024
/**
 * Normalise a sharp-style background colour to `[r, g, b, a]` bytes.
 *
 * @param bg A CSS colour string, an `{r, g, b, alpha?}` object
 *           (`alpha` in 0–1), or undefined.
 * @param fallback RGBA tuple returned when `bg` is undefined.
 * @returns `[r, g, b, a]`, each 0–255.
 * @throws {RangeError} on unparsable strings or non-colour values.
 */
function parseBackground(bg, fallback) {
    if (bg === undefined)
        return fallback;
    if (typeof bg === 'string') {
        const packed = parseCssColor(bg);
        if (packed === null) {
            throw new RangeError(`microsharp: invalid background colour '${bg}'`);
        }
        // Unpack: red lives in the low byte, alpha in the high byte.
        const u = packed >>> 0;
        return [u & 0xff, (u >>> 8) & 0xff, (u >>> 16) & 0xff, (u >>> 24) & 0xff];
    }
    // Fix: `typeof null === 'object'`, so a null background previously
    // crashed with a TypeError reading `.r`. Route it to the uniform
    // RangeError below instead.
    if (bg !== null && typeof bg === 'object') {
        const r = clampByte(bg.r);
        const g = clampByte(bg.g);
        const b = clampByte(bg.b);
        // Missing alpha defaults to fully opaque.
        const a = bg.alpha === undefined ? 255 : clampByte(Math.round(bg.alpha * 255));
        return [r, g, b, a];
    }
    throw new RangeError('microsharp: background must be a CSS string or {r,g,b,alpha?} object');
}
1044
/** Round and clamp a number into the byte range 0–255; anything
 * non-finite (NaN, ±Infinity, undefined) collapses to 0. */
function clampByte(n) {
    return Number.isFinite(n) ? Math.min(255, Math.max(0, Math.round(n))) : 0;
}
1049
/** Resolve the requested output dimensions for a resize op. A missing
 * axis is derived from the source aspect ratio (min 1px); object-form
 * `{width, height}` in `op.opts` fills gaps (sharp parity); neither
 * given → source dims, i.e. a no-op resize (sharp behavior). */
function computeTargetDims(srcW, srcH, op) {
    const aspect = srcW / srcH;
    let { width, height } = op;
    if (op.opts) {
        // opts only fills axes the op itself left undefined.
        if (width === undefined && op.opts.width !== undefined) {
            width = op.opts.width;
        }
        if (height === undefined && op.opts.height !== undefined) {
            height = op.opts.height;
        }
    }
    const haveW = width != null;
    const haveH = height != null;
    if (haveW && haveH) {
        return { width, height };
    }
    if (haveW) {
        return { width, height: Math.max(1, Math.round(width / aspect)) };
    }
    if (haveH) {
        return { width: Math.max(1, Math.round(height * aspect)), height };
    }
    return { width: srcW, height: srcH };
}
1068
/** Translate a sharp `fit` mode into (a) the intermediate resample
 * size and (b) the final canvas size. 'fill' stretches both to the
 * target; 'inside'/'outside' scale by the min/max factor and output
 * at the scaled size; 'contain'/'cover' (and any unrecognised fit,
 * which defaults to cover) scale by min/max but keep the exact target
 * canvas — the caller letterboxes or crops the difference. */
function computeFitDims(srcW, srcH, target, fit) {
    const { width: tW, height: tH } = target;
    if (fit === 'fill') {
        return { resampleW: tW, resampleH: tH, outW: tW, outH: tH };
    }
    const sx = tW / srcW;
    const sy = tH / srcH;
    // 'inside'/'contain' shrink to fit within the target box;
    // 'outside'/'cover' (default) grow to fully cover it.
    const shrinkToFit = fit === 'inside' || fit === 'contain';
    const s = shrinkToFit ? Math.min(sx, sy) : Math.max(sx, sy);
    const w = Math.max(1, Math.round(srcW * s));
    const h = Math.max(1, Math.round(srcH * s));
    const freeCanvas = fit === 'inside' || fit === 'outside';
    return freeCanvas
        ? { resampleW: w, resampleH: h, outW: w, outH: h }
        : { resampleW: w, resampleH: h, outW: tW, outH: tH };
}
1102
/** Execute a queued resize: resolve target dims, honour the
 * without-enlargement/reduction vetoes, resample to the intermediate
 * size for the chosen `fit`, then crop (cover) or letterbox (contain)
 * onto the final canvas. Returns `bitmap` unchanged for no-ops (the
 * applyOps contract: caller releases the input only when the return
 * differs). */
function runResize(bitmap, op) {
    const target = computeTargetDims(bitmap.width, bitmap.height, op);
    if (target.width === bitmap.width && target.height === bitmap.height) {
        // No-op: dims unchanged.
        return bitmap;
    }
    const opts = op.opts ?? {};
    const fit = opts.fit ?? 'cover';
    const kernel = resolveKernel(opts.kernel);
    // withoutEnlargement / withoutReduction veto if the resize would go
    // the disallowed direction. Sharp defines these per-axis: the resize
    // is skipped if BOTH dims would scale in the forbidden direction.
    const grows = target.width > bitmap.width || target.height > bitmap.height;
    const shrinks = target.width < bitmap.width || target.height < bitmap.height;
    if (opts.withoutEnlargement && grows && !shrinks)
        return bitmap;
    if (opts.withoutReduction && shrinks && !grows)
        return bitmap;
    const { resampleW, resampleH, outW, outH } = computeFitDims(bitmap.width, bitmap.height, target, fit);
    // Stage 1: resample to the intermediate size (skipped if already there).
    const needsResample = resampleW !== bitmap.width || resampleH !== bitmap.height;
    const scaled = needsResample
        ? bitmap.resample(resampleW, resampleH, kernel)
        : bitmap;
    // Helper: free `scaled` iff we own it AND we're not returning it.
    // We OWN `scaled` when `needsResample` is true (the resample call
    // produced a fresh bitmap); when it aliases the input, the caller
    // (applyOps) is responsible for it.
    const finish = (out) => {
        if (needsResample && out !== scaled)
            SmBitmap.release(scaled);
        return out;
    };
    if (fit === 'fill' || fit === 'inside' || fit === 'outside') {
        // No post-processing. Transfer ownership: if we resampled, return
        // the fresh bitmap directly; otherwise copy so applyOps's release
        // contract still holds (it releases the old when next !== old).
        if (needsResample)
            return scaled;
        return passthroughCopy(scaled);
    }
    if (fit === 'cover') {
        const pos = parsePosition(opts.position);
        let cropLeft;
        let cropTop;
        if (pos === 'entropy' || pos === 'attention') {
            // Content-aware crop: ask the core where the interesting
            // outW×outH window is.
            const bounds = scaled.contentBounds(outW, outH, pos);
            cropLeft = bounds.left;
            cropTop = bounds.top;
        }
        else {
            ({ cropLeft, cropTop } = computeCornerCrop(resampleW, resampleH, outW, outH, pos));
        }
        const out = scaled.extract(cropLeft, cropTop, outW, outH);
        return finish(out);
    }
    // contain: letterbox onto outW × outH
    const pos = parsePosition(opts.position);
    // Sharp ignores entropy/attention for contain; we match by falling
    // back to centre.
    const anchor = (pos === 'entropy' || pos === 'attention')
        ? { x: 'centre', y: 'centre' }
        : pos;
    const { offsetX, offsetY } = computeOffset(resampleW, resampleH, outW, outH, anchor);
    // Convert the placement offset into per-side padding for extend().
    const padTop = offsetY;
    const padLeft = offsetX;
    const padBottom = outH - resampleH - offsetY;
    const padRight = outW - resampleW - offsetX;
    const bg = parseBackground(opts.background, [0, 0, 0, 255]);
    const padded = scaled.extend(padTop, padRight, padBottom, padLeft, 'background', bg[0], bg[1], bg[2], bg[3]);
    return finish(padded);
}
1174
/** Produce a fresh bitmap with identical content via a full-frame
 * extract. Used where an op is a logical no-op but the caller's
 * release contract expects a NEW allocation it may free without
 * touching the input. */
function passthroughCopy(bitmap) {
    const { width, height } = bitmap;
    return bitmap.extract(0, 0, width, height);
}
1180
/** Top-left corner of an outW×outH crop window inside an intW×intH
 * intermediate image, anchored per `{x, y}` (left/centre/right ×
 * top/centre/bottom). */
function computeCornerCrop(intW, intH, outW, outH, anchor) {
    // Position one axis: far edge pins to (size - out), centre splits
    // the slack, anything else pins to 0 (the near edge).
    const along = (size, out, pos, farEdge) => {
        if (pos === farEdge)
            return size - out;
        if (pos === 'centre')
            return Math.round((size - out) / 2);
        return 0;
    };
    return {
        cropLeft: along(intW, outW, anchor.x, 'right'),
        cropTop: along(intH, outH, anchor.y, 'bottom'),
    };
}
1207
/** Placement offset of an intW×intH image on an outW×outH canvas for
 * the given `{x, y}` anchor (mirror of computeCornerCrop, with image
 * and canvas roles swapped). */
function computeOffset(intW, intH, outW, outH, anchor) {
    // Far edge pushes the image flush against it; centre splits the
    // slack; anything else sits at 0.
    const place = (span, canvas, pos, farEdge) => {
        if (pos === farEdge)
            return canvas - span;
        if (pos === 'centre')
            return Math.round((canvas - span) / 2);
        return 0;
    };
    return {
        offsetX: place(intW, outW, anchor.x, 'right'),
        offsetY: place(intH, outH, anchor.y, 'bottom'),
    };
}
1234
/** Pad the bitmap on each side. Accepts sharp's numeric shorthand
 * (uniform non-negative integer margin) or the object form
 * `{top, right, bottom, left, extendWith, background}`. A zero-margin
 * call returns a fresh full-frame copy so the applyOps release
 * contract stays uniform. */
function runExtend(bitmap, op) {
    let margins;
    let mode = 'background';
    let bgInput;
    if (typeof op.opts === 'number') {
        const n = op.opts;
        if (!Number.isInteger(n) || n < 0) {
            throw new RangeError('microsharp: extend(n) requires a non-negative integer');
        }
        margins = { top: n, right: n, bottom: n, left: n };
    }
    else {
        margins = {
            top: op.opts.top ?? 0,
            right: op.opts.right ?? 0,
            bottom: op.opts.bottom ?? 0,
            left: op.opts.left ?? 0,
        };
        mode = op.opts.extendWith ?? 'background';
        bgInput = op.opts.background;
    }
    if (mode !== 'background' && mode !== 'copy' && mode !== 'repeat' && mode !== 'mirror') {
        throw new RangeError(`microsharp: extendWith must be 'background' | 'copy' | 'repeat' | 'mirror' (got ${String(mode)})`);
    }
    for (const name of ['top', 'right', 'bottom', 'left']) {
        const v = margins[name];
        if (!Number.isInteger(v) || v < 0) {
            throw new RangeError(`microsharp: extend.${name} must be a non-negative integer`);
        }
    }
    const { top, right, bottom, left } = margins;
    if (top === 0 && right === 0 && bottom === 0 && left === 0) {
        // Logical no-op — hand back a fresh copy the caller may free.
        return bitmap.extract(0, 0, bitmap.width, bitmap.height);
    }
    const bg = parseBackground(bgInput, [0, 0, 0, 255]);
    return bitmap.extend(top, right, bottom, left, mode, bg[0], bg[1], bg[2], bg[3]);
}
1268
/**
 * Crop the bitmap to `op.region` (`{left, top, width, height}`).
 *
 * Validates the region up front: every field must be an integer,
 * `left`/`top` ≥ 0, `width`/`height` ≥ 1, and the window must lie
 * inside the bitmap. The previous check only caught overflow past the
 * right/bottom edges, so a negative `left`/`top` (or a zero or
 * fractional dimension) slipped through to the Zig extract call.
 *
 * @throws {RangeError} on an invalid or out-of-bounds region.
 */
function runExtract(bitmap, op) {
    const { left, top, width, height } = op.region;
    if (!Number.isInteger(left) || !Number.isInteger(top) ||
        !Number.isInteger(width) || !Number.isInteger(height) ||
        left < 0 || top < 0 || width < 1 || height < 1) {
        throw new RangeError(`microsharp: extract region (${left},${top} ${width}×${height}) ` +
            `must use non-negative integer offsets and positive integer dimensions`);
    }
    if (left + width > bitmap.width || top + height > bitmap.height) {
        throw new RangeError(`microsharp: extract region (${left},${top} ${width}×${height}) ` +
            `out of bounds for ${bitmap.width}×${bitmap.height} bitmap`);
    }
    return bitmap.extract(left, top, width, height);
}
1276
// =============================================================================
// composite — sharp's libvips/cairo blend names → simdra's HTML5 enum.
// =============================================================================
// Null prototype so lookups can only ever see our own keys: with a
// plain `{}`, inherited Object.prototype members (e.g. 'constructor',
// 'hasOwnProperty') would resolve to non-undefined garbage and leak
// through resolveBlend's unknown-name check.
const BLEND_MAP = Object.assign(Object.create(null), {
    // Cairo / libvips short names
    'over': 'src_over',
    'source': 'copy',
    'in': 'src_in',
    'out': 'src_out',
    'atop': 'src_atop',
    'dest': 'dest', // identity — caller skips draw
    'dest-over': 'dst_over',
    'dest-in': 'dst_in',
    'dest-out': 'dst_out',
    'dest-atop': 'dst_atop',
    'xor': 'xor',
    'add': 'add',
    // Separable W3C blends
    'multiply': 'multiply',
    'screen': 'screen',
    'overlay': 'overlay',
    'darken': 'darken',
    'lighten': 'lighten',
    'colour-dodge': 'color_dodge',
    'color-dodge': 'color_dodge',
    'colour-burn': 'color_burn',
    'color-burn': 'color_burn',
    'hard-light': 'hard_light',
    'soft-light': 'soft_light',
    'difference': 'difference',
    'exclusion': 'exclusion',
});
/**
 * Map a sharp composite blend name onto simdra's blend enum.
 * Undefined defaults to 'over'. 'clear' / 'saturate' are recognised
 * but unsupported (no simdra kernel); any other unknown name throws.
 *
 * @throws {RangeError} for unsupported or unknown blend names.
 */
function resolveBlend(b) {
    const key = (b ?? 'over').toLowerCase();
    // 'clear' and 'saturate' are libvips/cairo modes simdra doesn't ship.
    if (key === 'clear' || key === 'saturate') {
        throw new RangeError(`microsharp: composite blend '${b}' not supported (no equivalent in simdra's blend kernel set)`);
    }
    const m = BLEND_MAP[key];
    if (m === undefined) {
        throw new RangeError(`microsharp: unknown composite blend '${b}'`);
    }
    return m;
}
1320
/** Sharp gravity spellings → `{x, y}` anchor for composite overlay
 * placement. Same vocabulary as parsePosition, minus the
 * content-aware ('entropy'/'attention') modes. Unknown spellings
 * throw `RangeError`; undefined defaults to centre. */
function resolveCompositeGravity(g) {
    if (g === undefined) {
        return { x: 'centre', y: 'centre' };
    }
    const norm = String(g).toLowerCase().trim();
    // Null prototype: only our own keys can match the lookup.
    const anchors = {
        __proto__: null,
        centre: ['centre', 'centre'],
        center: ['centre', 'centre'],
        top: ['centre', 'top'],
        north: ['centre', 'top'],
        bottom: ['centre', 'bottom'],
        south: ['centre', 'bottom'],
        left: ['left', 'centre'],
        west: ['left', 'centre'],
        right: ['right', 'centre'],
        east: ['right', 'centre'],
        'top right': ['right', 'top'],
        'right top': ['right', 'top'],
        northeast: ['right', 'top'],
        'top left': ['left', 'top'],
        'left top': ['left', 'top'],
        northwest: ['left', 'top'],
        'bottom right': ['right', 'bottom'],
        'right bottom': ['right', 'bottom'],
        southeast: ['right', 'bottom'],
        'bottom left': ['left', 'bottom'],
        'left bottom': ['left', 'bottom'],
        southwest: ['left', 'bottom'],
    };
    const hit = anchors[norm];
    if (hit !== undefined) {
        return { x: hit[0], y: hit[1] };
    }
    throw new RangeError(`microsharp: unknown composite gravity '${String(g)}'`);
}
1354
/** Where to place an ovW×ovH overlay on a baseW×baseH destination for
 * a gravity anchor. Offsets may be negative when the overlay is
 * larger than the base. */
function gravityOffset(baseW, baseH, ovW, ovH, anchor) {
    // Far edge pins the overlay flush; centre splits the difference;
    // anything else sits at 0 (the near edge).
    const axis = (base, ov, pos, farEdge) => {
        if (pos === farEdge)
            return base - ov;
        if (pos === 'centre')
            return Math.round((base - ov) / 2);
        return 0;
    };
    return {
        dx: axis(baseW, ovW, anchor.x, 'right'),
        dy: axis(baseH, ovH, anchor.y, 'bottom'),
    };
}
1381
/** Turn a composite/joinChannel/boolean operand into an `SmBitmap`.
 * Three input shapes: a `{ create }` descriptor (solid-fill bitmap,
 * bypasses the byte readers), bytes plus a `raw` descriptor
 * (pre-decoded pixels, expanded to RGBA), or plain encoded image
 * bytes (decoded by the core). */
async function materializeOverlay(input, raw) {
    const isByteSource = input instanceof Uint8Array ||
        input instanceof ArrayBuffer ||
        (typeof Blob !== 'undefined' && input instanceof Blob) ||
        (typeof Response !== 'undefined' && input instanceof Response) ||
        (typeof ReadableStream !== 'undefined' && input instanceof ReadableStream);
    // `{ create }` path first — bypasses the byte readers.
    if (input && typeof input === 'object' && !isByteSource && 'create' in input && input.create) {
        const cre = input.create;
        if (cre.channels !== 3 && cre.channels !== 4) {
            throw new RangeError(`microsharp: composite create.channels must be 3 or 4; got ${cre.channels}`);
        }
        const bg = parseBackground(cre.background, [0, 0, 0, 255]);
        // 3-channel creates are fully opaque regardless of background alpha.
        const alpha = cre.channels === 3 ? 255 : bg[3];
        const fill = [bg[0], bg[1], bg[2], alpha];
        const data = new Uint8Array(cre.width * cre.height * 4);
        for (let px = 0; px < data.length; px += 4) {
            data.set(fill, px);
        }
        return SmBitmap.createFromBuffer(data, cre.width, cre.height, {});
    }
    // Materialize bytes once.
    const bytes = await readToUint8Array(input);
    if (raw !== undefined) {
        // Sharp-style raw descriptor: bytes are pre-decoded pixels at the
        // given band count; 1- and 3-channel inputs get expanded to RGBA
        // so the rest of the pipeline needs no per-call channel switch.
        return rawDescriptorToRgba(bytes, raw);
    }
    // Encoded image bytes.
    return SmBitmap.decode(bytes);
}
1417
/** Convert a sharp-style raw descriptor into an RGBA `SmBitmap`.
 * Supports `channels` ∈ {1, 3, 4}: 1-channel greyscale is broadcast
 * to RGB, 3-channel RGB gains opaque alpha, 4-channel passes straight
 * through. Rejects 2 (rare grey+alpha), anything else, and byte
 * counts that don't match the descriptor. */
function rawDescriptorToRgba(bytes, raw) {
    const px = raw.width * raw.height;
    const expected = px * raw.channels;
    if (bytes.byteLength !== expected) {
        throw new RangeError(`microsharp: raw bytes length ${bytes.byteLength} ≠ width*height*channels ` +
            `(${raw.width}×${raw.height}×${raw.channels} = ${expected})`);
    }
    if (raw.channels === 4) {
        // Already RGBA — hand straight to the core.
        return SmBitmap.createFromBuffer(bytes, raw.width, raw.height, {});
    }
    if (raw.channels !== 1 && raw.channels !== 3) {
        throw new RangeError(`microsharp: raw.channels must be 1, 3, or 4 (got ${raw.channels}); 2-channel grey+alpha is not supported`);
    }
    const grey = raw.channels === 1;
    const out = new Uint8Array(px * 4);
    for (let i = 0; i < px; i++) {
        const src = i * raw.channels;
        const dst = i * 4;
        out[dst + 0] = bytes[src];
        out[dst + 1] = grey ? bytes[src] : bytes[src + 1];
        out[dst + 2] = grey ? bytes[src] : bytes[src + 2];
        out[dst + 3] = 255;
    }
    return SmBitmap.createFromBuffer(out, raw.width, raw.height, {});
}
1453
/** Apply a queued composite: draw each overlay in `op.images` onto
 * the accumulating result in order. Overlays are materialized one at
 * a time and always released; intermediate composited bitmaps are
 * released as soon as they're superseded. Per the applyOps contract
 * the function always returns a bitmap the caller owns — a fresh copy
 * if no draw actually happened. */
async function runComposite(bitmap, op) {
    let current = bitmap;
    let ownsCurrent = false;
    // `current` starts aliased to the caller's bitmap. We replace it with
    // a freshly-allocated composited bitmap on first iteration; subsequent
    // iterations release the previous owned bitmap.
    try {
        for (const img of op.images) {
            const blend = resolveBlend(img.blend);
            // 'dest' is sharp's identity blend — keep destination, ignore
            // source. Skip the draw entirely.
            if (blend === 'dest')
                continue;
            const overlay = await materializeOverlay(img.input, img.raw);
            try {
                let dx;
                let dy;
                // Explicit top/left placement wins over gravity.
                if (img.top !== undefined && img.left !== undefined) {
                    dx = img.left;
                    dy = img.top;
                }
                else {
                    const anchor = resolveCompositeGravity(img.gravity);
                    ({ dx, dy } = gravityOffset(current.width, current.height, overlay.width, overlay.height, anchor));
                }
                const next = current.composite(overlay, blend, dx, dy, img.tile === true);
                // Only release `current` once we own it (never the caller's
                // original input bitmap).
                if (ownsCurrent)
                    SmBitmap.release(current);
                current = next;
                ownsCurrent = true;
            }
            finally {
                SmBitmap.release(overlay);
            }
        }
    }
    catch (err) {
        // Don't leak a partially-composited intermediate on failure.
        if (ownsCurrent)
            SmBitmap.release(current);
        throw err;
    }
    // If we never composited (empty list, or every entry was 'dest'),
    // produce a fresh copy so applyOps's release contract holds.
    if (!ownsCurrent) {
        return current.extract(0, 0, current.width, current.height);
    }
    return current;
}
1501
/** Materialize the joinChannel mask, verify it matches the base
 * dimensions, and splice its luma in as the base's new alpha channel.
 * The mask bitmap is released in all cases. */
async function runJoinChannel(bitmap, op) {
    const mask = await materializeOverlay(op.image, op.raw);
    try {
        const sizeMatches = mask.width === bitmap.width && mask.height === bitmap.height;
        if (!sizeMatches) {
            throw new RangeError(`microsharp: joinChannel image ${mask.width}×${mask.height} must match base ${bitmap.width}×${bitmap.height}`);
        }
        return bitmap.joinAlphaFromMask(mask);
    }
    finally {
        SmBitmap.release(mask);
    }
}
1513
/** Normalise an extractChannel selector — a 0–3 integer or one of
 * 'red'|'green'|'blue'|'alpha' — to a 0-based band index. */
function resolveChannel(c) {
    if (typeof c === 'number') {
        if (Number.isInteger(c) && c >= 0 && c <= 3) {
            return c;
        }
        throw new RangeError(`microsharp: extractChannel() expects 0..3 or 'red'|'green'|'blue'|'alpha' (got ${c})`);
    }
    const byName = ['red', 'green', 'blue', 'alpha'].indexOf(c);
    if (byName !== -1) {
        return byName;
    }
    throw new RangeError(`microsharp: extractChannel() unknown channel '${String(c)}'`);
}
1528
/** Expand a sharp `linear(a, b)` argument to one value per RGBA band.
 * Scalars (and the undefined→fallback case) broadcast over RGB while
 * alpha keeps its identity value (1 for the multiplier 'a', 0 for the
 * offset 'b'). Length-3 arrays leave alpha at identity; length-4
 * arrays set every band. Non-finite entries and other shapes throw
 * `RangeError`. */
function expandLinearVec(v, fallback, label) {
    const alphaIdentity = label === 'a' ? 1 : 0;
    if (v === undefined) {
        return Float64Array.of(fallback, fallback, fallback, alphaIdentity);
    }
    if (typeof v === 'number') {
        if (!Number.isFinite(v)) {
            throw new RangeError(`microsharp: linear(${label}) must be a finite number`);
        }
        return Float64Array.of(v, v, v, alphaIdentity);
    }
    if (v.length !== 3 && v.length !== 4) {
        throw new RangeError(`microsharp: linear(${label}) must be a number, length-3 array, or length-4 array (got length ${v.length})`);
    }
    const out = new Float64Array(4);
    out[3] = alphaIdentity; // overwritten below for length-4 input
    for (let i = 0; i < v.length; i++) {
        const n = Number(v[i]);
        if (!Number.isFinite(n)) {
            throw new RangeError(`microsharp: linear(${label})[${i}] must be a finite number`);
        }
        out[i] = n;
    }
    return out;
}
1563
/** Flatten a recomb colour matrix to a row-major Float64Array.
 * Accepts nested rows (strict 3×3 or 4×4; jagged rows rejected) or a
 * flat list (length 9 or 16). Every entry must be a finite number. */
function flattenRecombMatrix(m) {
    const toFinite = (raw, where) => {
        const n = Number(raw);
        if (!Number.isFinite(n)) {
            throw new RangeError(`microsharp: recomb${where} must be finite`);
        }
        return n;
    };
    // Nested form: outer is an array whose first element is itself an array.
    if (Array.isArray(m) && m.length > 0 && Array.isArray(m[0])) {
        const rows = m;
        const cols = rows[0].length;
        const square3 = rows.length === 3 && cols === 3;
        const square4 = rows.length === 4 && cols === 4;
        if (!square3 && !square4) {
            throw new RangeError('microsharp: recomb nested matrix must be 3×3 or 4×4');
        }
        const flat = new Float64Array(rows.length * cols);
        rows.forEach((row, r) => {
            if (row.length !== cols) {
                throw new RangeError('microsharp: recomb matrix is jagged — every row must have the same length');
            }
            row.forEach((cell, c) => {
                flat[r * cols + c] = toFinite(cell, `[${r}][${c}]`);
            });
        });
        return flat;
    }
    // Flat number list: length 9 (3×3) or 16 (4×4).
    const n = m.length;
    if (n !== 9 && n !== 16) {
        throw new RangeError('microsharp: recomb flat matrix length must be 9 (3×3) or 16 (4×4)');
    }
    const flat = new Float64Array(n);
    for (let i = 0; i < n; i++) {
        flat[i] = toFinite(m[i], `[${i}]`);
    }
    return flat;
}
1602
async function runBoolean(bitmap, op) {
    // Resolve the operand image into a bitmap, combine it with the working
    // bitmap via the requested bitwise operator, and guarantee the operand
    // is released even when booleanWith throws.
    const { operand, raw, op: operator } = op;
    const other = await materializeOverlay(operand, raw);
    try {
        return bitmap.booleanWith(other, operator);
    } finally {
        SmBitmap.release(other);
    }
}
1611
function flattenAffineMatrix(m) {
    // Sharp accepts both `[a, b, c, d]` and `[[a, b], [c, d]]`. Normalise to
    // the flat form and reject everything else (3-element list, jagged
    // nested array) with a RangeError so bad input lands at the wrapper,
    // not deep in Zig.
    //
    // Fix vs. the previous version: entries are now coerced with Number()
    // and checked with Number.isFinite(), matching flattenRecombMatrix.
    // Before, NaN/±Infinity passed the flat path's `typeof` check and the
    // nested path accepted arbitrary values unvalidated.
    let flat = null;
    if (Array.isArray(m)) {
        if (m.length === 4 && !m.some(Array.isArray)) {
            flat = m;
        } else if (m.length === 2 &&
            Array.isArray(m[0]) && m[0].length === 2 &&
            Array.isArray(m[1]) && m[1].length === 2) {
            flat = [m[0][0], m[0][1], m[1][0], m[1][1]];
        }
    }
    if (flat === null) {
        throw new RangeError('microsharp: affine matrix must be [a, b, c, d] or [[a, b], [c, d]]');
    }
    const out = [0, 0, 0, 0];
    for (let i = 0; i < 4; i++) {
        const v = Number(flat[i]);
        if (!Number.isFinite(v)) {
            throw new RangeError(`microsharp: affine[${i}] must be finite`);
        }
        out[i] = v;
    }
    return out;
}
1627
function runRotate(bitmap, op) {
    // Multiples of 90° map onto byte-exact lossless permutations; everything
    // else goes through the bilinear sampler, which is ~10–20× slower for
    // the same pixel count and introduces filtering artifacts. Comparing
    // with a tiny epsilon keeps inputs like 90.0000000001 on the fast path.
    const angle = op.angle;
    const near = (target) => Math.abs(angle - target) < 1e-9;
    if (near(0) || near(360)) {
        // Identity still returns a fresh bitmap (full-frame extract).
        return bitmap.extract(0, 0, bitmap.width, bitmap.height);
    }
    if (near(90)) {
        return bitmap.rotate90();
    }
    if (near(180)) {
        return bitmap.rotate180();
    }
    if (near(270)) {
        return bitmap.rotate270();
    }
    const [r, g, b, a] = op.bg;
    return bitmap.rotateArbitrary(angle, r, g, b, a, 'bilinear');
}
1645
function runAutoOrient(bitmap, inputBytes) {
    // The EXIF Orientation tag lives in the original container bytes, so
    // the materialised input is required. `applyOps` only runs after
    // `getInput` returned, so the promise has already settled — resolving
    // through it anyway is safe and satisfies `runOp`'s type contract.
    if (inputBytes !== null) {
        return inputBytes.then((bytes) => applyOrientation(bitmap, bytes));
    }
    // No source bytes to inspect: return a fresh full-frame copy so
    // applyOps's release contract (release prev when next !== prev) holds.
    return bitmap.extract(0, 0, bitmap.width, bitmap.height);
}
1655
function applyOrientation(bitmap, bytes) {
    // Apply the EXIF Orientation tag (1..8) by composing the lossless
    // flip / 90°-permutation primitives. Orientation 1 — and any value we
    // don't recognise — still produces a fresh bitmap so applyOps's release
    // contract holds (it releases the previous when next !== prev).
    //
    // Transpose (5) and transverse (7) are two-step: 90° CW followed by a
    // flip, mirroring across the main / anti-diagonal. The intermediate
    // routes through page_allocator bitmaps and must be released.
    const rotateThenFlip = (flip) => {
        const mid = bitmap.rotate90();
        try {
            return flip(mid);
        } finally {
            SmBitmap.release(mid);
        }
    };
    switch (SmBitmap.peekOrientation(bytes)) {
        case 2:
            return bitmap.flipH();
        case 3:
            return bitmap.rotate180();
        case 4:
            return bitmap.flipV();
        case 5:
            return rotateThenFlip((b) => b.flipH()); // transpose
        case 6:
            return bitmap.rotate90();
        case 7:
            return rotateThenFlip((b) => b.flipV()); // transverse
        case 8:
            return bitmap.rotate270();
        default:
            // 1 = upright (and unknown tags): full-frame fresh copy.
            return bitmap.extract(0, 0, bitmap.width, bitmap.height);
    }
}
1698
function runTrim(bitmap, op) {
    // Crop away a border that matches the background colour within a
    // per-channel threshold (sharp's trim()).
    const opts = op.opts ?? {};
    const threshold = opts.threshold ?? 10;
    if (!Number.isFinite(threshold) || threshold < 0 || threshold > 255) {
        throw new RangeError('microsharp: trim threshold must be 0..255');
    }
    // Sharp parity: when no explicit background is given, sample the
    // top-left pixel of the working bitmap.
    let bg;
    if (opts.background === undefined) {
        const px = bitmap.data;
        bg = [px[0], px[1], px[2], px[3]];
    } else {
        bg = parseBackground(opts.background, [0, 0, 0, 255]);
    }
    let bounds;
    try {
        bounds = bitmap.findOpaqueBounds(bg[0], bg[1], bg[2], bg[3], Math.round(threshold));
    } catch {
        // NoContent: every pixel matches the background within threshold.
        // Sharp's behaviour: leave the image untouched (fresh full copy).
        return bitmap.extract(0, 0, bitmap.width, bitmap.height);
    }
    return bitmap.extract(bounds.left, bounds.top, bounds.width, bounds.height);
}
1724
function outputChannelsFor(format) {
    // Channel count as it lands in the encoded output. stb's JPEG encoder
    // always drops alpha, so a JPEG file carries 3 channels regardless of
    // `comp`. PNG is written 4-channel RGBA; BMP gets a 32-bit V4 header
    // with an explicit alpha mask when comp=4; raw is the forced-RGBA
    // decode buffer from `decode/stb.zig` — all 4 channels.
    if (format === 'jpeg') {
        return 3;
    }
    return 4;
}
1732
function bitmapPixelsToCopy(bitmap) {
    // bitmap.data is a slice proxy into Zig memory and needs the same
    // defensive copy as any other Zig-owned byte slice — live `dataView`
    // views are invalidated when Zig grows the heap. Delegate to
    // `zigBytesToCopy` instead of duplicating its dataView/`.set()` logic,
    // so the copy-out story lives in exactly one place.
    return zigBytesToCopy(bitmap.data);
}
1742
function zigBytesToCopy(bytes) {
    // Defensive copy out of WASM linear memory — see the equivalent helper
    // in src/index.ts. Live `dataView` views become invalid when Zig grows
    // the heap, so anything that survives the next allocator call has to
    // live in a JS-owned ArrayBuffer.
    const view = bytes.dataView;
    const src = new Uint8Array(view.buffer, view.byteOffset, view.byteLength);
    // slice() allocates a fresh JS-owned buffer and copies into it.
    return src.slice();
}