tokimeki-image-editor 0.1.8 → 0.1.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,296 @@
1
// Vertex → fragment interface: clip-space position plus the texture
// coordinate computed by the inverse-viewport math in vs_main.
struct VertexOutput {
    @builtin(position) position: vec4<f32>,
    // uv may fall outside [0,1]; fs_main clamps on sample and blacks out
    // out-of-range texels.
    @location(0) uv: vec2<f32>,
};
5
+
6
// Per-frame parameters uploaded by the host. All fields are f32 so the
// struct is a flat array of floats; field ORDER must match the host-side
// buffer layout exactly — do not reorder.
struct Uniforms {
    // Adjustments (11 params)
    brightness: f32,
    contrast: f32,
    exposure: f32,
    highlights: f32,
    shadows: f32,
    saturation: f32,
    temperature: f32,
    sepia: f32,
    grayscale: f32,
    vignette: f32,
    // NOTE(review): grain is declared for layout parity but is never read
    // in this shader (fs_main does not reference params.grain); grain is
    // applied on the CPU path — confirm before removing.
    grain: f32,

    // Viewport (4 params)
    viewportZoom: f32,
    viewportOffsetX: f32,
    viewportOffsetY: f32,
    viewportScale: f32,

    // Transform (4 params)
    rotation: f32, // in radians
    flipHorizontal: f32, // 1.0 or -1.0
    flipVertical: f32, // 1.0 or -1.0
    transformScale: f32,

    // Canvas dimensions (2 params)
    canvasWidth: f32,
    canvasHeight: f32,

    // Image dimensions (2 params)
    imageWidth: f32,
    imageHeight: f32,

    // Crop area (4 params, in image pixels).
    // cropWidth/cropHeight of 0 means "no crop active".
    cropX: f32,
    cropY: f32,
    cropWidth: f32,
    cropHeight: f32,
};
46
+
47
// Bind group 0: sampler, source image texture, and per-frame parameters.
@group(0) @binding(0) var mySampler: sampler;
@group(0) @binding(1) var myTexture: texture_2d<f32>;
@group(0) @binding(2) var<uniform> params: Uniforms;
50
+
51
// Full-screen triangle vertex shader.
// Emits one oversized triangle covering the whole viewport, then computes
// per-vertex UVs by running the 2D-canvas draw transform IN REVERSE, so the
// fragment stage can sample the source image directly.
@vertex
fn vs_main(@builtin(vertex_index) VertexIndex: u32) -> VertexOutput {
    // Classic full-screen triangle: covers NDC [-1,1]^2 with a single tri.
    var pos = array<vec2<f32>, 3>(
        vec2<f32>(-1.0, -1.0),
        vec2<f32>( 3.0, -1.0),
        vec2<f32>(-1.0, 3.0)
    );
    var output: VertexOutput;
    output.position = vec4<f32>(pos[VertexIndex], 0.0, 1.0);

    // Convert NDC (-1 to 1) to canvas coordinates centered at origin
    // NDC: -1 → -canvasWidth/2, 0 → 0, 1 → canvasWidth/2
    // Note: NDC Y increases upward, but canvas Y increases downward, so flip Y
    var coord = pos[VertexIndex] * vec2<f32>(params.canvasWidth, params.canvasHeight) * 0.5;
    coord.y = -coord.y;

    // Reverse the 2D canvas transformations:
    // In 2D: translate(center + offset) → scale(zoom) → rotate → flip → draw(-w/2, -h/2)
    // In WebGPU (reverse): screen → un-translate → un-scale → un-rotate → un-flip → texture

    // 1. Subtract viewport offset (inverse of translate)
    coord = coord - vec2<f32>(params.viewportOffsetX, params.viewportOffsetY);

    // 2. Inverse zoom/scale (all three scale factors compose multiplicatively)
    let totalScale = params.viewportScale * params.viewportZoom * params.transformScale;
    coord = coord / totalScale;

    // 3. Inverse rotation (rotate by -rotation; branch skips the trig when unrotated)
    if (params.rotation != 0.0) {
        let cos_r = cos(-params.rotation);
        let sin_r = sin(-params.rotation);
        coord = vec2<f32>(
            coord.x * cos_r - coord.y * sin_r,
            coord.x * sin_r + coord.y * cos_r
        );
    }

    // 4. Inverse flip (flip factors are ±1.0, so the inverse is the same multiply)
    coord.x = coord.x * params.flipHorizontal;
    coord.y = coord.y * params.flipVertical;

    // 5. Convert to texture coordinates
    // After inverse transformations, coord is in drawing space (centered at origin)

    // When there's a crop, viewport.scale is adjusted to fit crop area to canvas
    // This means coord values are scaled according to crop dimensions
    // We need to account for this when mapping to texture coordinates

    // Crop-aware mapping when a crop is active (both dimensions positive)
    if (params.cropWidth > 0.0 && params.cropHeight > 0.0) {
        // coord is in crop-centered space (units: pixels in crop area after inverse transforms)
        // The 2D canvas draws: drawImage(img, cropX, cropY, cropW, cropH, -cropW/2, -cropH/2, cropW, cropH)
        // This means the crop region is drawn centered, from (-cropW/2, -cropH/2) to (cropW/2, cropH/2)

        // Map from drawing space to texture coordinates:
        // Drawing space: coord ranges from (-cropW/2, -cropH/2) to (cropW/2, cropH/2)
        // Texture space: we want to read from (cropX, cropY) to (cropX+cropW, cropY+cropH)

        // Convert from centered coordinates to 0-based coordinates within crop region
        let cropLocalX = coord.x + params.cropWidth * 0.5;
        let cropLocalY = coord.y + params.cropHeight * 0.5;

        // Convert to texture coordinates by adding crop offset and normalizing by image size
        output.uv = vec2<f32>(
            (params.cropX + cropLocalX) / params.imageWidth,
            (params.cropY + cropLocalY) / params.imageHeight
        );
    } else {
        // No crop - standard transformation
        // Convert from image-centered space to top-left origin
        coord = coord + vec2<f32>(params.imageWidth * 0.5, params.imageHeight * 0.5);

        // Normalize to 0-1 range
        output.uv = coord / vec2<f32>(params.imageWidth, params.imageHeight);
    }

    return output;
}
130
+
131
+ // Helper functions
132
// Rec. 709 relative luminance of an RGB color (same weights as the
// CSS/Canvas grayscale conventions used elsewhere in this shader).
fn getLuminance(color: vec3<f32>) -> f32 {
    let weights = vec3<f32>(0.2126, 0.7152, 0.0722);
    return color.r * weights.x + color.g * weights.y + color.b * weights.z;
}
135
+
136
// Deterministic 2D → [0,1) hash (sine-free, variant of the well-known
// "hash without sine" family). Used for procedural noise.
// NOTE(review): not referenced by fs_main in this file — the grain effect
// appears to run on the CPU path; confirm before deleting.
fn hash2d(p: vec2<f32>) -> f32 {
    // Fold the coordinates into a pseudo-random vec3, then mix components.
    let p3 = fract(vec3<f32>(p.x, p.y, p.x) * vec3<f32>(0.1031, 0.1030, 0.0973));
    let dot_p3 = dot(p3, vec3<f32>(p3.y, p3.z, p3.x) + 33.33);
    return fract((p3.x + p3.y) * p3.z + dot_p3);
}
142
+
143
// Convert an RGB color (components in [0,1]) to HSL.
// Returns vec3(h, s, l) with ALL components normalized to [0,1]
// (hue is divided by 6, not expressed in degrees).
fn rgbToHsl(rgb: vec3<f32>) -> vec3<f32> {
    let maxVal = max(rgb.r, max(rgb.g, rgb.b));
    let minVal = min(rgb.r, min(rgb.g, rgb.b));
    var h = 0.0;
    var s = 0.0;
    let l = (maxVal + minVal) / 2.0;

    // Achromatic colors (max == min) keep h = s = 0.
    if (maxVal != minVal) {
        let d = maxVal - minVal;
        // select(f, t, cond): saturation formula differs above/below l = 0.5.
        s = select(d / (maxVal + minVal), d / (2.0 - maxVal - minVal), l > 0.5);

        // Hue depends on which channel is dominant; result folded into [0,1).
        if (maxVal == rgb.r) {
            h = ((rgb.g - rgb.b) / d + select(0.0, 6.0, rgb.g < rgb.b)) / 6.0;
        } else if (maxVal == rgb.g) {
            h = ((rgb.b - rgb.r) / d + 2.0) / 6.0;
        } else {
            h = ((rgb.r - rgb.g) / d + 4.0) / 6.0;
        }
    }

    return vec3<f32>(h, s, l);
}
165
+
166
// Convert an HSL color (all components in [0,1], matching rgbToHsl above)
// back to RGB. Standard p/q formulation with hue2rgb per channel.
fn hslToRgb(hsl: vec3<f32>) -> vec3<f32> {
    let h = hsl.x;
    let s = hsl.y;
    let l = hsl.z;

    // Zero saturation: pure gray, all channels equal to lightness.
    if (s == 0.0) {
        return vec3<f32>(l, l, l);
    }

    // select(f, t, cond): q formula differs above/below l = 0.5.
    let q = select(l + s - l * s, l * (1.0 + s), l < 0.5);
    let p = 2.0 * l - q;

    // Each channel samples the hue ramp at a 1/3 offset.
    let r = hue2rgb(p, q, h + 1.0 / 3.0);
    let g = hue2rgb(p, q, h);
    let b = hue2rgb(p, q, h - 1.0 / 3.0);

    return vec3<f32>(r, g, b);
}
184
+
185
// Helper for hslToRgb: evaluates one channel of the piecewise-linear hue
// ramp. p and q are the precomputed HSL intermediates; t_ is the (possibly
// out-of-range) hue offset, wrapped back into [0,1] first.
fn hue2rgb(p: f32, q: f32, t_: f32) -> f32 {
    var t = t_;
    if (t < 0.0) { t += 1.0; }
    if (t > 1.0) { t -= 1.0; }
    if (t < 1.0 / 6.0) { return p + (q - p) * 6.0 * t; }
    if (t < 1.0 / 2.0) { return q; }
    if (t < 2.0 / 3.0) { return p + (q - p) * (2.0 / 3.0 - t) * 6.0; }
    return p;
}
194
+
195
// Fragment shader: samples the source image at the UV computed by vs_main,
// then applies the adjustment pipeline in a fixed order:
// bounds/crop mask → brightness → contrast → exposure → shadows/highlights
// → saturation → temperature → sepia → grayscale → vignette → clamp.
// Alpha passes through unmodified.
@fragment
fn fs_main(@location(0) uv: vec2<f32>) -> @location(0) vec4<f32> {
    // Sample texture FIRST (must be in uniform control flow before any branching)
    var color = textureSample(myTexture, mySampler, clamp(uv, vec2<f32>(0.0), vec2<f32>(1.0)));
    var rgb = color.rgb;

    // Check if outside texture bounds (0-1) and set to black
    if (uv.x < 0.0 || uv.x > 1.0 || uv.y < 0.0 || uv.y > 1.0) {
        rgb = vec3<f32>(0.0);
    }

    // When crop is active, only show the crop region - black out everything else.
    // FIX: require BOTH dimensions positive, matching the guard in vs_main.
    // Previously only cropWidth was checked, so cropWidth > 0 with
    // cropHeight == 0 produced a degenerate band (cropMinV == cropMaxV) that
    // blacked out the entire image while vs_main used the no-crop UV mapping.
    if (params.cropWidth > 0.0 && params.cropHeight > 0.0) {
        let cropMinU = params.cropX / params.imageWidth;
        let cropMaxU = (params.cropX + params.cropWidth) / params.imageWidth;
        let cropMinV = params.cropY / params.imageHeight;
        let cropMaxV = (params.cropY + params.cropHeight) / params.imageHeight;

        // If UV is outside the crop region, render black
        if (uv.x < cropMinU || uv.x > cropMaxU || uv.y < cropMinV || uv.y > cropMaxV) {
            rgb = vec3<f32>(0.0);
        }
    }

    // 1. Brightness: linear gain, ±100 maps to ±50% (divisor 200)
    if (params.brightness != 0.0) {
        let factor = 1.0 + (params.brightness / 200.0);
        rgb = rgb * factor;
    }

    // 2. Contrast: scale around mid-gray 0.5
    if (params.contrast != 0.0) {
        let factor = 1.0 + (params.contrast / 200.0);
        rgb = (rgb - 0.5) * factor + 0.5;
    }

    // 3. Exposure: photographic stops, 100 == one full stop (×2)
    if (params.exposure != 0.0) {
        rgb = rgb * exp2(params.exposure / 100.0);
    }

    // 4. Shadows and Highlights: luminance-masked so each only affects its range
    if (params.shadows != 0.0 || params.highlights != 0.0) {
        let luma = getLuminance(rgb);

        // Shadow mask peaks at black (luma 0) and falls off quadratically.
        if (params.shadows != 0.0) {
            let shadowMask = pow(1.0 - luma, 2.0);
            rgb = rgb - rgb * (params.shadows / 100.0) * shadowMask * 0.5;
        }

        // Highlight mask peaks at white (luma 1).
        if (params.highlights != 0.0) {
            let highlightMask = pow(luma, 2.0);
            rgb = rgb + rgb * (params.highlights / 100.0) * highlightMask * 0.5;
        }
    }

    // 5. Saturation: via HSL round-trip; clamp first so rgbToHsl sees valid input
    if (params.saturation != 0.0) {
        rgb = clamp(rgb, vec3<f32>(0.0), vec3<f32>(1.0));
        var hsl = rgbToHsl(rgb);
        hsl.y = clamp(hsl.y * (1.0 + params.saturation / 100.0), 0.0, 1.0);
        rgb = hslToRgb(hsl);
    }

    // 5.5. Color Temperature
    // Warm (positive): add red, subtract blue
    // Cool (negative): subtract red, add blue
    if (params.temperature != 0.0) {
        let temp = params.temperature / 100.0;
        rgb.r = rgb.r + temp * 0.1;
        rgb.b = rgb.b - temp * 0.1;
    }

    // 6. Sepia: blend toward the classic sepia matrix by sepia amount
    if (params.sepia != 0.0) {
        let sepiaAmount = params.sepia / 100.0;
        let tr = 0.393 * rgb.r + 0.769 * rgb.g + 0.189 * rgb.b;
        let tg = 0.349 * rgb.r + 0.686 * rgb.g + 0.168 * rgb.b;
        let tb = 0.272 * rgb.r + 0.534 * rgb.g + 0.131 * rgb.b;
        rgb = mix(rgb, vec3<f32>(tr, tg, tb), sepiaAmount);
    }

    // 7. Grayscale: blend toward luminance by grayscale amount
    if (params.grayscale != 0.0) {
        let gray = getLuminance(rgb);
        rgb = mix(rgb, vec3<f32>(gray), params.grayscale / 100.0);
    }

    // 8. Vignette: radial falloff from center.
    // NOTE(review): positive vignette BRIGHTENS edges here (1.0 + factor·…);
    // confirm this sign convention matches the CPU implementation.
    if (params.vignette != 0.0) {
        let center = vec2<f32>(0.5, 0.5);
        let dist = distance(uv, center);
        let vignetteFactor = params.vignette / 100.0;
        let vignetteAmount = pow(dist * 1.4, 2.0);
        rgb = rgb * (1.0 + vignetteFactor * vignetteAmount * 1.5);
    }

    // Clamp final result
    rgb = clamp(rgb, vec3<f32>(0.0), vec3<f32>(1.0));

    return vec4<f32>(rgb, color.a);
}
package/dist/types.d.ts CHANGED
@@ -50,10 +50,12 @@ export interface AdjustmentsState {
50
50
  shadows: number;
51
51
  brightness: number;
52
52
  saturation: number;
53
- hue: number;
53
+ temperature: number;
54
54
  vignette: number;
55
55
  sepia: number;
56
56
  grayscale: number;
57
+ blur: number;
58
+ grain: number;
57
59
  }
58
60
  export interface FilterPreset {
59
61
  id: string;
@@ -11,9 +11,10 @@ export declare function createDefaultAdjustments(): AdjustmentsState;
11
11
  export declare function applyAdjustments(ctx: CanvasRenderingContext2D, adjustments: AdjustmentsState): void;
12
12
  /**
13
13
  * Apply ALL adjustments via pixel manipulation
14
+ * Uses WebGPU acceleration when available, falls back to CPU otherwise
14
15
  * This works in all browsers including Safari (no ctx.filter needed)
15
16
  */
16
- export declare function applyAllAdjustments(canvas: HTMLCanvasElement, img: HTMLImageElement, viewport: Viewport, adjustments: AdjustmentsState, cropArea?: CropArea | null): void;
17
+ export declare function applyAllAdjustments(canvas: HTMLCanvasElement, img: HTMLImageElement, viewport: Viewport, adjustments: AdjustmentsState, cropArea?: CropArea | null): Promise<void>;
17
18
  /**
18
19
  * Apply Gaussian blur to a region of canvas via pixel manipulation (Safari-compatible)
19
20
  * Uses optimized separable box blur with running sums for O(n) performance
@@ -9,10 +9,12 @@ export function createDefaultAdjustments() {
9
9
  shadows: 0,
10
10
  brightness: 0,
11
11
  saturation: 0,
12
- hue: 0,
12
+ temperature: 0,
13
13
  vignette: 0,
14
14
  sepia: 0,
15
- grayscale: 0
15
+ grayscale: 0,
16
+ blur: 0,
17
+ grain: 0
16
18
  };
17
19
  }
18
20
  /**
@@ -88,9 +90,10 @@ export function applyAdjustments(ctx, adjustments) {
88
90
  }
89
91
  /**
90
92
  * Apply ALL adjustments via pixel manipulation
93
+ * Uses WebGPU acceleration when available, falls back to CPU otherwise
91
94
  * This works in all browsers including Safari (no ctx.filter needed)
92
95
  */
93
- export function applyAllAdjustments(canvas, img, viewport, adjustments, cropArea) {
96
+ export async function applyAllAdjustments(canvas, img, viewport, adjustments, cropArea) {
94
97
  // Skip if no adjustments needed
95
98
  if (adjustments.exposure === 0 &&
96
99
  adjustments.contrast === 0 &&
@@ -98,15 +101,36 @@ export function applyAllAdjustments(canvas, img, viewport, adjustments, cropArea
98
101
  adjustments.shadows === 0 &&
99
102
  adjustments.brightness === 0 &&
100
103
  adjustments.saturation === 0 &&
101
- adjustments.hue === 0 &&
104
+ adjustments.temperature === 0 &&
102
105
  adjustments.vignette === 0 &&
103
106
  adjustments.sepia === 0 &&
104
- adjustments.grayscale === 0) {
107
+ adjustments.grayscale === 0 &&
108
+ adjustments.blur === 0 &&
109
+ adjustments.grain === 0) {
105
110
  return;
106
111
  }
112
+ // Calculate image dimensions for GPU
113
+ const imgWidth = cropArea ? cropArea.width : img.width;
114
+ const imgHeight = cropArea ? cropArea.height : img.height;
115
+ const totalScale = viewport.scale * viewport.zoom;
116
+ const scaledImageWidth = imgWidth * totalScale;
117
+ const scaledImageHeight = imgHeight * totalScale;
118
+ // NOTE: WebGPU compute shader approach is disabled because Canvas.svelte uses 2D context
119
+ // A canvas cannot have both 2D and WebGPU contexts simultaneously
120
+ // Future: Implement WebGPU render pipeline in a separate canvas layer
121
+ // Use CPU implementation
122
+ applyAllAdjustmentsCPU(canvas, img, viewport, adjustments, cropArea);
123
+ }
124
+ /**
125
+ * CPU-based implementation of adjustments (original implementation)
126
+ * Used as fallback when WebGPU is unavailable
127
+ */
128
+ function applyAllAdjustmentsCPU(canvas, img, viewport, adjustments, cropArea) {
107
129
  const ctx = canvas.getContext('2d');
108
- if (!ctx)
130
+ if (!ctx) {
131
+ console.error('Failed to get 2D context!');
109
132
  return;
133
+ }
110
134
  const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
111
135
  const data = imageData.data;
112
136
  // Pre-calculate adjustment factors
@@ -116,19 +140,22 @@ export function applyAllAdjustments(canvas, img, viewport, adjustments, cropArea
116
140
  const hasShadows = adjustments.shadows !== 0;
117
141
  const hasBrightness = adjustments.brightness !== 0;
118
142
  const hasSaturation = adjustments.saturation !== 0;
119
- const hasHue = adjustments.hue !== 0;
143
+ const hasTemperature = adjustments.temperature !== 0;
120
144
  const hasVignette = adjustments.vignette !== 0;
121
145
  const hasSepia = adjustments.sepia !== 0;
122
146
  const hasGrayscale = adjustments.grayscale !== 0;
147
+ const hasBlur = adjustments.blur > 0;
148
+ const hasGrain = adjustments.grain > 0;
123
149
  const exposureFactor = hasExposure ? Math.pow(2, adjustments.exposure / 100) : 1;
124
150
  const contrastFactor = hasContrast ? 1 + (adjustments.contrast / 200) : 1;
125
151
  const brightnessFactor = hasBrightness ? 1 + (adjustments.brightness / 200) : 1;
126
152
  const highlightsFactor = adjustments.highlights / 100;
127
153
  const shadowsFactor = adjustments.shadows / 100;
128
154
  const saturationFactor = hasSaturation ? adjustments.saturation / 100 : 0;
129
- const hueShift = adjustments.hue;
155
+ const temperatureFactor = adjustments.temperature / 100;
130
156
  const sepiaAmount = adjustments.sepia / 100;
131
157
  const grayscaleAmount = adjustments.grayscale / 100;
158
+ const grainAmount = hasGrain ? adjustments.grain / 100 : 0;
132
159
  // Vignette pre-calculations
133
160
  const imgWidth = cropArea ? cropArea.width : img.width;
134
161
  const imgHeight = cropArea ? cropArea.height : img.height;
@@ -142,7 +169,7 @@ export function applyAllAdjustments(canvas, img, viewport, adjustments, cropArea
142
169
  const vignetteFactor = adjustments.vignette / 100;
143
170
  const vignetteStrength = 1.5;
144
171
  const needsLuminance = hasHighlights || hasShadows;
145
- const needsHSL = hasSaturation || hasHue;
172
+ const needsHSL = hasSaturation;
146
173
  for (let i = 0; i < data.length; i += 4) {
147
174
  let r = data[i];
148
175
  let g = data[i + 1];
@@ -198,11 +225,16 @@ export function applyAllAdjustments(canvas, img, viewport, adjustments, cropArea
198
225
  s = Math.max(0, Math.min(100, s * (1 + saturationFactor)));
199
226
  }
200
227
  // Adjust hue
201
- if (hasHue) {
202
- h = (h + hueShift + 360) % 360;
203
- }
228
+ /* if (hasHue) {
229
+ h = (h + hueShift + 360) % 360;
230
+ } */
204
231
  [r, g, b] = hslToRgb(h, s, l);
205
232
  }
233
+ // Apply temperature
234
+ if (hasTemperature) {
235
+ r = r + temperatureFactor * 0.1 * 255;
236
+ b = b - temperatureFactor * 0.1 * 255;
237
+ }
206
238
  // Apply sepia
207
239
  if (hasSepia) {
208
240
  const tr = (0.393 * r + 0.769 * g + 0.189 * b);
@@ -234,12 +266,67 @@ export function applyAllAdjustments(canvas, img, viewport, adjustments, cropArea
234
266
  g *= vignetteMultiplier;
235
267
  b *= vignetteMultiplier;
236
268
  }
237
- // Clamp final values to 0-255
269
+ // Clamp values before grain processing
238
270
  data[i] = Math.max(0, Math.min(255, r));
239
271
  data[i + 1] = Math.max(0, Math.min(255, g));
240
272
  data[i + 2] = Math.max(0, Math.min(255, b));
241
273
  }
274
+ // Put adjusted image data back to canvas
242
275
  ctx.putImageData(imageData, 0, 0);
276
+ // Apply Gaussian blur to entire image if blur adjustment is enabled
277
+ if (hasBlur) {
278
+ const blurAmount = adjustments.blur / 100;
279
+ // Map blur 0-100 to radius 0-10 (blur tool scale)
280
+ const blurRadius = blurAmount * 10.0 * totalScale;
281
+ if (blurRadius > 0.1) {
282
+ applyGaussianBlur(canvas, 0, 0, canvas.width, canvas.height, blurRadius);
283
+ }
284
+ }
285
+ // Apply film grain - Applied after blur for sharp grain on top
286
+ if (hasGrain) {
287
+ // Get image data after blur has been applied
288
+ const grainedData = ctx.getImageData(0, 0, canvas.width, canvas.height);
289
+ const gData = grainedData.data;
290
+ // Helper function for hash
291
+ const hash2d = (x, y) => {
292
+ const p3x = (x * 0.1031) % 1;
293
+ const p3y = (y * 0.1030) % 1;
294
+ const p3z = (x * 0.0973) % 1;
295
+ const dotP3 = p3x * (p3y + 33.33) + p3y * (p3z + 33.33) + p3z * (p3x + 33.33);
296
+ return ((p3x + p3y) * p3z + dotP3) % 1;
297
+ };
298
+ for (let i = 0; i < gData.length; i += 4) {
299
+ let r = gData[i];
300
+ let g = gData[i + 1];
301
+ let b = gData[i + 2];
302
+ const pixelIndex = i / 4;
303
+ const canvasX = pixelIndex % canvas.width;
304
+ const canvasY = Math.floor(pixelIndex / canvas.width);
305
+ // Convert canvas coordinates to image coordinates
306
+ const imageX = ((canvasX - imageCenterX) / totalScale + imgWidth / 2);
307
+ const imageY = ((canvasY - imageCenterY) / totalScale + imgHeight / 2);
308
+ // Calculate luminance for grain masking
309
+ const luma = (0.2126 * r + 0.7152 * g + 0.0722 * b) / 255;
310
+ // Grain visibility mask: most visible in midtones
311
+ let lumaMask = 1.0 - Math.abs(luma - 0.5) * 2.0;
312
+ lumaMask = Math.pow(lumaMask, 0.5); // Softer falloff
313
+ // Multi-scale grain for organic film look
314
+ const fineGrain = hash2d(Math.floor(imageX / 2.5), Math.floor(imageY / 2.5)) - 0.5;
315
+ const mediumGrain = hash2d(Math.floor(imageX / 5.5) + 123.45, Math.floor(imageY / 5.5) + 678.90) - 0.5;
316
+ const coarseGrain = hash2d(Math.floor(imageX / 9.0) + 345.67, Math.floor(imageY / 9.0) + 890.12) - 0.5;
317
+ // Combine grain layers
318
+ const grainNoise = fineGrain * 0.5 + mediumGrain * 0.3 + coarseGrain * 0.2;
319
+ // Strong grain intensity
320
+ const strength = lumaMask * grainAmount * 0.5 * 255;
321
+ r += grainNoise * strength;
322
+ g += grainNoise * strength;
323
+ b += grainNoise * strength;
324
+ gData[i] = Math.max(0, Math.min(255, r));
325
+ gData[i + 1] = Math.max(0, Math.min(255, g));
326
+ gData[i + 2] = Math.max(0, Math.min(255, b));
327
+ }
328
+ ctx.putImageData(grainedData, 0, 0);
329
+ }
243
330
  }
244
331
  /**
245
332
  * Apply Gaussian blur to a region of canvas via pixel manipulation (Safari-compatible)
@@ -3,10 +3,15 @@ export declare function preloadStampImage(url: string): Promise<HTMLImageElement
3
3
  export declare function getStampImage(url: string): HTMLImageElement | null;
4
4
  export declare function loadImage(file: File): Promise<HTMLImageElement>;
5
5
  export declare function calculateFitScale(imageWidth: number, imageHeight: number, canvasWidth: number, canvasHeight: number): number;
6
- export declare function drawImage(canvas: HTMLCanvasElement, img: HTMLImageElement, viewport: Viewport, transform: TransformState, adjustments: AdjustmentsState, cropArea?: CropArea | null, blurAreas?: BlurArea[], stampAreas?: StampArea[]): void;
6
+ export declare function drawImage(canvas: HTMLCanvasElement, img: HTMLImageElement, viewport: Viewport, transform: TransformState, adjustments: AdjustmentsState, cropArea?: CropArea | null, blurAreas?: BlurArea[], stampAreas?: StampArea[]): Promise<void>;
7
7
  export declare function exportCanvas(canvas: HTMLCanvasElement, options: ExportOptions): string;
8
8
  export declare function downloadImage(dataUrl: string, filename: string): void;
9
- export declare function applyTransform(img: HTMLImageElement, transform: TransformState, adjustments: AdjustmentsState, cropArea?: CropArea | null, blurAreas?: BlurArea[], stampAreas?: StampArea[]): HTMLCanvasElement;
9
+ export declare function applyTransform(img: HTMLImageElement, transform: TransformState, adjustments: AdjustmentsState, cropArea?: CropArea | null, blurAreas?: BlurArea[], stampAreas?: StampArea[]): Promise<HTMLCanvasElement>;
10
+ /**
11
+ * Apply all transformations and export using WebGPU (when available)
12
+ * Falls back to Canvas2D if WebGPU is not supported
13
+ */
14
+ export declare function applyTransformWithWebGPU(img: HTMLImageElement, transform: TransformState, adjustments: AdjustmentsState, cropArea?: CropArea | null, blurAreas?: BlurArea[], stampAreas?: StampArea[]): Promise<HTMLCanvasElement>;
10
15
  export declare function screenToImageCoords(screenX: number, screenY: number, canvas: HTMLCanvasElement, img: HTMLImageElement, viewport: Viewport, transform: TransformState): {
11
16
  x: number;
12
17
  y: number;
@@ -42,7 +42,7 @@ export function calculateFitScale(imageWidth, imageHeight, canvasWidth, canvasHe
42
42
  const scaleY = canvasHeight / imageHeight;
43
43
  return Math.min(scaleX, scaleY, 1); // Don't scale up, only down
44
44
  }
45
- export function drawImage(canvas, img, viewport, transform, adjustments, cropArea, blurAreas, stampAreas) {
45
+ export async function drawImage(canvas, img, viewport, transform, adjustments, cropArea, blurAreas, stampAreas) {
46
46
  const ctx = canvas.getContext('2d');
47
47
  if (!ctx)
48
48
  return;
@@ -73,8 +73,8 @@ export function drawImage(canvas, img, viewport, transform, adjustments, cropAre
73
73
  }
74
74
  ctx.restore();
75
75
  // Apply all adjustments via pixel manipulation (Safari-compatible)
76
- // This modifies the canvas pixels after drawing
77
- applyAllAdjustments(canvas, img, viewport, adjustments, cropArea);
76
+ // Uses WebGPU acceleration when available, falls back to CPU
77
+ await applyAllAdjustments(canvas, img, viewport, adjustments, cropArea);
78
78
  // Apply blur areas
79
79
  if (blurAreas && blurAreas.length > 0) {
80
80
  applyBlurAreas(canvas, img, viewport, blurAreas, cropArea);
@@ -96,7 +96,7 @@ export function downloadImage(dataUrl, filename) {
96
96
  link.href = dataUrl;
97
97
  link.click();
98
98
  }
99
- export function applyTransform(img, transform, adjustments, cropArea = null, blurAreas = [], stampAreas = []) {
99
+ export async function applyTransform(img, transform, adjustments, cropArea = null, blurAreas = [], stampAreas = []) {
100
100
  const canvas = document.createElement('canvas');
101
101
  const ctx = canvas.getContext('2d');
102
102
  if (!ctx)
@@ -122,6 +122,7 @@ export function applyTransform(img, transform, adjustments, cropArea = null, blu
122
122
  }
123
123
  ctx.restore();
124
124
  // Apply all adjustments via pixel manipulation (Safari-compatible)
125
+ // Uses WebGPU acceleration when available, falls back to CPU
125
126
  // For export, create a centered viewport with no offset
126
127
  const exportViewport = {
127
128
  zoom: 1,
@@ -129,7 +130,7 @@ export function applyTransform(img, transform, adjustments, cropArea = null, blu
129
130
  offsetY: 0,
130
131
  scale: 1
131
132
  };
132
- applyAllAdjustments(canvas, img, exportViewport, adjustments, cropArea);
133
+ await applyAllAdjustments(canvas, img, exportViewport, adjustments, cropArea);
133
134
  // Apply blur areas for export
134
135
  if (blurAreas.length > 0) {
135
136
  applyBlurAreas(canvas, img, exportViewport, blurAreas, cropArea);
@@ -140,6 +141,48 @@ export function applyTransform(img, transform, adjustments, cropArea = null, blu
140
141
  }
141
142
  return canvas;
142
143
  }
144
/**
 * Apply all transformations and export using WebGPU (when available).
 * Falls back to Canvas2D (`applyTransform`) if WebGPU is not supported or
 * the WebGPU export throws.
 *
 * Stamps are composited on top via a separate Canvas2D pass, since the
 * WebGPU path does not render stamps (per the note below).
 *
 * @returns {Promise<HTMLCanvasElement>} canvas holding the rendered result
 */
export async function applyTransformWithWebGPU(img, transform, adjustments, cropArea = null, blurAreas = [], stampAreas = []) {
    // Try WebGPU export first
    if (navigator.gpu) {
        try {
            // Lazy-load the WebGPU renderer so browsers without WebGPU never fetch it.
            const { exportWithWebGPU } = await import('./webgpu-render');
            const webgpuCanvas = await exportWithWebGPU(img, adjustments, transform, cropArea, blurAreas);
            if (webgpuCanvas) {
                // Apply stamps on top (WebGPU doesn't handle stamps yet)
                if (stampAreas.length > 0) {
                    // Create a new Canvas2D to composite WebGPU result + stamps
                    const finalCanvas = document.createElement('canvas');
                    finalCanvas.width = webgpuCanvas.width;
                    finalCanvas.height = webgpuCanvas.height;
                    const ctx = finalCanvas.getContext('2d');
                    if (ctx) {
                        // Draw WebGPU result
                        ctx.drawImage(webgpuCanvas, 0, 0);
                        // Apply stamps on top; identity viewport because the
                        // WebGPU output is already in export space.
                        const exportViewport = {
                            zoom: 1,
                            offsetX: 0,
                            offsetY: 0,
                            scale: 1
                        };
                        applyStamps(finalCanvas, img, exportViewport, stampAreas, cropArea);
                        return finalCanvas;
                    }
                    // NOTE(review): if getContext('2d') fails we fall through and
                    // return the WebGPU canvas WITHOUT stamps — confirm intended.
                }
                return webgpuCanvas;
            }
        }
        catch (error) {
            // Non-fatal: log and fall back to the Canvas2D path below.
            console.warn('WebGPU export failed, falling back to Canvas2D:', error);
        }
    }
    // Fallback to Canvas2D
    return applyTransform(img, transform, adjustments, cropArea, blurAreas, stampAreas);
}
143
186
  export function screenToImageCoords(screenX, screenY, canvas, img, viewport, transform) {
144
187
  const rect = canvas.getBoundingClientRect();
145
188
  // Convert screen coordinates to canvas coordinates