tokimeki-image-editor 0.4.6 → 0.4.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -193,13 +193,22 @@ async function handleComplete() {
193
193
  return;
194
194
  isApplying = true;
195
195
  haptic('success');
196
+ const _DEV = import.meta.env.DEV;
197
+ if (_DEV)
198
+ console.time('[handleComplete] total');
196
199
  try {
197
200
  const result = await exportImage(state);
201
+ if (_DEV)
202
+ console.time('[handleComplete] onComplete callback');
198
203
  if (result)
199
204
  onComplete(result.dataUrl, { blob: result.blob, width: result.width, height: result.height });
205
+ if (_DEV)
206
+ console.timeEnd('[handleComplete] onComplete callback');
200
207
  }
201
208
  finally {
202
209
  isApplying = false;
210
+ if (_DEV)
211
+ console.timeEnd('[handleComplete] total');
203
212
  }
204
213
  }
205
214
  function handleCancel() {
@@ -220,33 +220,41 @@ export async function applyTransformWithWebGPU(img, transform, adjustments, crop
220
220
  const { exportWithWebGPU } = await import('./webgpu-render');
221
221
  const webgpuCanvas = await exportWithWebGPU(img, adjustments, transform, cropArea, blurAreas);
222
222
  if (webgpuCanvas) {
223
- // Apply stamps and annotations on top (WebGPU doesn't handle these yet)
224
- if (stampAreas.length > 0 || annotations.length > 0) {
225
- // Create a new Canvas2D to composite WebGPU result + stamps + annotations
226
- const finalCanvas = document.createElement('canvas');
227
- finalCanvas.width = webgpuCanvas.width;
228
- finalCanvas.height = webgpuCanvas.height;
223
+ // Copy WebGPU result to a Canvas2D canvas.
224
+ // Calling toBlob directly on a WebGPU canvas triggers expensive browser
225
+ // compositor layerization in complex DOM trees. Canvas2D avoids this.
226
+ // Use OffscreenCanvas for the initial blit to bypass the compositor entirely.
227
+ const w = webgpuCanvas.width;
228
+ const h = webgpuCanvas.height;
229
+ let finalCanvas;
230
+ if (typeof OffscreenCanvas !== 'undefined') {
231
+ // OffscreenCanvas path: completely avoids DOM compositor
232
+ const offscreen = new OffscreenCanvas(w, h);
233
+ const offCtx = offscreen.getContext('2d');
234
+ offCtx.drawImage(webgpuCanvas, 0, 0);
235
+ // Transfer to a regular canvas for stamps/annotations (which need DOM canvas APIs)
236
+ finalCanvas = document.createElement('canvas');
237
+ finalCanvas.width = w;
238
+ finalCanvas.height = h;
229
239
  const ctx = finalCanvas.getContext('2d');
230
- if (ctx) {
231
- // Draw WebGPU result
232
- ctx.drawImage(webgpuCanvas, 0, 0);
233
- // Apply stamps on top
234
- const exportViewport = {
235
- zoom: 1,
236
- offsetX: 0,
237
- offsetY: 0,
238
- scale: 1
239
- };
240
- if (annotations.length > 0) {
241
- applyAnnotations(finalCanvas, img, exportViewport, annotations, cropArea);
242
- }
243
- if (stampAreas.length > 0) {
244
- applyStamps(finalCanvas, img, exportViewport, stampAreas, cropArea);
245
- }
246
- return finalCanvas;
247
- }
240
+ ctx.drawImage(offscreen, 0, 0);
241
+ }
242
+ else {
243
+ finalCanvas = document.createElement('canvas');
244
+ finalCanvas.width = w;
245
+ finalCanvas.height = h;
246
+ const ctx = finalCanvas.getContext('2d');
247
+ ctx.drawImage(webgpuCanvas, 0, 0);
248
+ }
249
+ // Apply stamps and annotations on top
250
+ const exportViewport = { zoom: 1, offsetX: 0, offsetY: 0, scale: 1 };
251
+ if (annotations.length > 0) {
252
+ applyAnnotations(finalCanvas, img, exportViewport, annotations, cropArea);
253
+ }
254
+ if (stampAreas.length > 0) {
255
+ applyStamps(finalCanvas, img, exportViewport, stampAreas, cropArea);
248
256
  }
249
- return webgpuCanvas;
257
+ return finalCanvas;
250
258
  }
251
259
  }
252
260
  catch (error) {
@@ -49,8 +49,8 @@ export function createDefaultTransform() {
49
49
  */
50
50
  export function createDefaultExportOptions() {
51
51
  return {
52
- format: 'png',
53
- quality: 0.9
52
+ format: 'jpeg',
53
+ quality: 0.92
54
54
  };
55
55
  }
56
56
  /**
@@ -350,18 +350,35 @@ export function applyKeyboardAction(state, action) {
350
350
  export async function exportImage(state) {
351
351
  if (!state.imageData.original)
352
352
  return null;
353
+ const _DEV = import.meta.env.DEV;
354
+ if (_DEV)
355
+ console.time('[export] total');
356
+ if (_DEV)
357
+ console.time('[export] 1. WebGPU render');
353
358
  const exportCanvas = await applyTransformWithWebGPU(state.imageData.original, state.transform, state.adjustments, state.cropArea, state.blurAreas, state.stampAreas, state.annotations);
359
+ if (_DEV)
360
+ console.timeEnd('[export] 1. WebGPU render');
354
361
  const format = state.exportOptions.format === 'png' ? 'image/png' : 'image/jpeg';
355
- // Single async encode (toBlob) — avoids synchronous toDataURL blocking the main thread
362
+ if (_DEV)
363
+ console.time('[export] 2. toBlob');
356
364
  const blob = await new Promise((resolve) => {
357
365
  exportCanvas.toBlob(b => resolve(b), format, state.exportOptions.quality);
358
366
  });
359
- // Convert blob to data URL without re-encoding (just base64-wraps the existing bytes)
367
+ if (_DEV) {
368
+ console.timeEnd('[export] 2. toBlob');
369
+ console.log('[export] blob:', (blob.size / 1024 / 1024).toFixed(1) + 'MB');
370
+ }
371
+ if (_DEV)
372
+ console.time('[export] 3. readAsDataURL');
360
373
  const dataUrl = await new Promise((resolve) => {
361
374
  const reader = new FileReader();
362
375
  reader.onload = () => resolve(reader.result);
363
376
  reader.readAsDataURL(blob);
364
377
  });
378
+ if (_DEV) {
379
+ console.timeEnd('[export] 3. readAsDataURL');
380
+ console.timeEnd('[export] total');
381
+ }
365
382
  // Dimensions directly from the export canvas (already crop+rotation adjusted)
366
383
  return {
367
384
  dataUrl,
@@ -1035,6 +1035,9 @@ function getExportPipelines(device) {
1035
1035
  */
1036
1036
  export async function exportWithWebGPU(imageSource, adjustments, transform, cropArea = null, blurAreas = []) {
1037
1037
  try {
1038
+ const _DEV = import.meta.env.DEV;
1039
+ if (_DEV)
1040
+ console.time('[exportGPU] total');
1038
1041
  if (!navigator.gpu) {
1039
1042
  console.warn('WebGPU not supported for export');
1040
1043
  return null;
@@ -1066,10 +1069,13 @@ export async function exportWithWebGPU(imageSource, adjustments, transform, crop
1066
1069
  return null;
1067
1070
  }
1068
1071
  context.configure({ device, format, alphaMode: 'premultiplied' });
1069
- // Upload image as texture
1072
+ if (_DEV)
1073
+ console.time('[exportGPU] a. createImageBitmap');
1070
1074
  const bitmap = imageSource instanceof ImageBitmap
1071
1075
  ? imageSource
1072
1076
  : await createImageBitmap(imageSource);
1077
+ if (_DEV)
1078
+ console.timeEnd('[exportGPU] a. createImageBitmap');
1073
1079
  const texture = device.createTexture({
1074
1080
  size: [bitmap.width, bitmap.height, 1],
1075
1081
  format: 'rgba8unorm',
@@ -1156,6 +1162,8 @@ export async function exportWithWebGPU(imageSource, adjustments, transform, crop
1156
1162
  { binding: 4, resource: curveLUTTexture.createView() },
1157
1163
  ],
1158
1164
  });
1165
+ if (_DEV)
1166
+ console.time('[exportGPU] b. render passes');
1159
1167
  // Track which texture holds the current result for the grain/final pass
1160
1168
  let currentResultView = intermediate1.createView();
1161
1169
  // Helper: submit a single render pass with its own command encoder
@@ -1382,7 +1390,13 @@ export async function exportWithWebGPU(imageSource, adjustments, transform, crop
1382
1390
  const canvasView = context.getCurrentTexture().createView();
1383
1391
  createRenderPass(finalEncoder, canvasView, grainPipeline, grainBindGroup);
1384
1392
  device.queue.submit([finalEncoder.finish()]);
1393
+ if (_DEV)
1394
+ console.timeEnd('[exportGPU] b. render passes');
1395
+ if (_DEV)
1396
+ console.time('[exportGPU] c. onSubmittedWorkDone');
1385
1397
  await device.queue.onSubmittedWorkDone();
1398
+ if (_DEV)
1399
+ console.timeEnd('[exportGPU] c. onSubmittedWorkDone');
1386
1400
  // Cleanup per-export resources (pipelines/sampler are cached)
1387
1401
  texture.destroy();
1388
1402
  intermediate1.destroy();
@@ -1395,6 +1409,8 @@ export async function exportWithWebGPU(imageSource, adjustments, transform, crop
1395
1409
  sharpenUniformBuffer.destroy();
1396
1410
  denoiseUniformBuffer.destroy();
1397
1411
  curveLUTTexture.destroy();
1412
+ if (_DEV)
1413
+ console.timeEnd('[exportGPU] total');
1398
1414
  return canvas;
1399
1415
  }
1400
1416
  catch (error) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "tokimeki-image-editor",
3
- "version": "0.4.6",
3
+ "version": "0.4.9",
4
4
  "description": "An image editor for svelte.",
5
5
  "type": "module",
6
6
  "license": "MIT",