@remotion/transitions 4.0.455 → 4.0.457

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -528,9 +528,179 @@ var useTransitionProgress = () => {
528
528
  exiting: exiting?.exitingProgress ?? 0
529
529
  };
530
530
  };
531
+ // src/html-in-canvas-presentation.tsx
532
+ import { useLayoutEffect, useMemo as useMemo4, useRef as useRef2, useState, useCallback as useCallback2 } from "react";
533
+ import { HtmlInCanvas, useDelayRender } from "remotion";
534
+ import { AbsoluteFill as AbsoluteFill2, Internals as Internals2, useCurrentFrame as useCurrentFrame2 } from "remotion";
535
+ import { jsx as jsx4 } from "react/jsx-runtime";
536
+ var HtmlInCanvasPresentation = ({
537
+ children,
538
+ onElementImage,
539
+ onUnmount,
540
+ presentationProgress,
541
+ presentationDirection,
542
+ shader,
543
+ _experimentalEffects,
544
+ passedProps,
545
+ bothEnteringAndExiting
546
+ }) => {
547
+ if (!HtmlInCanvas.isSupported()) {
548
+ throw new Error("HTML in Canvas is not supported. Open this page in Chrome Canary with chrome://flags/#canvas-draw-element enabled.");
549
+ }
550
+ const canvasRef = useRef2(null);
551
+ const canvasSubtreeStyle = useMemo4(() => {
552
+ return {
553
+ width: "100%",
554
+ height: "100%",
555
+ position: "absolute",
556
+ top: 0,
557
+ left: 0,
558
+ right: 0,
559
+ bottom: 0
560
+ };
561
+ }, []);
562
+ const [offscreenCanvas] = useState(() => new OffscreenCanvas(1, 1));
563
+ const passedPropsRef = useRef2(passedProps);
564
+ passedPropsRef.current = passedProps;
565
+ const frame = useCurrentFrame2();
566
+ const frameRef = useRef2(frame);
567
+ frameRef.current = frame;
568
+ const effectsRef = useRef2(_experimentalEffects);
569
+ effectsRef.current = _experimentalEffects;
570
+ const [instance] = useState(() => shader(offscreenCanvas));
571
+ useLayoutEffect(() => {
572
+ return () => {
573
+ instance.cleanup();
574
+ };
575
+ }, [offscreenCanvas, instance]);
576
+ const chainState = Internals2.useEffectChainState();
577
+ const { delayRender, continueRender } = useDelayRender();
578
+ const draw = useCallback2(async (prevImage, nextImage, progress) => {
579
+ if (!canvasRef.current) {
580
+ throw new Error("Canvas not found");
581
+ }
582
+ const handle = delayRender("onPaint");
583
+ if (!prevImage && !nextImage) {
584
+ continueRender(handle);
585
+ instance.clear();
586
+ return;
587
+ }
588
+ const width = prevImage?.width ?? nextImage?.width ?? 0;
589
+ const height = prevImage?.height ?? nextImage?.height ?? 0;
590
+ if (width === 0 || height === 0) {
591
+ continueRender(handle);
592
+ instance.clear();
593
+ return;
594
+ }
595
+ offscreenCanvas.width = width;
596
+ offscreenCanvas.height = height;
597
+ instance.draw({
598
+ prevImage,
599
+ nextImage,
600
+ width,
601
+ height,
602
+ time: progress,
603
+ passedProps: passedPropsRef.current
604
+ });
605
+ await Internals2.runEffectChain({
606
+ state: chainState.get(width, height),
607
+ source: offscreenCanvas,
608
+ effects: effectsRef.current ?? [],
609
+ frame: frameRef.current,
610
+ width,
611
+ height,
612
+ output: canvasRef.current
613
+ });
614
+ continueRender(handle);
615
+ }, [chainState, instance, offscreenCanvas, continueRender, delayRender]);
616
+ const passThrough = bothEnteringAndExiting && presentationDirection === "exiting";
617
+ useLayoutEffect(() => {
618
+ if (passThrough) {
619
+ return;
620
+ }
621
+ const canvas = canvasRef.current;
622
+ if (!canvas) {
623
+ throw new Error("Canvas not found");
624
+ }
625
+ canvas.layoutSubtree = true;
626
+ const onPaint = () => {
627
+ const firstChild = canvas.firstChild;
628
+ if (!firstChild) {
629
+ return;
630
+ }
631
+ const elementImage = canvas.captureElementImage(firstChild);
632
+ onElementImage(elementImage, draw);
633
+ };
634
+ canvas.addEventListener("paint", onPaint);
635
+ return () => {
636
+ canvas.removeEventListener("paint", onPaint);
637
+ };
638
+ }, [onElementImage, presentationDirection, draw, passThrough]);
639
+ useLayoutEffect(() => {
640
+ if (passThrough) {
641
+ return;
642
+ }
643
+ const canvas = canvasRef.current;
644
+ if (!canvas) {
645
+ throw new Error("Canvas not found");
646
+ }
647
+ canvas.requestPaint?.();
648
+ }, [presentationProgress, passThrough]);
649
+ useLayoutEffect(() => {
650
+ if (passThrough) {
651
+ return;
652
+ }
653
+ return () => {
654
+ onUnmount();
655
+ };
656
+ }, [onUnmount, passThrough]);
657
+ useLayoutEffect(() => {
658
+ if (passThrough) {
659
+ return;
660
+ }
661
+ const canvas = canvasRef.current;
662
+ if (!canvas) {
663
+ return;
664
+ }
665
+ const observer = new ResizeObserver(([entry]) => {
666
+ canvas.width = entry.devicePixelContentBoxSize[0].inlineSize;
667
+ canvas.height = entry.devicePixelContentBoxSize[0].blockSize;
668
+ });
669
+ observer.observe(canvas, { box: "device-pixel-content-box" });
670
+ }, [passThrough]);
671
+ if (passThrough) {
672
+ return children;
673
+ }
674
+ return /* @__PURE__ */ jsx4(AbsoluteFill2, {
675
+ children: /* @__PURE__ */ jsx4("canvas", {
676
+ ref: canvasRef,
677
+ style: canvasSubtreeStyle,
678
+ children
679
+ })
680
+ });
681
+ };
682
+ var makeHtmlInCanvasPresentation = (shader) => {
683
+ const CompWithShader = (props) => {
684
+ const { passedProps, ...otherProps } = props;
685
+ const { _experimentalEffects, ...restPassedProps } = props.passedProps;
686
+ return /* @__PURE__ */ jsx4(HtmlInCanvasPresentation, {
687
+ shader,
688
+ passedProps: restPassedProps,
689
+ _experimentalEffects,
690
+ ...otherProps
691
+ });
692
+ };
693
+ return (props) => {
694
+ return {
695
+ component: CompWithShader,
696
+ props
697
+ };
698
+ };
699
+ };
531
700
  export {
532
701
  useTransitionProgress,
533
702
  springTiming,
703
+ makeHtmlInCanvasPresentation,
534
704
  linearTiming,
535
705
  TransitionSeries
536
706
  };
@@ -14,7 +14,7 @@ var HtmlInCanvasPresentation = ({
14
14
  passedProps,
15
15
  bothEnteringAndExiting
16
16
  }) => {
17
- if (!HtmlInCanvas.isHtmlInCanvasSupported()) {
17
+ if (!HtmlInCanvas.isSupported()) {
18
18
  throw new Error("HTML in Canvas is not supported. Open this page in Chrome Canary with chrome://flags/#canvas-draw-element enabled.");
19
19
  }
20
20
  const canvasRef = useRef(null);
@@ -37,9 +37,8 @@ var HtmlInCanvasPresentation = ({
37
37
  frameRef.current = frame;
38
38
  const effectsRef = useRef(_experimentalEffects);
39
39
  effectsRef.current = _experimentalEffects;
40
- const [instance] = useState(() => shader());
40
+ const [instance] = useState(() => shader(offscreenCanvas));
41
41
  useLayoutEffect(() => {
42
- instance.init(offscreenCanvas);
43
42
  return () => {
44
43
  instance.cleanup();
45
44
  };
@@ -278,52 +277,33 @@ var createTexture = (gl) => {
278
277
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0, 0, 0, 0]));
279
278
  return tex;
280
279
  };
281
- var zoomBlurShader = () => {
282
- let state = null;
283
- const init = (canvas) => {
284
- const gl = canvas.getContext("webgl2", { premultipliedAlpha: true });
285
- if (!gl) {
286
- return () => {};
287
- }
288
- const program = createProgram(gl);
289
- const prevTex = createTexture(gl);
290
- const nextTex = createTexture(gl);
291
- const vao = gl.createVertexArray();
292
- gl.bindVertexArray(vao);
293
- const buffer = gl.createBuffer();
294
- gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
295
- gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1, -1, 1, -1, -1, 1, 1, 1]), gl.STATIC_DRAW);
296
- const aPos = gl.getAttribLocation(program, "a_pos");
297
- gl.enableVertexAttribArray(aPos);
298
- gl.vertexAttribPointer(aPos, 2, gl.FLOAT, false, 0, 0);
299
- state = {
300
- gl,
301
- program,
302
- prevTex,
303
- nextTex,
304
- uTime: gl.getUniformLocation(program, "u_time"),
305
- uPrev: gl.getUniformLocation(program, "u_prev"),
306
- uNext: gl.getUniformLocation(program, "u_next"),
307
- uAspect: gl.getUniformLocation(program, "u_aspect"),
308
- uMaxAngle: gl.getUniformLocation(program, "u_max_angle")
309
- };
310
- return () => {};
311
- };
280
+ var zoomBlurShader = (canvas) => {
281
+ const gl = canvas.getContext("webgl2", { premultipliedAlpha: true });
282
+ if (!gl) {
283
+ throw new Error("Failed to create WebGL2 context");
284
+ }
285
+ const program = createProgram(gl);
286
+ const prevTex = createTexture(gl);
287
+ const nextTex = createTexture(gl);
288
+ const vao = gl.createVertexArray();
289
+ gl.bindVertexArray(vao);
290
+ const buffer = gl.createBuffer();
291
+ gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
292
+ gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1, -1, 1, -1, -1, 1, 1, 1]), gl.STATIC_DRAW);
293
+ const aPos = gl.getAttribLocation(program, "a_pos");
294
+ gl.enableVertexAttribArray(aPos);
295
+ gl.vertexAttribPointer(aPos, 2, gl.FLOAT, false, 0, 0);
296
+ const uTime = gl.getUniformLocation(program, "u_time");
297
+ const uPrev = gl.getUniformLocation(program, "u_prev");
298
+ const uNext = gl.getUniformLocation(program, "u_next");
299
+ const uAspect = gl.getUniformLocation(program, "u_aspect");
300
+ const uMaxAngle = gl.getUniformLocation(program, "u_max_angle");
312
301
  const cleanup = () => {
313
- if (!state) {
314
- throw new Error("Zoom blur state not initialized");
315
- }
316
- const { gl, program, prevTex, nextTex } = state;
317
302
  gl.deleteProgram(program);
318
303
  gl.deleteTexture(prevTex);
319
304
  gl.deleteTexture(nextTex);
320
- state = null;
321
305
  };
322
306
  const clear = () => {
323
- if (!state) {
324
- throw new Error("Zoom blur state not initialized");
325
- }
326
- const { gl } = state;
327
307
  gl.clearColor(0, 0, 0, 0);
328
308
  gl.clear(gl.COLOR_BUFFER_BIT);
329
309
  };
@@ -335,21 +315,7 @@ var zoomBlurShader = () => {
335
315
  time,
336
316
  passedProps
337
317
  }) => {
338
- if (!state) {
339
- throw new Error("Zoom blur state not initialized");
340
- }
341
318
  const { rotation = Math.PI / 6 } = passedProps;
342
- const {
343
- gl,
344
- program,
345
- prevTex,
346
- nextTex,
347
- uTime,
348
- uPrev,
349
- uNext,
350
- uAspect,
351
- uMaxAngle
352
- } = state;
353
319
  if (!prevImage && !nextImage) {
354
320
  return;
355
321
  }
@@ -382,10 +348,9 @@ var zoomBlurShader = () => {
382
348
  gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
383
349
  };
384
350
  return {
385
- init,
386
351
  clear,
387
- draw,
388
- cleanup
352
+ cleanup,
353
+ draw
389
354
  };
390
355
  };
391
356
  var zoomBlur = makeHtmlInCanvasPresentation(zoomBlurShader);
@@ -0,0 +1,322 @@
1
+ // src/html-in-canvas-presentation.tsx
2
+ import { useLayoutEffect, useMemo, useRef, useState, useCallback } from "react";
3
+ import { HtmlInCanvas, useDelayRender } from "remotion";
4
+ import { AbsoluteFill, Internals, useCurrentFrame } from "remotion";
5
+ import { jsx } from "react/jsx-runtime";
6
+ var HtmlInCanvasPresentation = ({
7
+ children,
8
+ onElementImage,
9
+ onUnmount,
10
+ presentationProgress,
11
+ presentationDirection,
12
+ shader,
13
+ _experimentalEffects,
14
+ passedProps,
15
+ bothEnteringAndExiting
16
+ }) => {
17
+ if (!HtmlInCanvas.isSupported()) {
18
+ throw new Error("HTML in Canvas is not supported. Open this page in Chrome Canary with chrome://flags/#canvas-draw-element enabled.");
19
+ }
20
+ const canvasRef = useRef(null);
21
+ const canvasSubtreeStyle = useMemo(() => {
22
+ return {
23
+ width: "100%",
24
+ height: "100%",
25
+ position: "absolute",
26
+ top: 0,
27
+ left: 0,
28
+ right: 0,
29
+ bottom: 0
30
+ };
31
+ }, []);
32
+ const [offscreenCanvas] = useState(() => new OffscreenCanvas(1, 1));
33
+ const passedPropsRef = useRef(passedProps);
34
+ passedPropsRef.current = passedProps;
35
+ const frame = useCurrentFrame();
36
+ const frameRef = useRef(frame);
37
+ frameRef.current = frame;
38
+ const effectsRef = useRef(_experimentalEffects);
39
+ effectsRef.current = _experimentalEffects;
40
+ const [instance] = useState(() => shader(offscreenCanvas));
41
+ useLayoutEffect(() => {
42
+ return () => {
43
+ instance.cleanup();
44
+ };
45
+ }, [offscreenCanvas, instance]);
46
+ const chainState = Internals.useEffectChainState();
47
+ const { delayRender, continueRender } = useDelayRender();
48
+ const draw = useCallback(async (prevImage, nextImage, progress) => {
49
+ if (!canvasRef.current) {
50
+ throw new Error("Canvas not found");
51
+ }
52
+ const handle = delayRender("onPaint");
53
+ if (!prevImage && !nextImage) {
54
+ continueRender(handle);
55
+ instance.clear();
56
+ return;
57
+ }
58
+ const width = prevImage?.width ?? nextImage?.width ?? 0;
59
+ const height = prevImage?.height ?? nextImage?.height ?? 0;
60
+ if (width === 0 || height === 0) {
61
+ continueRender(handle);
62
+ instance.clear();
63
+ return;
64
+ }
65
+ offscreenCanvas.width = width;
66
+ offscreenCanvas.height = height;
67
+ instance.draw({
68
+ prevImage,
69
+ nextImage,
70
+ width,
71
+ height,
72
+ time: progress,
73
+ passedProps: passedPropsRef.current
74
+ });
75
+ await Internals.runEffectChain({
76
+ state: chainState.get(width, height),
77
+ source: offscreenCanvas,
78
+ effects: effectsRef.current ?? [],
79
+ frame: frameRef.current,
80
+ width,
81
+ height,
82
+ output: canvasRef.current
83
+ });
84
+ continueRender(handle);
85
+ }, [chainState, instance, offscreenCanvas, continueRender, delayRender]);
86
+ const passThrough = bothEnteringAndExiting && presentationDirection === "exiting";
87
+ useLayoutEffect(() => {
88
+ if (passThrough) {
89
+ return;
90
+ }
91
+ const canvas = canvasRef.current;
92
+ if (!canvas) {
93
+ throw new Error("Canvas not found");
94
+ }
95
+ canvas.layoutSubtree = true;
96
+ const onPaint = () => {
97
+ const firstChild = canvas.firstChild;
98
+ if (!firstChild) {
99
+ return;
100
+ }
101
+ const elementImage = canvas.captureElementImage(firstChild);
102
+ onElementImage(elementImage, draw);
103
+ };
104
+ canvas.addEventListener("paint", onPaint);
105
+ return () => {
106
+ canvas.removeEventListener("paint", onPaint);
107
+ };
108
+ }, [onElementImage, presentationDirection, draw, passThrough]);
109
+ useLayoutEffect(() => {
110
+ if (passThrough) {
111
+ return;
112
+ }
113
+ const canvas = canvasRef.current;
114
+ if (!canvas) {
115
+ throw new Error("Canvas not found");
116
+ }
117
+ canvas.requestPaint?.();
118
+ }, [presentationProgress, passThrough]);
119
+ useLayoutEffect(() => {
120
+ if (passThrough) {
121
+ return;
122
+ }
123
+ return () => {
124
+ onUnmount();
125
+ };
126
+ }, [onUnmount, passThrough]);
127
+ useLayoutEffect(() => {
128
+ if (passThrough) {
129
+ return;
130
+ }
131
+ const canvas = canvasRef.current;
132
+ if (!canvas) {
133
+ return;
134
+ }
135
+ const observer = new ResizeObserver(([entry]) => {
136
+ canvas.width = entry.devicePixelContentBoxSize[0].inlineSize;
137
+ canvas.height = entry.devicePixelContentBoxSize[0].blockSize;
138
+ });
139
+ observer.observe(canvas, { box: "device-pixel-content-box" });
140
+ }, [passThrough]);
141
+ if (passThrough) {
142
+ return children;
143
+ }
144
+ return /* @__PURE__ */ jsx(AbsoluteFill, {
145
+ children: /* @__PURE__ */ jsx("canvas", {
146
+ ref: canvasRef,
147
+ style: canvasSubtreeStyle,
148
+ children
149
+ })
150
+ });
151
+ };
152
+ var makeHtmlInCanvasPresentation = (shader) => {
153
+ const CompWithShader = (props) => {
154
+ const { passedProps, ...otherProps } = props;
155
+ const { _experimentalEffects, ...restPassedProps } = props.passedProps;
156
+ return /* @__PURE__ */ jsx(HtmlInCanvasPresentation, {
157
+ shader,
158
+ passedProps: restPassedProps,
159
+ _experimentalEffects,
160
+ ...otherProps
161
+ });
162
+ };
163
+ return (props) => {
164
+ return {
165
+ component: CompWithShader,
166
+ props
167
+ };
168
+ };
169
+ };
170
+
171
+ // src/presentations/zoom-in-out.tsx
172
+ var VERTEX_SHADER = `#version 300 es
173
+ in vec2 a_pos;
174
+ out vec2 v_uv;
175
+ void main() {
176
+ v_uv = vec2(a_pos.x * 0.5 + 0.5, 0.5 - a_pos.y * 0.5);
177
+ gl_Position = vec4(a_pos, 0.0, 1.0);
178
+ }`;
179
+ var FRAGMENT_SHADER = `#version 300 es
180
+ precision highp float;
181
+
182
+ uniform sampler2D u_prev;
183
+ uniform sampler2D u_next;
184
+ uniform float u_time;
185
+
186
+ in vec2 v_uv;
187
+ out vec4 outColor;
188
+
189
+ vec2 zoom(vec2 uv, float amount) {
190
+ return 0.5 + ((uv - 0.5) * (1.0 - amount));
191
+ }
192
+
193
+ void main() {
194
+ float progress = 1.0 - u_time;
195
+ float zoomFrom = smoothstep(0.0, 1.0, progress * 2.0);
196
+ float zoomTo = smoothstep(0.0, 1.0, (1.0 - progress) * 2.0);
197
+ float crossfade = smoothstep(0.4, 0.6, progress);
198
+ outColor = mix(
199
+ texture(u_prev, zoom(v_uv, zoomFrom)),
200
+ texture(u_next, zoom(v_uv, zoomTo)),
201
+ crossfade
202
+ );
203
+ }`;
204
+ var compileShader = (gl, source, type) => {
205
+ const shader = gl.createShader(type);
206
+ if (!shader) {
207
+ throw new Error("Failed to create shader");
208
+ }
209
+ gl.shaderSource(shader, source);
210
+ gl.compileShader(shader);
211
+ if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
212
+ const log = gl.getShaderInfoLog(shader);
213
+ gl.deleteShader(shader);
214
+ throw new Error(`Failed to compile shader: ${log}`);
215
+ }
216
+ return shader;
217
+ };
218
+ var createProgram = (gl) => {
219
+ const program = gl.createProgram();
220
+ if (!program) {
221
+ throw new Error("Failed to create WebGL program");
222
+ }
223
+ const vs = compileShader(gl, VERTEX_SHADER, gl.VERTEX_SHADER);
224
+ const fs = compileShader(gl, FRAGMENT_SHADER, gl.FRAGMENT_SHADER);
225
+ gl.attachShader(program, vs);
226
+ gl.attachShader(program, fs);
227
+ gl.linkProgram(program);
228
+ if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
229
+ const log = gl.getProgramInfoLog(program);
230
+ gl.deleteProgram(program);
231
+ throw new Error(`Failed to link program: ${log}`);
232
+ }
233
+ return program;
234
+ };
235
+ var createTexture = (gl) => {
236
+ const tex = gl.createTexture();
237
+ if (!tex) {
238
+ throw new Error("Failed to create texture");
239
+ }
240
+ gl.bindTexture(gl.TEXTURE_2D, tex);
241
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
242
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
243
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
244
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
245
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0, 0, 0, 0]));
246
+ return tex;
247
+ };
248
+ var zoomInOutShader = (canvas) => {
249
+ const gl = canvas.getContext("webgl2", { premultipliedAlpha: true });
250
+ if (!gl) {
251
+ throw new Error("Failed to create WebGL2 context");
252
+ }
253
+ const program = createProgram(gl);
254
+ const prevTex = createTexture(gl);
255
+ const nextTex = createTexture(gl);
256
+ const vao = gl.createVertexArray();
257
+ gl.bindVertexArray(vao);
258
+ const buffer = gl.createBuffer();
259
+ gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
260
+ gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1, -1, 1, -1, -1, 1, 1, 1]), gl.STATIC_DRAW);
261
+ const aPos = gl.getAttribLocation(program, "a_pos");
262
+ gl.enableVertexAttribArray(aPos);
263
+ gl.vertexAttribPointer(aPos, 2, gl.FLOAT, false, 0, 0);
264
+ const uTime = gl.getUniformLocation(program, "u_time");
265
+ const uPrev = gl.getUniformLocation(program, "u_prev");
266
+ const uNext = gl.getUniformLocation(program, "u_next");
267
+ const cleanup = () => {
268
+ gl.deleteProgram(program);
269
+ gl.deleteTexture(prevTex);
270
+ gl.deleteTexture(nextTex);
271
+ };
272
+ const clear = () => {
273
+ gl.clearColor(0, 0, 0, 0);
274
+ gl.clear(gl.COLOR_BUFFER_BIT);
275
+ };
276
+ const draw = ({
277
+ prevImage,
278
+ nextImage,
279
+ width,
280
+ height,
281
+ time
282
+ }) => {
283
+ if (!prevImage && !nextImage) {
284
+ return;
285
+ }
286
+ if (prevImage && (prevImage.width === 0 || prevImage.height === 0)) {
287
+ return;
288
+ }
289
+ if (nextImage && (nextImage.width === 0 || nextImage.height === 0)) {
290
+ return;
291
+ }
292
+ const effectiveTime = !prevImage ? 0 : !nextImage ? 1 : time;
293
+ gl.viewport(0, 0, width, height);
294
+ gl.clearColor(0, 0, 0, 0);
295
+ gl.clear(gl.COLOR_BUFFER_BIT);
296
+ gl.useProgram(program);
297
+ gl.activeTexture(gl.TEXTURE0);
298
+ gl.bindTexture(gl.TEXTURE_2D, prevTex);
299
+ if (prevImage) {
300
+ gl.texElementImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, prevImage);
301
+ }
302
+ gl.uniform1i(uPrev, 0);
303
+ gl.activeTexture(gl.TEXTURE1);
304
+ gl.bindTexture(gl.TEXTURE_2D, nextTex);
305
+ if (nextImage) {
306
+ gl.texElementImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, nextImage);
307
+ }
308
+ gl.uniform1i(uNext, 1);
309
+ gl.uniform1f(uTime, effectiveTime);
310
+ gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
311
+ };
312
+ return {
313
+ clear,
314
+ cleanup,
315
+ draw
316
+ };
317
+ };
318
+ var zoomInOut = makeHtmlInCanvasPresentation(zoomInOutShader);
319
+ export {
320
+ zoomInOutShader,
321
+ zoomInOut
322
+ };
@@ -1,23 +1,24 @@
1
1
  import { type EffectsProp } from 'remotion';
2
2
  import type { TransitionPresentation, TransitionPresentationComponentProps } from './types';
3
3
  export declare const HtmlInCanvasPresentation: <TPassedProps extends Record<string, unknown>>({ children, onElementImage, onUnmount, presentationProgress, presentationDirection, shader, _experimentalEffects, passedProps, bothEnteringAndExiting, }: TransitionPresentationComponentProps<TPassedProps> & {
4
- readonly shader: () => HtmlInCanvasShader<TPassedProps>;
4
+ readonly shader: HtmlInCanvasShader<TPassedProps>;
5
5
  readonly _experimentalEffects?: EffectsProp | undefined;
6
6
  }) => string | number | bigint | boolean | import("react/jsx-runtime").JSX.Element | Iterable<import("react").ReactNode> | Promise<string | number | bigint | boolean | Iterable<import("react").ReactNode> | import("react").ReactElement<unknown, string | import("react").JSXElementConstructor<any>> | import("react").ReactPortal | null | undefined> | null | undefined;
7
- export type HtmlInCanvasShader<TPassedProps> = {
8
- init: (canvas: OffscreenCanvas) => void;
7
+ export type HtmlInCanvasShaderDrawParams<Props> = {
8
+ prevImage: ElementImage | null;
9
+ nextImage: ElementImage | null;
10
+ width: number;
11
+ height: number;
12
+ time: number;
13
+ passedProps: Props;
14
+ };
15
+ export type HtmlInCanvasShaderDraw<Props> = (params: HtmlInCanvasShaderDrawParams<Props>) => void;
16
+ export type HtmlInCanvasShader<Props> = (canvas: OffscreenCanvas) => {
9
17
  clear: () => void;
10
- draw: (params: {
11
- prevImage: ElementImage | null;
12
- nextImage: ElementImage | null;
13
- width: number;
14
- height: number;
15
- time: number;
16
- passedProps: TPassedProps;
17
- }) => void;
18
18
  cleanup: () => void;
19
+ draw: HtmlInCanvasShaderDraw<Props>;
19
20
  };
20
- export declare const makeHtmlInCanvasPresentation: <TPassedProps extends Record<string, unknown>>(shader: () => HtmlInCanvasShader<TPassedProps>) => (props: TPassedProps & {
21
+ export declare const makeHtmlInCanvasPresentation: <TPassedProps extends Record<string, unknown>>(shader: HtmlInCanvasShader<TPassedProps>) => (props: TPassedProps & {
21
22
  _experimentalEffects?: EffectsProp | undefined;
22
23
  }) => TransitionPresentation<TPassedProps & {
23
24
  _experimentalEffects?: EffectsProp | undefined;
@@ -6,7 +6,7 @@ const react_1 = require("react");
6
6
  const remotion_1 = require("remotion");
7
7
  const remotion_2 = require("remotion");
8
8
  const HtmlInCanvasPresentation = ({ children, onElementImage, onUnmount, presentationProgress, presentationDirection, shader, _experimentalEffects, passedProps, bothEnteringAndExiting, }) => {
9
- if (!remotion_1.HtmlInCanvas.isHtmlInCanvasSupported()) {
9
+ if (!remotion_1.HtmlInCanvas.isSupported()) {
10
10
  throw new Error('HTML in Canvas is not supported. Open this page in Chrome Canary with chrome://flags/#canvas-draw-element enabled.');
11
11
  }
12
12
  const canvasRef = (0, react_1.useRef)(null);
@@ -29,9 +29,8 @@ const HtmlInCanvasPresentation = ({ children, onElementImage, onUnmount, present
29
29
  frameRef.current = frame;
30
30
  const effectsRef = (0, react_1.useRef)(_experimentalEffects);
31
31
  effectsRef.current = _experimentalEffects;
32
- const [instance] = (0, react_1.useState)(() => shader());
32
+ const [instance] = (0, react_1.useState)(() => shader(offscreenCanvas));
33
33
  (0, react_1.useLayoutEffect)(() => {
34
- instance.init(offscreenCanvas);
35
34
  return () => {
36
35
  instance.cleanup();
37
36
  };
package/dist/index.d.ts CHANGED
@@ -3,3 +3,4 @@ export { springTiming } from './timings/spring-timing.js';
3
3
  export { TransitionSeries } from './TransitionSeries.js';
4
4
  export { TransitionPresentation, TransitionPresentationComponentProps, TransitionSeriesOverlayProps, TransitionTiming, } from './types.js';
5
5
  export { TransitionState, useTransitionProgress, } from './use-transition-progress.js';
6
+ export { HtmlInCanvasShader, HtmlInCanvasShaderDraw, HtmlInCanvasShaderDrawParams, makeHtmlInCanvasPresentation, } from './html-in-canvas-presentation.js';
package/dist/index.js CHANGED
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.useTransitionProgress = exports.TransitionSeries = exports.springTiming = exports.linearTiming = void 0;
3
+ exports.makeHtmlInCanvasPresentation = exports.useTransitionProgress = exports.TransitionSeries = exports.springTiming = exports.linearTiming = void 0;
4
4
  // Timings
5
5
  const linear_timing_js_1 = require("./timings/linear-timing.js");
6
6
  Object.defineProperty(exports, "linearTiming", { enumerable: true, get: function () { return linear_timing_js_1.linearTiming; } });
@@ -12,3 +12,6 @@ Object.defineProperty(exports, "TransitionSeries", { enumerable: true, get: func
12
12
  // Hooks
13
13
  const use_transition_progress_js_1 = require("./use-transition-progress.js");
14
14
  Object.defineProperty(exports, "useTransitionProgress", { enumerable: true, get: function () { return use_transition_progress_js_1.useTransitionProgress; } });
15
+ // HTML-in-canvas
16
+ const html_in_canvas_presentation_js_1 = require("./html-in-canvas-presentation.js");
17
+ Object.defineProperty(exports, "makeHtmlInCanvasPresentation", { enumerable: true, get: function () { return html_in_canvas_presentation_js_1.makeHtmlInCanvasPresentation; } });
@@ -1,8 +1,11 @@
1
- import type { HtmlInCanvasShader } from '../html-in-canvas-presentation';
2
1
  export type ZoomBlurProps = {
3
2
  rotation?: number;
4
3
  };
5
- export declare const zoomBlurShader: () => HtmlInCanvasShader<ZoomBlurProps>;
4
+ export declare const zoomBlurShader: (canvas: OffscreenCanvas) => {
5
+ clear: () => void;
6
+ cleanup: () => void;
7
+ draw: import("..").HtmlInCanvasShaderDraw<ZoomBlurProps>;
8
+ };
6
9
  export declare const zoomBlur: (props: ZoomBlurProps & {
7
10
  _experimentalEffects?: import("remotion").EffectsProp | undefined;
8
11
  }) => import("..").TransitionPresentation<ZoomBlurProps & {
@@ -110,61 +110,38 @@ const createTexture = (gl) => {
110
110
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0, 0, 0, 0]));
111
111
  return tex;
112
112
  };
113
- const zoomBlurShader = () => {
114
- let state = null;
115
- const init = (canvas) => {
116
- const gl = canvas.getContext('webgl2', { premultipliedAlpha: true });
117
- if (!gl) {
118
- return () => { };
119
- }
120
- const program = createProgram(gl);
121
- const prevTex = createTexture(gl);
122
- const nextTex = createTexture(gl);
123
- const vao = gl.createVertexArray();
124
- gl.bindVertexArray(vao);
125
- const buffer = gl.createBuffer();
126
- gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
127
- gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1, -1, 1, -1, -1, 1, 1, 1]), gl.STATIC_DRAW);
128
- const aPos = gl.getAttribLocation(program, 'a_pos');
129
- gl.enableVertexAttribArray(aPos);
130
- gl.vertexAttribPointer(aPos, 2, gl.FLOAT, false, 0, 0);
131
- state = {
132
- gl,
133
- program,
134
- prevTex,
135
- nextTex,
136
- uTime: gl.getUniformLocation(program, 'u_time'),
137
- uPrev: gl.getUniformLocation(program, 'u_prev'),
138
- uNext: gl.getUniformLocation(program, 'u_next'),
139
- uAspect: gl.getUniformLocation(program, 'u_aspect'),
140
- uMaxAngle: gl.getUniformLocation(program, 'u_max_angle'),
141
- };
142
- return () => { };
143
- };
113
+ const zoomBlurShader = (canvas) => {
114
+ const gl = canvas.getContext('webgl2', { premultipliedAlpha: true });
115
+ if (!gl) {
116
+ throw new Error('Failed to create WebGL2 context');
117
+ }
118
+ const program = createProgram(gl);
119
+ const prevTex = createTexture(gl);
120
+ const nextTex = createTexture(gl);
121
+ const vao = gl.createVertexArray();
122
+ gl.bindVertexArray(vao);
123
+ const buffer = gl.createBuffer();
124
+ gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
125
+ gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1, -1, 1, -1, -1, 1, 1, 1]), gl.STATIC_DRAW);
126
+ const aPos = gl.getAttribLocation(program, 'a_pos');
127
+ gl.enableVertexAttribArray(aPos);
128
+ gl.vertexAttribPointer(aPos, 2, gl.FLOAT, false, 0, 0);
129
+ const uTime = gl.getUniformLocation(program, 'u_time');
130
+ const uPrev = gl.getUniformLocation(program, 'u_prev');
131
+ const uNext = gl.getUniformLocation(program, 'u_next');
132
+ const uAspect = gl.getUniformLocation(program, 'u_aspect');
133
+ const uMaxAngle = gl.getUniformLocation(program, 'u_max_angle');
144
134
  const cleanup = () => {
145
- if (!state) {
146
- throw new Error('Zoom blur state not initialized');
147
- }
148
- const { gl, program, prevTex, nextTex } = state;
149
135
  gl.deleteProgram(program);
150
136
  gl.deleteTexture(prevTex);
151
137
  gl.deleteTexture(nextTex);
152
- state = null;
153
138
  };
154
139
  const clear = () => {
155
- if (!state) {
156
- throw new Error('Zoom blur state not initialized');
157
- }
158
- const { gl } = state;
159
140
  gl.clearColor(0, 0, 0, 0);
160
141
  gl.clear(gl.COLOR_BUFFER_BIT);
161
142
  };
162
143
  const draw = ({ prevImage, nextImage, width, height, time, passedProps, }) => {
163
- if (!state) {
164
- throw new Error('Zoom blur state not initialized');
165
- }
166
144
  const { rotation = Math.PI / 6 } = passedProps;
167
- const { gl, program, prevTex, nextTex, uTime, uPrev, uNext, uAspect, uMaxAngle, } = state;
168
145
  if (!prevImage && !nextImage) {
169
146
  return;
170
147
  }
@@ -200,10 +177,9 @@ const zoomBlurShader = () => {
200
177
  gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
201
178
  };
202
179
  return {
203
- init,
204
180
  clear,
205
- draw,
206
181
  cleanup,
182
+ draw,
207
183
  };
208
184
  };
209
185
  exports.zoomBlurShader = zoomBlurShader;
@@ -0,0 +1,11 @@
1
+ export type ZoomInOutProps = Record<string, never>;
2
+ export declare const zoomInOutShader: (canvas: OffscreenCanvas) => {
3
+ clear: () => void;
4
+ cleanup: () => void;
5
+ draw: import("..").HtmlInCanvasShaderDraw<ZoomInOutProps>;
6
+ };
7
+ export declare const zoomInOut: (props: ZoomInOutProps & {
8
+ _experimentalEffects?: import("remotion").EffectsProp | undefined;
9
+ }) => import("..").TransitionPresentation<ZoomInOutProps & {
10
+ _experimentalEffects?: import("remotion").EffectsProp | undefined;
11
+ }>;
@@ -0,0 +1,150 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.zoomInOut = exports.zoomInOutShader = void 0;
4
+ const html_in_canvas_presentation_1 = require("../html-in-canvas-presentation");
5
// Full-screen quad vertex shader: maps clip-space [-1, 1] positions to UVs,
// flipping Y so v_uv = (0, 0) corresponds to the top-left of the element image.
const VERTEX_SHADER = `#version 300 es
in vec2 a_pos;
out vec2 v_uv;
void main() {
v_uv = vec2(a_pos.x * 0.5 + 0.5, 0.5 - a_pos.y * 0.5);
gl_Position = vec4(a_pos, 0.0, 1.0);
}`;
// Adapted from https://gl-transitions.com/editor/zoomInOut
// Author: OllyOllyOlly · License: MIT
//
// Scales both scenes toward the center with smoothstep ramps and cross-fades
// between them around the midpoint of the transition.
// NOTE(review): progress is defined as 1.0 - u_time, i.e. u_time runs from 1
// down to 0 across the transition; the effectiveTime logic in draw() in this
// file matches that convention (u_time=0 → next image, u_time=1 → prev image).
const FRAGMENT_SHADER = `#version 300 es
precision highp float;

uniform sampler2D u_prev;
uniform sampler2D u_next;
uniform float u_time;

in vec2 v_uv;
out vec4 outColor;

vec2 zoom(vec2 uv, float amount) {
return 0.5 + ((uv - 0.5) * (1.0 - amount));
}

void main() {
float progress = 1.0 - u_time;
float zoomFrom = smoothstep(0.0, 1.0, progress * 2.0);
float zoomTo = smoothstep(0.0, 1.0, (1.0 - progress) * 2.0);
float crossfade = smoothstep(0.4, 0.6, progress);
outColor = mix(
texture(u_prev, zoom(v_uv, zoomFrom)),
texture(u_next, zoom(v_uv, zoomTo)),
crossfade
);
}`;
39
+ const compileShader = (gl, source, type) => {
40
+ const shader = gl.createShader(type);
41
+ if (!shader) {
42
+ throw new Error('Failed to create shader');
43
+ }
44
+ gl.shaderSource(shader, source);
45
+ gl.compileShader(shader);
46
+ if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
47
+ const log = gl.getShaderInfoLog(shader);
48
+ gl.deleteShader(shader);
49
+ throw new Error(`Failed to compile shader: ${log}`);
50
+ }
51
+ return shader;
52
+ };
53
// Builds and links the full-screen transition program from the module's
// vertex/fragment GLSL sources. Throws with the info log if linking fails.
const createProgram = (gl) => {
    const linked = gl.createProgram();
    if (!linked) {
        throw new Error('Failed to create WebGL program');
    }
    const vertexStage = compileShader(gl, VERTEX_SHADER, gl.VERTEX_SHADER);
    const fragmentStage = compileShader(gl, FRAGMENT_SHADER, gl.FRAGMENT_SHADER);
    for (const stage of [vertexStage, fragmentStage]) {
        gl.attachShader(linked, stage);
    }
    gl.linkProgram(linked);
    if (gl.getProgramParameter(linked, gl.LINK_STATUS)) {
        return linked;
    }
    const infoLog = gl.getProgramInfoLog(linked);
    gl.deleteProgram(linked);
    throw new Error(`Failed to link program: ${infoLog}`);
};
70
+ const createTexture = (gl) => {
71
+ const tex = gl.createTexture();
72
+ if (!tex) {
73
+ throw new Error('Failed to create texture');
74
+ }
75
+ gl.bindTexture(gl.TEXTURE_2D, tex);
76
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
77
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
78
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
79
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
80
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0, 0, 0, 0]));
81
+ return tex;
82
+ };
83
// Factory for the zoom-in/out HTML-in-Canvas shader. Compiles the program,
// allocates one texture per side of the transition, uploads a unit quad, and
// returns the { clear, cleanup, draw } handles the presentation layer drives
// once per frame.
const zoomInOutShader = (canvas) => {
    // premultipliedAlpha keeps blending consistent with the page compositor.
    const gl = canvas.getContext('webgl2', { premultipliedAlpha: true });
    if (!gl) {
        throw new Error('Failed to create WebGL2 context');
    }
    const program = createProgram(gl);
    const prevTex = createTexture(gl);
    const nextTex = createTexture(gl);
    // Static full-screen quad drawn as a 4-vertex triangle strip.
    const vao = gl.createVertexArray();
    gl.bindVertexArray(vao);
    const buffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1, -1, 1, -1, -1, 1, 1, 1]), gl.STATIC_DRAW);
    const aPos = gl.getAttribLocation(program, 'a_pos');
    gl.enableVertexAttribArray(aPos);
    gl.vertexAttribPointer(aPos, 2, gl.FLOAT, false, 0, 0);
    // Uniform locations are resolved once here and captured by draw().
    const uTime = gl.getUniformLocation(program, 'u_time');
    const uPrev = gl.getUniformLocation(program, 'u_prev');
    const uNext = gl.getUniformLocation(program, 'u_next');
    // Frees the program and textures when the presentation unmounts.
    // NOTE(review): vao and buffer are not deleted here (same as the zoom-blur
    // shader in this package) — confirm the GL context is discarded together
    // with its OffscreenCanvas.
    const cleanup = () => {
        gl.deleteProgram(program);
        gl.deleteTexture(prevTex);
        gl.deleteTexture(nextTex);
    };
    // Resets the canvas to fully transparent black.
    const clear = () => {
        gl.clearColor(0, 0, 0, 0);
        gl.clear(gl.COLOR_BUFFER_BIT);
    };
    // Renders one frame of the transition. Drawing is skipped entirely when no
    // side is available or when either captured image has a zero dimension.
    const draw = ({ prevImage, nextImage, width, height, time, }) => {
        if (!prevImage && !nextImage) {
            return;
        }
        if (prevImage && (prevImage.width === 0 || prevImage.height === 0)) {
            return;
        }
        if (nextImage && (nextImage.width === 0 || nextImage.height === 0)) {
            return;
        }
        // When one side is missing, force the mix to fully show the other.
        // At time=0 the shader outputs nextImage. At time=1 the shader outputs prevImage.
        const effectiveTime = !prevImage ? 0 : !nextImage ? 1 : time;
        gl.viewport(0, 0, width, height);
        gl.clearColor(0, 0, 0, 0);
        gl.clear(gl.COLOR_BUFFER_BIT);
        gl.useProgram(program);
        // Texture unit 0: the outgoing (prev) element image.
        gl.activeTexture(gl.TEXTURE0);
        gl.bindTexture(gl.TEXTURE_2D, prevTex);
        if (prevImage) {
            // NOTE(review): texElementImage2D looks like the experimental
            // HTML-in-Canvas upload API (chrome://flags/#canvas-draw-element,
            // see the isSupported() guard in the presentation component) —
            // confirm availability before relying on it outside Chrome Canary.
            gl.texElementImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, prevImage);
        }
        gl.uniform1i(uPrev, 0);
        // Texture unit 1: the incoming (next) element image.
        gl.activeTexture(gl.TEXTURE1);
        gl.bindTexture(gl.TEXTURE_2D, nextTex);
        if (nextImage) {
            gl.texElementImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, nextImage);
        }
        gl.uniform1i(uNext, 1);
        gl.uniform1f(uTime, effectiveTime);
        gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    };
    return {
        clear,
        cleanup,
        draw,
    };
};
exports.zoomInOutShader = zoomInOutShader;
// Public presentation: wraps the shader factory in the shared HTML-in-Canvas
// presentation component.
exports.zoomInOut = (0, html_in_canvas_presentation_1.makeHtmlInCanvasPresentation)(exports.zoomInOutShader);
package/package.json CHANGED
@@ -3,7 +3,7 @@
3
3
  "url": "https://github.com/remotion-dev/remotion/tree/main/packages/transitions"
4
4
  },
5
5
  "name": "@remotion/transitions",
6
- "version": "4.0.455",
6
+ "version": "4.0.457",
7
7
  "description": "Library for creating transitions in Remotion",
8
8
  "sideEffects": false,
9
9
  "main": "dist/esm/index.mjs",
@@ -23,18 +23,18 @@
23
23
  "url": "https://github.com/remotion-dev/remotion/issues"
24
24
  },
25
25
  "dependencies": {
26
- "remotion": "4.0.455",
27
- "@remotion/shapes": "4.0.455",
28
- "@remotion/paths": "4.0.455"
26
+ "remotion": "4.0.457",
27
+ "@remotion/shapes": "4.0.457",
28
+ "@remotion/paths": "4.0.457"
29
29
  },
30
30
  "devDependencies": {
31
31
  "@happy-dom/global-registrator": "14.5.1",
32
- "remotion": "4.0.455",
32
+ "remotion": "4.0.457",
33
33
  "react": "19.2.3",
34
34
  "react-dom": "19.2.3",
35
- "@remotion/test-utils": "4.0.455",
36
- "@remotion/player": "4.0.455",
37
- "@remotion/eslint-config-internal": "4.0.455",
35
+ "@remotion/test-utils": "4.0.457",
36
+ "@remotion/player": "4.0.457",
37
+ "@remotion/eslint-config-internal": "4.0.457",
38
38
  "eslint": "9.19.0",
39
39
  "@typescript/native-preview": "7.0.0-dev.20260217.1"
40
40
  },
@@ -92,6 +92,12 @@
92
92
  "import": "./dist/esm/zoom-blur.mjs",
93
93
  "require": "./dist/presentations/zoom-blur.js"
94
94
  },
95
+ "./zoom-in-out": {
96
+ "types": "./dist/presentations/zoom-in-out.d.ts",
97
+ "module": "./dist/esm/zoom-in-out.mjs",
98
+ "import": "./dist/esm/zoom-in-out.mjs",
99
+ "require": "./dist/presentations/zoom-in-out.js"
100
+ },
95
101
  "./none": {
96
102
  "types": "./dist/presentations/none.d.ts",
97
103
  "module": "./dist/esm/none.mjs",
@@ -131,6 +137,9 @@
131
137
  ],
132
138
  "zoom-blur": [
133
139
  "dist/presentations/zoom-blur.d.ts"
140
+ ],
141
+ "zoom-in-out": [
142
+ "dist/presentations/zoom-in-out.d.ts"
134
143
  ]
135
144
  }
136
145
  },
package/zoom-in-out.js ADDED
@@ -0,0 +1,2 @@
1
// Root-level CommonJS stub mirroring the "./zoom-in-out" subpath declared in
// package.json "exports", for tooling that resolves plain file paths instead.
// For backwards compatibility when you use `esm-wallaby`
module.exports = require('./dist/presentations/zoom-in-out.js');