@remotion/web-renderer 4.0.421 → 4.0.423

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. package/dist/add-sample.js +20 -0
  2. package/dist/artifact.js +56 -0
  3. package/dist/audio.js +42 -0
  4. package/dist/can-use-webfs-target.js +19 -0
  5. package/dist/compose.js +85 -0
  6. package/dist/create-audio-sample-source.d.ts +1 -1
  7. package/dist/create-scaffold.js +104 -0
  8. package/dist/drawing/border-radius.js +151 -0
  9. package/dist/drawing/calculate-object-fit.js +208 -0
  10. package/dist/drawing/calculate-transforms.js +127 -0
  11. package/dist/drawing/clamp-rect-to-parent-bounds.js +18 -0
  12. package/dist/drawing/do-rects-intersect.js +6 -0
  13. package/dist/drawing/draw-background.js +62 -0
  14. package/dist/drawing/draw-border.js +353 -0
  15. package/dist/drawing/draw-box-shadow.js +103 -0
  16. package/dist/drawing/draw-dom-element.js +85 -0
  17. package/dist/drawing/draw-element.js +84 -0
  18. package/dist/drawing/draw-outline.js +93 -0
  19. package/dist/drawing/draw-rounded.js +34 -0
  20. package/dist/drawing/drawn-fn.js +1 -0
  21. package/dist/drawing/fit-svg-into-its-dimensions.js +35 -0
  22. package/dist/drawing/get-clipped-background.d.ts +8 -0
  23. package/dist/drawing/get-clipped-background.js +14 -0
  24. package/dist/drawing/get-padding-box.js +30 -0
  25. package/dist/drawing/get-pretransform-rect.js +49 -0
  26. package/dist/drawing/handle-3d-transform.js +26 -0
  27. package/dist/drawing/handle-mask.js +21 -0
  28. package/dist/drawing/has-transform.js +14 -0
  29. package/dist/drawing/mask-image.js +14 -0
  30. package/dist/drawing/opacity.js +7 -0
  31. package/dist/drawing/overflow.js +14 -0
  32. package/dist/drawing/parse-linear-gradient.js +260 -0
  33. package/dist/drawing/parse-transform-origin.js +7 -0
  34. package/dist/drawing/precompose.d.ts +11 -0
  35. package/dist/drawing/precompose.js +14 -0
  36. package/dist/drawing/process-node.js +122 -0
  37. package/dist/drawing/round-to-expand-rect.js +7 -0
  38. package/dist/drawing/text/apply-text-transform.js +12 -0
  39. package/dist/drawing/text/draw-text.js +53 -0
  40. package/dist/drawing/text/find-line-breaks.text.js +118 -0
  41. package/dist/drawing/text/get-collapsed-text.d.ts +1 -0
  42. package/dist/drawing/text/get-collapsed-text.js +46 -0
  43. package/dist/drawing/text/handle-text-node.js +24 -0
  44. package/dist/drawing/transform-in-3d.js +177 -0
  45. package/dist/drawing/transform-rect-with-matrix.js +19 -0
  46. package/dist/drawing/transform.js +10 -0
  47. package/dist/drawing/turn-svg-into-drawable.js +41 -0
  48. package/dist/esm/index.mjs +11 -3
  49. package/dist/frame-range.d.ts +1 -1
  50. package/dist/frame-range.js +15 -0
  51. package/dist/get-audio-encoding-config.js +18 -0
  52. package/dist/get-biggest-bounding-client-rect.js +43 -0
  53. package/dist/index.js +2 -0
  54. package/dist/internal-state.js +36 -0
  55. package/dist/mediabunny-mappings.d.ts +1 -1
  56. package/dist/mediabunny-mappings.js +63 -0
  57. package/dist/output-target.js +1 -0
  58. package/dist/props-if-has-props.js +1 -0
  59. package/dist/render-media-on-web.js +304 -0
  60. package/dist/render-operations-queue.js +3 -0
  61. package/dist/render-still-on-web.js +110 -0
  62. package/dist/send-telemetry-event.js +22 -0
  63. package/dist/take-screenshot.js +30 -0
  64. package/dist/throttle-progress.js +43 -0
  65. package/dist/tree-walker-cleanup-after-children.js +33 -0
  66. package/dist/update-time.js +17 -0
  67. package/dist/validate-video-frame.js +34 -0
  68. package/dist/wait-for-ready.js +39 -0
  69. package/dist/walk-tree.js +14 -0
  70. package/dist/web-fs-target.js +41 -0
  71. package/dist/with-resolvers.js +9 -0
  72. package/package.json +9 -8
@@ -360,6 +360,7 @@ var getEncodableAudioCodecs = async (container, options) => {
360
360
  // src/render-media-on-web.tsx
361
361
  import { BufferTarget, StreamTarget } from "mediabunny";
362
362
  import { Internals as Internals8 } from "remotion";
363
+ import { VERSION } from "remotion/version";
363
364
 
364
365
  // src/add-sample.ts
365
366
  import { AudioSample, VideoSample } from "mediabunny";
@@ -790,10 +791,14 @@ var getRealFrameRange = (durationInFrames, frameRange) => {
790
791
  }
791
792
  return [frameRange, frameRange];
792
793
  }
793
- if (frameRange[1] >= durationInFrames || frameRange[0] < 0) {
794
- throw new Error(`The "durationInFrames" of the composition was evaluated to be ${durationInFrames}, but frame range ${frameRange.join("-")} is not inbetween 0-${durationInFrames - 1}`);
794
+ const resolved = [
795
+ frameRange[0],
796
+ frameRange[1] === null ? durationInFrames - 1 : frameRange[1]
797
+ ];
798
+ if (resolved[0] < 0 || resolved[1] >= durationInFrames || resolved[0] > resolved[1]) {
799
+ throw new Error(`The "durationInFrames" of the composition was evaluated to be ${durationInFrames}, but frame range ${resolved.join("-")} is not inbetween 0-${durationInFrames - 1}`);
795
800
  }
796
- return frameRange;
801
+ return resolved;
797
802
  };
798
803
 
799
804
  // src/internal-state.ts
@@ -3801,6 +3806,9 @@ var internalRenderMediaOnWeb = async ({
3801
3806
  format,
3802
3807
  target
3803
3808
  }), 0);
3809
+ outputWithCleanup.output.setMetadataTags({
3810
+ comment: `Made with Remotion ${VERSION}`
3811
+ });
3804
3812
  const throttledProgress = __using(__stack2, createThrottledProgressCallback(onProgress), 0);
3805
3813
  const throttledOnProgress = throttledProgress?.throttled ?? null;
3806
3814
  try {
@@ -1,2 +1,2 @@
1
- export type FrameRange = number | [number, number];
1
+ export type FrameRange = number | [number, number] | [number, null];
2
2
  export declare const getRealFrameRange: (durationInFrames: number, frameRange: FrameRange | null) => [number, number];
@@ -0,0 +1,15 @@
1
+ export const getRealFrameRange = (durationInFrames, frameRange) => {
2
+ if (frameRange === null) {
3
+ return [0, durationInFrames - 1];
4
+ }
5
+ if (typeof frameRange === 'number') {
6
+ if (frameRange < 0 || frameRange >= durationInFrames) {
7
+ throw new Error(`Frame number is out of range, must be between 0 and ${durationInFrames - 1} but got ${frameRange}`);
8
+ }
9
+ return [frameRange, frameRange];
10
+ }
11
+ if (frameRange[1] >= durationInFrames || frameRange[0] < 0) {
12
+ throw new Error(`The "durationInFrames" of the composition was evaluated to be ${durationInFrames}, but frame range ${frameRange.join('-')} is not inbetween 0-${durationInFrames - 1}`);
13
+ }
14
+ return frameRange;
15
+ };
@@ -0,0 +1,18 @@
1
+ import { canEncodeAudio, QUALITY_MEDIUM, } from 'mediabunny';
2
+ export const getDefaultAudioEncodingConfig = async () => {
3
+ const preferredDefaultAudioEncodingConfig = {
4
+ codec: 'aac',
5
+ bitrate: QUALITY_MEDIUM,
6
+ };
7
+ if (await canEncodeAudio(preferredDefaultAudioEncodingConfig.codec, preferredDefaultAudioEncodingConfig)) {
8
+ return preferredDefaultAudioEncodingConfig;
9
+ }
10
+ const backupDefaultAudioEncodingConfig = {
11
+ codec: 'opus',
12
+ bitrate: QUALITY_MEDIUM,
13
+ };
14
+ if (await canEncodeAudio(backupDefaultAudioEncodingConfig.codec, backupDefaultAudioEncodingConfig)) {
15
+ return backupDefaultAudioEncodingConfig;
16
+ }
17
+ return null;
18
+ };
@@ -0,0 +1,43 @@
1
+ import { parseBoxShadow } from './drawing/draw-box-shadow';
2
+ import { parseOutlineOffset, parseOutlineWidth } from './drawing/draw-outline';
3
+ import { skipToNextNonDescendant } from './walk-tree';
4
+ export const getBiggestBoundingClientRect = (element) => {
5
+ const treeWalker = document.createTreeWalker(element, NodeFilter.SHOW_ELEMENT);
6
+ let mostLeft = Infinity;
7
+ let mostTop = Infinity;
8
+ let mostRight = -Infinity;
9
+ let mostBottom = -Infinity;
10
+ while (true) {
11
+ const computedStyle = getComputedStyle(treeWalker.currentNode);
12
+ const outlineWidth = parseOutlineWidth(computedStyle.outlineWidth);
13
+ const outlineOffset = parseOutlineOffset(computedStyle.outlineOffset);
14
+ const rect = treeWalker.currentNode.getBoundingClientRect();
15
+ // Calculate box shadow extensions
16
+ const shadows = parseBoxShadow(computedStyle.boxShadow);
17
+ let shadowLeft = 0;
18
+ let shadowRight = 0;
19
+ let shadowTop = 0;
20
+ let shadowBottom = 0;
21
+ for (const shadow of shadows) {
22
+ if (!shadow.inset) {
23
+ shadowLeft = Math.max(shadowLeft, Math.abs(Math.min(shadow.offsetX, 0)) + shadow.blurRadius);
24
+ shadowRight = Math.max(shadowRight, Math.max(shadow.offsetX, 0) + shadow.blurRadius);
25
+ shadowTop = Math.max(shadowTop, Math.abs(Math.min(shadow.offsetY, 0)) + shadow.blurRadius);
26
+ shadowBottom = Math.max(shadowBottom, Math.max(shadow.offsetY, 0) + shadow.blurRadius);
27
+ }
28
+ }
29
+ mostLeft = Math.min(mostLeft, rect.left - outlineOffset - outlineWidth - shadowLeft);
30
+ mostTop = Math.min(mostTop, rect.top - outlineOffset - outlineWidth - shadowTop);
31
+ mostRight = Math.max(mostRight, rect.right + outlineOffset + outlineWidth + shadowRight);
32
+ mostBottom = Math.max(mostBottom, rect.bottom + outlineOffset + outlineWidth + shadowBottom);
33
+ if (computedStyle.overflow === 'hidden') {
34
+ if (!skipToNextNonDescendant(treeWalker)) {
35
+ break;
36
+ }
37
+ }
38
+ if (!treeWalker.nextNode()) {
39
+ break;
40
+ }
41
+ }
42
+ return new DOMRect(mostLeft, mostTop, mostRight - mostLeft, mostBottom - mostTop);
43
+ };
package/dist/index.js ADDED
@@ -0,0 +1,2 @@
1
+ export { renderMediaOnWeb } from './render-media-on-web';
2
+ export { renderStillOnWeb } from './render-still-on-web';
@@ -0,0 +1,36 @@
1
+ export const makeInternalState = () => {
2
+ let drawnPrecomposedPixels = 0;
3
+ let precomposedTextures = 0;
4
+ let waitForReadyTime = 0;
5
+ let addSampleTime = 0;
6
+ let createFrameTime = 0;
7
+ const helperCanvasState = {
8
+ current: null,
9
+ };
10
+ return {
11
+ getDrawn3dPixels: () => drawnPrecomposedPixels,
12
+ getPrecomposedTiles: () => precomposedTextures,
13
+ addPrecompose: ({ canvasWidth, canvasHeight, }) => {
14
+ drawnPrecomposedPixels += canvasWidth * canvasHeight;
15
+ precomposedTextures++;
16
+ },
17
+ helperCanvasState,
18
+ cleanup: () => {
19
+ if (helperCanvasState.current) {
20
+ helperCanvasState.current.cleanup();
21
+ }
22
+ },
23
+ getWaitForReadyTime: () => waitForReadyTime,
24
+ addWaitForReadyTime: (time) => {
25
+ waitForReadyTime += time;
26
+ },
27
+ getAddSampleTime: () => addSampleTime,
28
+ addAddSampleTime: (time) => {
29
+ addSampleTime += time;
30
+ },
31
+ getCreateFrameTime: () => createFrameTime,
32
+ addCreateFrameTime: (time) => {
33
+ createFrameTime += time;
34
+ },
35
+ };
36
+ };
@@ -12,4 +12,4 @@ export declare const getMimeType: (container: WebRendererContainer) => string;
12
12
  export declare const getDefaultAudioCodecForContainer: (container: WebRendererContainer) => WebRendererAudioCodec;
13
13
  export declare const getSupportedVideoCodecsForContainer: (container: WebRendererContainer) => WebRendererVideoCodec[];
14
14
  export declare const getSupportedAudioCodecsForContainer: (container: WebRendererContainer) => WebRendererAudioCodec[];
15
- export declare const audioCodecToMediabunnyAudioCodec: (audioCodec: WebRendererAudioCodec) => "aac" | "alaw" | "flac" | "mp3" | "opus" | "pcm-f32" | "pcm-f32be" | "pcm-f64" | "pcm-f64be" | "pcm-s16" | "pcm-s16be" | "pcm-s24" | "pcm-s24be" | "pcm-s32" | "pcm-s32be" | "pcm-s8" | "pcm-u8" | "ulaw" | "vorbis";
15
+ export declare const audioCodecToMediabunnyAudioCodec: (audioCodec: WebRendererAudioCodec) => "aac" | "ac3" | "alaw" | "eac3" | "flac" | "mp3" | "opus" | "pcm-f32" | "pcm-f32be" | "pcm-f64" | "pcm-f64be" | "pcm-s16" | "pcm-s16be" | "pcm-s24" | "pcm-s24be" | "pcm-s32" | "pcm-s32be" | "pcm-s8" | "pcm-u8" | "ulaw" | "vorbis";
@@ -0,0 +1,63 @@
1
+ import { Mp4OutputFormat, QUALITY_HIGH, QUALITY_LOW, QUALITY_MEDIUM, QUALITY_VERY_HIGH, QUALITY_VERY_LOW, WebMOutputFormat, } from 'mediabunny';
2
+ export const codecToMediabunnyCodec = (codec) => {
3
+ switch (codec) {
4
+ case 'h264':
5
+ return 'avc';
6
+ case 'h265':
7
+ return 'hevc';
8
+ case 'vp8':
9
+ return 'vp8';
10
+ case 'vp9':
11
+ return 'vp9';
12
+ case 'av1':
13
+ return 'av1';
14
+ default:
15
+ throw new Error(`Unsupported codec: ${codec}`);
16
+ }
17
+ };
18
+ export const containerToMediabunnyContainer = (container) => {
19
+ switch (container) {
20
+ case 'mp4':
21
+ return new Mp4OutputFormat();
22
+ case 'webm':
23
+ return new WebMOutputFormat();
24
+ default:
25
+ throw new Error(`Unsupported container: ${container}`);
26
+ }
27
+ };
28
+ export const getDefaultVideoCodecForContainer = (container) => {
29
+ switch (container) {
30
+ case 'mp4':
31
+ return 'h264';
32
+ case 'webm':
33
+ return 'vp8';
34
+ default:
35
+ throw new Error(`Unsupported container: ${container}`);
36
+ }
37
+ };
38
+ export const getQualityForWebRendererQuality = (quality) => {
39
+ switch (quality) {
40
+ case 'very-low':
41
+ return QUALITY_VERY_LOW;
42
+ case 'low':
43
+ return QUALITY_LOW;
44
+ case 'medium':
45
+ return QUALITY_MEDIUM;
46
+ case 'high':
47
+ return QUALITY_HIGH;
48
+ case 'very-high':
49
+ return QUALITY_VERY_HIGH;
50
+ default:
51
+ throw new Error(`Unsupported quality: ${quality}`);
52
+ }
53
+ };
54
+ export const getMimeType = (container) => {
55
+ switch (container) {
56
+ case 'mp4':
57
+ return 'video/mp4';
58
+ case 'webm':
59
+ return 'video/webm';
60
+ default:
61
+ throw new Error(`Unsupported container: ${container}`);
62
+ }
63
+ };
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,304 @@
1
+ import { AudioSampleSource, BufferTarget, Output, StreamTarget, VideoSampleSource, } from 'mediabunny';
2
+ import { Internals } from 'remotion';
3
+ import { addAudioSample, addVideoSampleAndCloseFrame } from './add-sample';
4
+ import { handleArtifacts } from './artifact';
5
+ import { onlyInlineAudio } from './audio';
6
+ import { canUseWebFsWriter } from './can-use-webfs-target';
7
+ import { createScaffold } from './create-scaffold';
8
+ import { getRealFrameRange } from './frame-range';
9
+ import { getDefaultAudioEncodingConfig } from './get-audio-encoding-config';
10
+ import { makeInternalState } from './internal-state';
11
+ import { codecToMediabunnyCodec, containerToMediabunnyContainer, getDefaultVideoCodecForContainer, getMimeType, getQualityForWebRendererQuality, } from './mediabunny-mappings';
12
+ import { onlyOneRenderAtATimeQueue } from './render-operations-queue';
13
+ import { sendUsageEvent } from './send-telemetry-event';
14
+ import { createFrame } from './take-screenshot';
15
+ import { createThrottledProgressCallback } from './throttle-progress';
16
+ import { validateVideoFrame } from './validate-video-frame';
17
+ import { waitForReady } from './wait-for-ready';
18
+ import { cleanupStaleOpfsFiles, createWebFsTarget } from './web-fs-target';
19
+ // TODO: More containers
20
+ // TODO: Audio
21
+ // TODO: Metadata
22
+ // TODO: Validating inputs
23
+ // TODO: Apply defaultCodec
24
+ const internalRenderMediaOnWeb = async ({ composition, inputProps, delayRenderTimeoutInMilliseconds, logLevel, mediaCacheSizeInBytes, schema, videoCodec: codec, container, signal, onProgress, hardwareAcceleration, keyframeIntervalInSeconds, videoBitrate, frameRange, transparent, onArtifact, onFrame, outputTarget: userDesiredOutputTarget, licenseKey, muted, }) => {
25
+ var _a, _b, _c, _d, _e, _f, _g;
26
+ const outputTarget = userDesiredOutputTarget === null
27
+ ? (await canUseWebFsWriter())
28
+ ? 'web-fs'
29
+ : 'arraybuffer'
30
+ : userDesiredOutputTarget;
31
+ if (outputTarget === 'web-fs') {
32
+ await cleanupStaleOpfsFiles();
33
+ }
34
+ const cleanupFns = [];
35
+ const format = containerToMediabunnyContainer(container);
36
+ if (codec &&
37
+ !format.getSupportedCodecs().includes(codecToMediabunnyCodec(codec))) {
38
+ return Promise.reject(new Error(`Codec ${codec} is not supported for container ${container}`));
39
+ }
40
+ const resolved = await Internals.resolveVideoConfig({
41
+ calculateMetadata: (_a = composition.calculateMetadata) !== null && _a !== void 0 ? _a : null,
42
+ signal: signal !== null && signal !== void 0 ? signal : new AbortController().signal,
43
+ defaultProps: (_b = composition.defaultProps) !== null && _b !== void 0 ? _b : {},
44
+ inputProps: inputProps !== null && inputProps !== void 0 ? inputProps : {},
45
+ compositionId: composition.id,
46
+ compositionDurationInFrames: (_c = composition.durationInFrames) !== null && _c !== void 0 ? _c : null,
47
+ compositionFps: (_d = composition.fps) !== null && _d !== void 0 ? _d : null,
48
+ compositionHeight: (_e = composition.height) !== null && _e !== void 0 ? _e : null,
49
+ compositionWidth: (_f = composition.width) !== null && _f !== void 0 ? _f : null,
50
+ });
51
+ const realFrameRange = getRealFrameRange(resolved.durationInFrames, frameRange);
52
+ if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
53
+ return Promise.reject(new Error('renderMediaOnWeb() was cancelled'));
54
+ }
55
+ const { delayRenderScope, div, cleanupScaffold, timeUpdater, collectAssets } = await createScaffold({
56
+ width: resolved.width,
57
+ height: resolved.height,
58
+ fps: resolved.fps,
59
+ durationInFrames: resolved.durationInFrames,
60
+ Component: composition.component,
61
+ resolvedProps: resolved.props,
62
+ id: resolved.id,
63
+ delayRenderTimeoutInMilliseconds,
64
+ logLevel,
65
+ mediaCacheSizeInBytes,
66
+ schema: schema !== null && schema !== void 0 ? schema : null,
67
+ audioEnabled: !muted,
68
+ videoEnabled: true,
69
+ initialFrame: 0,
70
+ defaultCodec: resolved.defaultCodec,
71
+ defaultOutName: resolved.defaultOutName,
72
+ });
73
+ const internalState = makeInternalState();
74
+ const artifactsHandler = handleArtifacts();
75
+ cleanupFns.push(() => {
76
+ cleanupScaffold();
77
+ });
78
+ const webFsTarget = outputTarget === 'web-fs' ? await createWebFsTarget() : null;
79
+ const target = webFsTarget
80
+ ? new StreamTarget(webFsTarget.stream)
81
+ : new BufferTarget();
82
+ const output = new Output({
83
+ format,
84
+ target,
85
+ });
86
+ try {
87
+ if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
88
+ throw new Error('renderMediaOnWeb() was cancelled');
89
+ }
90
+ await waitForReady({
91
+ timeoutInMilliseconds: delayRenderTimeoutInMilliseconds,
92
+ scope: delayRenderScope,
93
+ signal,
94
+ apiName: 'renderMediaOnWeb',
95
+ internalState,
96
+ });
97
+ if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
98
+ throw new Error('renderMediaOnWeb() was cancelled');
99
+ }
100
+ cleanupFns.push(() => {
101
+ if (output.state === 'finalized' || output.state === 'canceled') {
102
+ return;
103
+ }
104
+ output.cancel();
105
+ });
106
+ const videoSampleSource = new VideoSampleSource({
107
+ codec: codecToMediabunnyCodec(codec),
108
+ bitrate: typeof videoBitrate === 'number'
109
+ ? videoBitrate
110
+ : getQualityForWebRendererQuality(videoBitrate),
111
+ sizeChangeBehavior: 'deny',
112
+ hardwareAcceleration,
113
+ latencyMode: 'quality',
114
+ keyFrameInterval: keyframeIntervalInSeconds,
115
+ alpha: transparent ? 'keep' : 'discard',
116
+ });
117
+ cleanupFns.push(() => {
118
+ videoSampleSource.close();
119
+ });
120
+ output.addVideoTrack(videoSampleSource);
121
+ // TODO: Should be able to customize
122
+ let audioSampleSource = null;
123
+ if (!muted) {
124
+ const defaultAudioEncodingConfig = await getDefaultAudioEncodingConfig();
125
+ if (!defaultAudioEncodingConfig) {
126
+ return Promise.reject(new Error('No default audio encoding config found'));
127
+ }
128
+ audioSampleSource = new AudioSampleSource(defaultAudioEncodingConfig);
129
+ cleanupFns.push(() => {
130
+ audioSampleSource === null || audioSampleSource === void 0 ? void 0 : audioSampleSource.close();
131
+ });
132
+ output.addAudioTrack(audioSampleSource);
133
+ }
134
+ await output.start();
135
+ if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
136
+ throw new Error('renderMediaOnWeb() was cancelled');
137
+ }
138
+ const progress = {
139
+ renderedFrames: 0,
140
+ encodedFrames: 0,
141
+ };
142
+ const throttledOnProgress = createThrottledProgressCallback(onProgress);
143
+ for (let frame = realFrameRange[0]; frame <= realFrameRange[1]; frame++) {
144
+ if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
145
+ throw new Error('renderMediaOnWeb() was cancelled');
146
+ }
147
+ (_g = timeUpdater.current) === null || _g === void 0 ? void 0 : _g.update(frame);
148
+ await waitForReady({
149
+ timeoutInMilliseconds: delayRenderTimeoutInMilliseconds,
150
+ scope: delayRenderScope,
151
+ signal,
152
+ apiName: 'renderMediaOnWeb',
153
+ internalState,
154
+ });
155
+ if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
156
+ throw new Error('renderMediaOnWeb() was cancelled');
157
+ }
158
+ const createFrameStart = performance.now();
159
+ const imageData = await createFrame({
160
+ div,
161
+ width: resolved.width,
162
+ height: resolved.height,
163
+ logLevel,
164
+ internalState,
165
+ });
166
+ internalState.addCreateFrameTime(performance.now() - createFrameStart);
167
+ if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
168
+ throw new Error('renderMediaOnWeb() was cancelled');
169
+ }
170
+ const assets = collectAssets.current.collectAssets();
171
+ if (onArtifact) {
172
+ await artifactsHandler.handle({
173
+ imageData,
174
+ frame,
175
+ assets,
176
+ onArtifact,
177
+ });
178
+ }
179
+ if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
180
+ throw new Error('renderMediaOnWeb() was cancelled');
181
+ }
182
+ const audio = muted
183
+ ? null
184
+ : onlyInlineAudio({ assets, fps: resolved.fps, frame });
185
+ const timestamp = Math.round(((frame - realFrameRange[0]) / resolved.fps) * 1000000);
186
+ const videoFrame = new VideoFrame(imageData, {
187
+ timestamp,
188
+ });
189
+ progress.renderedFrames++;
190
+ throttledOnProgress === null || throttledOnProgress === void 0 ? void 0 : throttledOnProgress({ ...progress });
191
+ // Process frame through onFrame callback if provided
192
+ let frameToEncode = videoFrame;
193
+ if (onFrame) {
194
+ const returnedFrame = await onFrame(videoFrame);
195
+ if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
196
+ throw new Error('renderMediaOnWeb() was cancelled');
197
+ }
198
+ frameToEncode = validateVideoFrame({
199
+ originalFrame: videoFrame,
200
+ returnedFrame,
201
+ expectedWidth: resolved.width,
202
+ expectedHeight: resolved.height,
203
+ expectedTimestamp: timestamp,
204
+ });
205
+ }
206
+ const addSampleStart = performance.now();
207
+ await Promise.all([
208
+ addVideoSampleAndCloseFrame(frameToEncode, videoSampleSource),
209
+ audio && audioSampleSource
210
+ ? addAudioSample(audio, audioSampleSource)
211
+ : Promise.resolve(),
212
+ ]);
213
+ internalState.addAddSampleTime(performance.now() - addSampleStart);
214
+ progress.encodedFrames++;
215
+ throttledOnProgress === null || throttledOnProgress === void 0 ? void 0 : throttledOnProgress({ ...progress });
216
+ if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
217
+ throw new Error('renderMediaOnWeb() was cancelled');
218
+ }
219
+ }
220
+ // Call progress one final time to ensure final state is reported
221
+ onProgress === null || onProgress === void 0 ? void 0 : onProgress({ ...progress });
222
+ videoSampleSource.close();
223
+ audioSampleSource === null || audioSampleSource === void 0 ? void 0 : audioSampleSource.close();
224
+ await output.finalize();
225
+ Internals.Log.verbose({ logLevel, tag: 'web-renderer' }, `Render timings: waitForReady=${internalState.getWaitForReadyTime().toFixed(2)}ms, createFrame=${internalState.getCreateFrameTime().toFixed(2)}ms, addSample=${internalState.getAddSampleTime().toFixed(2)}ms`);
226
+ const mimeType = getMimeType(container);
227
+ if (webFsTarget) {
228
+ sendUsageEvent({
229
+ licenseKey: licenseKey !== null && licenseKey !== void 0 ? licenseKey : null,
230
+ succeeded: true,
231
+ apiName: 'renderMediaOnWeb',
232
+ });
233
+ await webFsTarget.close();
234
+ return {
235
+ getBlob: () => {
236
+ return webFsTarget.getBlob();
237
+ },
238
+ internalState,
239
+ };
240
+ }
241
+ if (!(target instanceof BufferTarget)) {
242
+ throw new Error('Expected target to be a BufferTarget');
243
+ }
244
+ sendUsageEvent({
245
+ licenseKey: licenseKey !== null && licenseKey !== void 0 ? licenseKey : null,
246
+ succeeded: true,
247
+ apiName: 'renderMediaOnWeb',
248
+ });
249
+ return {
250
+ getBlob: () => {
251
+ if (!target.buffer) {
252
+ throw new Error('The resulting buffer is empty');
253
+ }
254
+ return Promise.resolve(new Blob([target.buffer], { type: mimeType }));
255
+ },
256
+ internalState,
257
+ };
258
+ }
259
+ catch (err) {
260
+ sendUsageEvent({
261
+ succeeded: false,
262
+ licenseKey: licenseKey !== null && licenseKey !== void 0 ? licenseKey : null,
263
+ apiName: 'renderMediaOnWeb',
264
+ }).catch((err2) => {
265
+ Internals.Log.error({ logLevel: 'error', tag: 'web-renderer' }, 'Failed to send usage event', err2);
266
+ });
267
+ throw err;
268
+ }
269
+ finally {
270
+ cleanupFns.forEach((fn) => fn());
271
+ }
272
+ };
273
+ export const renderMediaOnWeb = (options) => {
274
+ var _a, _b;
275
+ const container = (_a = options.container) !== null && _a !== void 0 ? _a : 'mp4';
276
+ const codec = (_b = options.videoCodec) !== null && _b !== void 0 ? _b : getDefaultVideoCodecForContainer(container);
277
+ onlyOneRenderAtATimeQueue.ref = onlyOneRenderAtATimeQueue.ref
278
+ .catch(() => Promise.resolve())
279
+ .then(() => {
280
+ var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s;
281
+ return internalRenderMediaOnWeb({
282
+ ...options,
283
+ delayRenderTimeoutInMilliseconds: (_a = options.delayRenderTimeoutInMilliseconds) !== null && _a !== void 0 ? _a : 30000,
284
+ logLevel: (_c = (_b = options.logLevel) !== null && _b !== void 0 ? _b : window.remotion_logLevel) !== null && _c !== void 0 ? _c : 'info',
285
+ schema: (_d = options.schema) !== null && _d !== void 0 ? _d : undefined,
286
+ mediaCacheSizeInBytes: (_e = options.mediaCacheSizeInBytes) !== null && _e !== void 0 ? _e : null,
287
+ videoCodec: codec,
288
+ container,
289
+ signal: (_f = options.signal) !== null && _f !== void 0 ? _f : null,
290
+ onProgress: (_g = options.onProgress) !== null && _g !== void 0 ? _g : null,
291
+ hardwareAcceleration: (_h = options.hardwareAcceleration) !== null && _h !== void 0 ? _h : 'no-preference',
292
+ keyframeIntervalInSeconds: (_j = options.keyframeIntervalInSeconds) !== null && _j !== void 0 ? _j : 5,
293
+ videoBitrate: (_k = options.videoBitrate) !== null && _k !== void 0 ? _k : 'medium',
294
+ frameRange: (_l = options.frameRange) !== null && _l !== void 0 ? _l : null,
295
+ transparent: (_m = options.transparent) !== null && _m !== void 0 ? _m : false,
296
+ onArtifact: (_o = options.onArtifact) !== null && _o !== void 0 ? _o : null,
297
+ onFrame: (_p = options.onFrame) !== null && _p !== void 0 ? _p : null,
298
+ outputTarget: (_q = options.outputTarget) !== null && _q !== void 0 ? _q : null,
299
+ licenseKey: (_r = options.licenseKey) !== null && _r !== void 0 ? _r : undefined,
300
+ muted: (_s = options.muted) !== null && _s !== void 0 ? _s : false,
301
+ });
302
+ });
303
+ return onlyOneRenderAtATimeQueue.ref;
304
+ };
@@ -0,0 +1,3 @@
1
+ export const onlyOneRenderAtATimeQueue = {
2
+ ref: Promise.resolve(),
3
+ };
@@ -0,0 +1,110 @@
1
+ import { Internals, } from 'remotion';
2
+ import { handleArtifacts } from './artifact';
3
+ import { createScaffold } from './create-scaffold';
4
+ import { makeInternalState } from './internal-state';
5
+ import { onlyOneRenderAtATimeQueue } from './render-operations-queue';
6
+ import { sendUsageEvent } from './send-telemetry-event';
7
+ import { takeScreenshot } from './take-screenshot';
8
+ import { waitForReady } from './wait-for-ready';
9
+ async function internalRenderStillOnWeb({ frame, delayRenderTimeoutInMilliseconds, logLevel, inputProps, schema, imageFormat, mediaCacheSizeInBytes, composition, signal, onArtifact, licenseKey, }) {
10
+ var _a, _b, _c, _d, _e, _f;
11
+ const resolved = await Internals.resolveVideoConfig({
12
+ calculateMetadata: (_a = composition.calculateMetadata) !== null && _a !== void 0 ? _a : null,
13
+ signal: signal !== null && signal !== void 0 ? signal : new AbortController().signal,
14
+ defaultProps: (_b = composition.defaultProps) !== null && _b !== void 0 ? _b : {},
15
+ inputProps: inputProps !== null && inputProps !== void 0 ? inputProps : {},
16
+ compositionId: composition.id,
17
+ compositionDurationInFrames: (_c = composition.durationInFrames) !== null && _c !== void 0 ? _c : null,
18
+ compositionFps: (_d = composition.fps) !== null && _d !== void 0 ? _d : null,
19
+ compositionHeight: (_e = composition.height) !== null && _e !== void 0 ? _e : null,
20
+ compositionWidth: (_f = composition.width) !== null && _f !== void 0 ? _f : null,
21
+ });
22
+ if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
23
+ return Promise.reject(new Error('renderStillOnWeb() was cancelled'));
24
+ }
25
+ const internalState = makeInternalState();
26
+ const { delayRenderScope, div, cleanupScaffold, collectAssets } = await createScaffold({
27
+ width: resolved.width,
28
+ height: resolved.height,
29
+ delayRenderTimeoutInMilliseconds,
30
+ logLevel,
31
+ resolvedProps: resolved.props,
32
+ id: resolved.id,
33
+ mediaCacheSizeInBytes,
34
+ audioEnabled: false,
35
+ Component: composition.component,
36
+ videoEnabled: true,
37
+ durationInFrames: resolved.durationInFrames,
38
+ fps: resolved.fps,
39
+ schema: schema !== null && schema !== void 0 ? schema : null,
40
+ initialFrame: frame,
41
+ defaultCodec: resolved.defaultCodec,
42
+ defaultOutName: resolved.defaultOutName,
43
+ });
44
+ const artifactsHandler = handleArtifacts();
45
+ try {
46
+ if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
47
+ throw new Error('renderStillOnWeb() was cancelled');
48
+ }
49
+ await waitForReady({
50
+ timeoutInMilliseconds: delayRenderTimeoutInMilliseconds,
51
+ scope: delayRenderScope,
52
+ signal,
53
+ apiName: 'renderStillOnWeb',
54
+ internalState: null,
55
+ });
56
+ if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
57
+ throw new Error('renderStillOnWeb() was cancelled');
58
+ }
59
+ const imageData = await takeScreenshot({
60
+ div,
61
+ width: resolved.width,
62
+ height: resolved.height,
63
+ imageFormat,
64
+ logLevel,
65
+ internalState,
66
+ });
67
+ const assets = collectAssets.current.collectAssets();
68
+ if (onArtifact) {
69
+ await artifactsHandler.handle({ imageData, frame, assets, onArtifact });
70
+ }
71
+ sendUsageEvent({
72
+ licenseKey: licenseKey !== null && licenseKey !== void 0 ? licenseKey : null,
73
+ succeeded: true,
74
+ apiName: 'renderStillOnWeb',
75
+ });
76
+ return { blob: imageData, internalState };
77
+ }
78
+ catch (err) {
79
+ sendUsageEvent({
80
+ succeeded: false,
81
+ licenseKey: licenseKey !== null && licenseKey !== void 0 ? licenseKey : null,
82
+ apiName: 'renderStillOnWeb',
83
+ }).catch((err2) => {
84
+ Internals.Log.error({ logLevel: 'error', tag: 'web-renderer' }, 'Failed to send usage event', err2);
85
+ });
86
+ throw err;
87
+ }
88
+ finally {
89
+ internalState.cleanup();
90
+ cleanupScaffold();
91
+ }
92
+ }
93
+ export const renderStillOnWeb = (options) => {
94
+ onlyOneRenderAtATimeQueue.ref = onlyOneRenderAtATimeQueue.ref
95
+ .catch(() => Promise.resolve())
96
+ .then(() => {
97
+ var _a, _b, _c, _d, _e, _f, _g, _h;
98
+ return internalRenderStillOnWeb({
99
+ ...options,
100
+ delayRenderTimeoutInMilliseconds: (_a = options.delayRenderTimeoutInMilliseconds) !== null && _a !== void 0 ? _a : 30000,
101
+ logLevel: (_c = (_b = options.logLevel) !== null && _b !== void 0 ? _b : window.remotion_logLevel) !== null && _c !== void 0 ? _c : 'info',
102
+ schema: (_d = options.schema) !== null && _d !== void 0 ? _d : undefined,
103
+ mediaCacheSizeInBytes: (_e = options.mediaCacheSizeInBytes) !== null && _e !== void 0 ? _e : null,
104
+ signal: (_f = options.signal) !== null && _f !== void 0 ? _f : null,
105
+ onArtifact: (_g = options.onArtifact) !== null && _g !== void 0 ? _g : null,
106
+ licenseKey: (_h = options.licenseKey) !== null && _h !== void 0 ? _h : undefined,
107
+ });
108
+ });
109
+ return onlyOneRenderAtATimeQueue.ref;
110
+ };