@remotion/web-renderer 4.0.400 → 4.0.401
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/esm/index.mjs +175 -152
- package/dist/get-audio-sample-source.d.ts +8 -0
- package/dist/mediabunny-cleanups.d.ts +10 -0
- package/package.json +6 -6
package/dist/esm/index.mjs
CHANGED
@@ -37,13 +37,7 @@ var __callDispose = (stack, error, hasError) => {
 };

 // src/render-media-on-web.tsx
-import {
-  AudioSampleSource,
-  BufferTarget,
-  Output,
-  StreamTarget,
-  VideoSampleSource
-} from "mediabunny";
+import { BufferTarget, StreamTarget } from "mediabunny";
 import { Internals as Internals7 } from "remotion";

 // src/add-sample.ts
@@ -404,6 +398,11 @@ var getRealFrameRange = (durationInFrames, frameRange) => {
   return frameRange;
 };

+// src/get-audio-sample-source.ts
+import {
+  AudioSampleSource
+} from "mediabunny";
+
 // src/get-audio-encoding-config.ts
 import {
   canEncodeAudio,
@@ -427,6 +426,23 @@ var getDefaultAudioEncodingConfig = async () => {
   return null;
 };

+// src/get-audio-sample-source.ts
+var addAudioSampleSource = async ({
+  muted,
+  output
+}) => {
+  if (muted) {
+    return null;
+  }
+  const defaultAudioEncodingConfig = await getDefaultAudioEncodingConfig();
+  if (!defaultAudioEncodingConfig) {
+    throw new Error("No default audio encoding config found");
+  }
+  const audioSampleSource = new AudioSampleSource(defaultAudioEncodingConfig);
+  output.addAudioTrack(audioSampleSource);
+  return { audioSampleSource, [Symbol.dispose]: () => audioSampleSource.close() };
+};
+
 // src/internal-state.ts
 var makeInternalState = () => {
   let drawnPrecomposedPixels = 0;
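The new addAudioSampleSource helper returns either null (for muted renders) or an object that carries a [Symbol.dispose] method next to the AudioSampleSource it registered on the output. That shape is what explicit resource management expects: bind it with a using declaration and the dispose callback runs when the scope exits, whether by a normal return or a thrown error. A minimal sketch of the pattern, with illustrative names that are not taken from this package:

// Sketch only: a disposable wrapper in the style of addAudioSampleSource.
// Assumes a TypeScript 5.2+ setup with Symbol.dispose available at runtime.
const openResource = () => {
  const resource = { close: () => console.log("closed") };
  return {
    resource,
    [Symbol.dispose]: () => resource.close(),
  };
};

const run = () => {
  using handle = openResource(); // cleanup is registered for this scope
  // ...use handle.resource here...
  // handle[Symbol.dispose]() runs when run() exits, on return or on throw,
  // which is what the __using/__callDispose helpers in this bundle emulate.
};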
@@ -473,6 +489,30 @@ var makeInternalState = () => {
   };
 };

+// src/mediabunny-cleanups.ts
+import { Output, VideoSampleSource } from "mediabunny";
+var makeOutputWithCleanup = (options) => {
+  const output = new Output(options);
+  return {
+    output,
+    [Symbol.dispose]: () => {
+      if (output.state === "finalized" || output.state === "canceled") {
+        return;
+      }
+      output.cancel();
+    }
+  };
+};
+var makeVideoSampleSourceCleanup = (encodingConfig) => {
+  const videoSampleSource = new VideoSampleSource(encodingConfig);
+  return {
+    videoSampleSource,
+    [Symbol.dispose]: () => {
+      videoSampleSource.close();
+    }
+  };
+};
+
 // src/mediabunny-mappings.ts
 import {
   Mp4OutputFormat,
@@ -1322,7 +1362,7 @@ var calculateTransforms = ({
   if (!elementComputedStyle) {
     throw new Error("Element computed style not found");
   }
-  const needs3DTransformViaWebGL = !totalMatrix.
+  const needs3DTransformViaWebGL = !totalMatrix.isIdentity;
   const needsMaskImage = maskImageInfo !== null;
   return {
     dimensions,
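The 3D-transform condition in calculateTransforms now keys off the matrix's isIdentity flag (the removed line is truncated in this diff, so the previous condition is not visible here). isIdentity is the standard DOMMatrixReadOnly property that is true only when no transform is applied at all; a quick illustration, independent of the package:

// DOMMatrix is a browser API; isIdentity is false for any non-trivial transform.
const untouched = new DOMMatrix();
const translated = new DOMMatrix().translate(10, 0);
const rotated3d = new DOMMatrix().rotateAxisAngle(1, 0, 0, 45);
console.log(untouched.isIdentity, translated.isIdentity, rotated3d.isIdentity); // true false false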
@@ -2777,16 +2817,13 @@ var processNode = async ({
     });
   }
   if (precompositing.needs3DTransformViaWebGL) {
-
+    drawable = handle3dTransform({
       matrix: totalMatrix,
       precomposeRect,
       tempCanvas: drawable,
       rectAfterTransforms,
       internalState
     });
-    if (t) {
-      drawable = t;
-    }
   }
   const previousTransform = context.getTransform();
   if (drawable) {
@@ -3406,13 +3443,12 @@ var internalRenderMediaOnWeb = async ({
   licenseKey,
   muted
 }) => {
-  let
+  let __stack2 = [];
   try {
     const outputTarget = userDesiredOutputTarget === null ? await canUseWebFsWriter() ? "web-fs" : "arraybuffer" : userDesiredOutputTarget;
     if (outputTarget === "web-fs") {
       await cleanupStaleOpfsFiles();
     }
-    const cleanupFns = [];
     const format = containerToMediabunnyContainer(container);
     if (codec && !format.getSupportedCodecs().includes(codecToMediabunnyCodec(codec))) {
       return Promise.reject(new Error(`Codec ${codec} is not supported for container ${container}`));
@@ -3432,7 +3468,7 @@ var internalRenderMediaOnWeb = async ({
     if (signal?.aborted) {
       return Promise.reject(new Error("renderMediaOnWeb() was cancelled"));
     }
-    const scaffold = __using(
+    const scaffold = __using(__stack2, await createScaffold({
       width: resolved.width,
       height: resolved.height,
       fps: resolved.fps,
@@ -3451,73 +3487,20 @@ var internalRenderMediaOnWeb = async ({
       defaultOutName: resolved.defaultOutName
     }), 0);
     const { delayRenderScope, div, timeUpdater, collectAssets } = scaffold;
-    const internalState = __using(
+    const internalState = __using(__stack2, makeInternalState(), 0);
     const artifactsHandler = handleArtifacts();
     const webFsTarget = outputTarget === "web-fs" ? await createWebFsTarget() : null;
     const target = webFsTarget ? new StreamTarget(webFsTarget.stream) : new BufferTarget;
-    const
+    const outputWithCleanup = __using(__stack2, makeOutputWithCleanup({
       format,
       target
-    });
+    }), 0);
     try {
-
-
-      }
-      await waitForReady({
-        timeoutInMilliseconds: delayRenderTimeoutInMilliseconds,
-        scope: delayRenderScope,
-        signal,
-        apiName: "renderMediaOnWeb",
-        internalState
-      });
-      if (signal?.aborted) {
-        throw new Error("renderMediaOnWeb() was cancelled");
-      }
-      cleanupFns.push(() => {
-        if (output.state === "finalized" || output.state === "canceled") {
-          return;
-        }
-        output.cancel();
-      });
-      const videoSampleSource = new VideoSampleSource({
-        codec: codecToMediabunnyCodec(codec),
-        bitrate: typeof videoBitrate === "number" ? videoBitrate : getQualityForWebRendererQuality(videoBitrate),
-        sizeChangeBehavior: "deny",
-        hardwareAcceleration,
-        latencyMode: "quality",
-        keyFrameInterval: keyframeIntervalInSeconds,
-        alpha: transparent ? "keep" : "discard"
-      });
-      cleanupFns.push(() => {
-        videoSampleSource.close();
-      });
-      output.addVideoTrack(videoSampleSource);
-      let audioSampleSource = null;
-      if (!muted) {
-        const defaultAudioEncodingConfig = await getDefaultAudioEncodingConfig();
-        if (!defaultAudioEncodingConfig) {
-          return Promise.reject(new Error("No default audio encoding config found"));
-        }
-        audioSampleSource = new AudioSampleSource(defaultAudioEncodingConfig);
-        cleanupFns.push(() => {
-          audioSampleSource?.close();
-        });
-        output.addAudioTrack(audioSampleSource);
-      }
-      await output.start();
-      if (signal?.aborted) {
-        throw new Error("renderMediaOnWeb() was cancelled");
-      }
-      const progress = {
-        renderedFrames: 0,
-        encodedFrames: 0
-      };
-      const throttledOnProgress = createThrottledProgressCallback(onProgress);
-      for (let frame = realFrameRange[0];frame <= realFrameRange[1]; frame++) {
+      let __stack = [];
+      try {
         if (signal?.aborted) {
           throw new Error("renderMediaOnWeb() was cancelled");
         }
-        timeUpdater.current?.update(frame);
         await waitForReady({
           timeoutInMilliseconds: delayRenderTimeoutInMilliseconds,
           scope: delayRenderScope,
@@ -3528,102 +3511,144 @@ var internalRenderMediaOnWeb = async ({
         if (signal?.aborted) {
           throw new Error("renderMediaOnWeb() was cancelled");
         }
-        const
-
-
-
-
-
-
-
-
+        const videoSampleSource = __using(__stack, makeVideoSampleSourceCleanup({
+          codec: codecToMediabunnyCodec(codec),
+          bitrate: typeof videoBitrate === "number" ? videoBitrate : getQualityForWebRendererQuality(videoBitrate),
+          sizeChangeBehavior: "deny",
+          hardwareAcceleration,
+          latencyMode: "quality",
+          keyFrameInterval: keyframeIntervalInSeconds,
+          alpha: transparent ? "keep" : "discard"
+        }), 0);
+        outputWithCleanup.output.addVideoTrack(videoSampleSource.videoSampleSource);
+        const audioSampleSource = __using(__stack, await addAudioSampleSource({
+          muted,
+          output: outputWithCleanup.output
+        }), 0);
+        await outputWithCleanup.output.start();
         if (signal?.aborted) {
           throw new Error("renderMediaOnWeb() was cancelled");
         }
-        const
-
-
-        }
-
-
-        let frameToEncode = videoFrame;
-        if (onFrame) {
-          const returnedFrame = await onFrame(videoFrame);
+        const progress = {
+          renderedFrames: 0,
+          encodedFrames: 0
+        };
+        const throttledOnProgress = createThrottledProgressCallback(onProgress);
+        for (let frame = realFrameRange[0];frame <= realFrameRange[1]; frame++) {
          if (signal?.aborted) {
            throw new Error("renderMediaOnWeb() was cancelled");
          }
-
-
-
-
-
-
+          timeUpdater.current?.update(frame);
+          await waitForReady({
+            timeoutInMilliseconds: delayRenderTimeoutInMilliseconds,
+            scope: delayRenderScope,
+            signal,
+            apiName: "renderMediaOnWeb",
+            internalState
          });
-
-
-
-
-          await
-
-
-
-
+          if (signal?.aborted) {
+            throw new Error("renderMediaOnWeb() was cancelled");
+          }
+          const createFrameStart = performance.now();
+          const imageData = await createFrame({
+            div,
+            width: resolved.width,
+            height: resolved.height,
+            logLevel,
+            internalState
+          });
+          internalState.addCreateFrameTime(performance.now() - createFrameStart);
+          if (signal?.aborted) {
+            throw new Error("renderMediaOnWeb() was cancelled");
+          }
+          const timestamp = Math.round((frame - realFrameRange[0]) / resolved.fps * 1e6);
+          const videoFrame = new VideoFrame(imageData, {
+            timestamp
          });
+          progress.renderedFrames++;
+          throttledOnProgress?.({ ...progress });
+          let frameToEncode = videoFrame;
+          if (onFrame) {
+            const returnedFrame = await onFrame(videoFrame);
+            if (signal?.aborted) {
+              throw new Error("renderMediaOnWeb() was cancelled");
+            }
+            frameToEncode = validateVideoFrame({
+              originalFrame: videoFrame,
+              returnedFrame,
+              expectedWidth: resolved.width,
+              expectedHeight: resolved.height,
+              expectedTimestamp: timestamp
+            });
+          }
+          const audioCombineStart = performance.now();
+          const assets = collectAssets.current.collectAssets();
+          if (onArtifact) {
+            await artifactsHandler.handle({
+              imageData,
+              frame,
+              assets,
+              onArtifact
+            });
+          }
+          if (signal?.aborted) {
+            throw new Error("renderMediaOnWeb() was cancelled");
+          }
+          const audio = muted ? null : onlyInlineAudio({ assets, fps: resolved.fps, frame });
+          internalState.addAudioMixingTime(performance.now() - audioCombineStart);
+          const addSampleStart = performance.now();
+          await Promise.all([
+            addVideoSampleAndCloseFrame(frameToEncode, videoSampleSource.videoSampleSource),
+            audio && audioSampleSource ? addAudioSample(audio, audioSampleSource.audioSampleSource) : Promise.resolve()
+          ]);
+          internalState.addAddSampleTime(performance.now() - addSampleStart);
+          progress.encodedFrames++;
+          throttledOnProgress?.({ ...progress });
+          if (signal?.aborted) {
+            throw new Error("renderMediaOnWeb() was cancelled");
+          }
        }
-
-
+        onProgress?.({ ...progress });
+        videoSampleSource.videoSampleSource.close();
+        audioSampleSource?.audioSampleSource.close();
+        await outputWithCleanup.output.finalize();
+        Internals7.Log.verbose({ logLevel, tag: "web-renderer" }, `Render timings: waitForReady=${internalState.getWaitForReadyTime().toFixed(2)}ms, createFrame=${internalState.getCreateFrameTime().toFixed(2)}ms, addSample=${internalState.getAddSampleTime().toFixed(2)}ms, audioMixing=${internalState.getAudioMixingTime().toFixed(2)}ms`);
+        if (webFsTarget) {
+          sendUsageEvent({
+            licenseKey: licenseKey ?? null,
+            succeeded: true,
+            apiName: "renderMediaOnWeb"
+          });
+          await webFsTarget.close();
+          return {
+            getBlob: () => {
+              return webFsTarget.getBlob();
+            },
+            internalState
+          };
        }
-
-
-          const addSampleStart = performance.now();
-          await Promise.all([
-            addVideoSampleAndCloseFrame(frameToEncode, videoSampleSource),
-            audio && audioSampleSource ? addAudioSample(audio, audioSampleSource) : Promise.resolve()
-          ]);
-          internalState.addAddSampleTime(performance.now() - addSampleStart);
-          progress.encodedFrames++;
-          throttledOnProgress?.({ ...progress });
-          if (signal?.aborted) {
-            throw new Error("renderMediaOnWeb() was cancelled");
+        if (!(target instanceof BufferTarget)) {
+          throw new Error("Expected target to be a BufferTarget");
        }
-          }
-        onProgress?.({ ...progress });
-        videoSampleSource.close();
-        audioSampleSource?.close();
-        await output.finalize();
-        Internals7.Log.verbose({ logLevel, tag: "web-renderer" }, `Render timings: waitForReady=${internalState.getWaitForReadyTime().toFixed(2)}ms, createFrame=${internalState.getCreateFrameTime().toFixed(2)}ms, addSample=${internalState.getAddSampleTime().toFixed(2)}ms, audioMixing=${internalState.getAudioMixingTime().toFixed(2)}ms`);
-        const mimeType = getMimeType(container);
-        if (webFsTarget) {
        sendUsageEvent({
          licenseKey: licenseKey ?? null,
          succeeded: true,
          apiName: "renderMediaOnWeb"
        });
-        await webFsTarget.close();
        return {
          getBlob: () => {
-
+            if (!target.buffer) {
+              throw new Error("The resulting buffer is empty");
+            }
+            return Promise.resolve(new Blob([target.buffer], { type: getMimeType(container) }));
          },
          internalState
        };
+      } catch (_catch) {
+        var _err = _catch, _hasErr = 1;
+      } finally {
+        __callDispose(__stack, _err, _hasErr);
      }
-      if (!(target instanceof BufferTarget)) {
-        throw new Error("Expected target to be a BufferTarget");
-      }
-      sendUsageEvent({
-        licenseKey: licenseKey ?? null,
-        succeeded: true,
-        apiName: "renderMediaOnWeb"
-      });
-      return {
-        getBlob: () => {
-          if (!target.buffer) {
-            throw new Error("The resulting buffer is empty");
-          }
-          return Promise.resolve(new Blob([target.buffer], { type: mimeType }));
-        },
-        internalState
-      };
    } catch (err) {
      if (!signal?.aborted) {
        sendUsageEvent({
@@ -3635,13 +3660,11 @@ var internalRenderMediaOnWeb = async ({
        });
      }
      throw err;
-  } finally {
-    cleanupFns.forEach((fn) => fn());
    }
-  } catch (
-  var
+  } catch (_catch2) {
+    var _err2 = _catch2, _hasErr2 = 1;
  } finally {
-    __callDispose(
+    __callDispose(__stack2, _err2, _hasErr2);
  }
 };
 var renderMediaOnWeb = (options) => {
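Taken together, the index.mjs changes drop the hand-rolled cleanupFns array and lean on the __using/__callDispose scaffolding that bundlers emit for using declarations: each acquired resource (scaffold, internal state, output wrapper, video and audio sample sources) is pushed onto a stack, __stack2 for the outer scope and __stack for the inner encode scope, and disposed in a finally block regardless of whether the render finishes, is aborted, or throws. A simplified model of what those two helpers do, assuming the usual esbuild-style shape; the bundle's own definitions sit above the hunks shown here and may differ in detail:

// Simplified model only; the real helpers also validate values and aggregate errors.
type DisposableLike = { [Symbol.dispose]?: () => void } | null | undefined;

const __using = <T extends DisposableLike>(stack: DisposableLike[], value: T, _async?: number): T => {
  if (value) {
    stack.push(value); // remember the resource so it can be disposed later
  }
  return value;
};

const __callDispose = (stack: DisposableLike[], error: unknown, hasError: boolean) => {
  // dispose in reverse order of acquisition, like nested using blocks
  for (let i = stack.length - 1; i >= 0; i--) {
    stack[i]?.[Symbol.dispose]?.();
  }
  if (hasError) {
    throw error; // re-throw the original error after cleanup has run
  }
};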
package/dist/get-audio-sample-source.d.ts
ADDED

@@ -0,0 +1,8 @@
+import { AudioSampleSource, type BufferTarget, type Output, type OutputFormat, type StreamTarget } from 'mediabunny';
+export declare const addAudioSampleSource: ({ muted, output, }: {
+    muted: boolean;
+    output: Output<OutputFormat, BufferTarget | StreamTarget>;
+}) => Promise<{
+    audioSampleSource: AudioSampleSource;
+    [Symbol.dispose]: () => void;
+} | null>;
package/dist/mediabunny-cleanups.d.ts
ADDED

@@ -0,0 +1,10 @@
+import type { OutputFormat, OutputOptions, Target, VideoEncodingConfig } from 'mediabunny';
+import { Output, VideoSampleSource } from 'mediabunny';
+export declare const makeOutputWithCleanup: <T extends OutputFormat, U extends Target>(options: OutputOptions<T, U>) => {
+    output: Output<T, U>;
+    [Symbol.dispose]: () => void;
+};
+export declare const makeVideoSampleSourceCleanup: (encodingConfig: VideoEncodingConfig) => {
+    videoSampleSource: VideoSampleSource;
+    [Symbol.dispose]: () => void;
+};
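Going by these declarations, a caller can bind both helpers (and addAudioSampleSource above) with using so that the Output is cancelled and the sample sources closed on any early exit. A hypothetical sketch; the concrete format, target and encoding values are illustrative and not taken from the diff:

import { BufferTarget, Mp4OutputFormat } from 'mediabunny';
import { addAudioSampleSource } from './get-audio-sample-source';
import { makeOutputWithCleanup, makeVideoSampleSourceCleanup } from './mediabunny-cleanups';

const encode = async () => {
  using outputWithCleanup = makeOutputWithCleanup({
    format: new Mp4OutputFormat(),
    target: new BufferTarget(),
  });
  using video = makeVideoSampleSourceCleanup({
    codec: 'avc', // assumption: an H.264 codec id accepted by mediabunny
    bitrate: 5_000_000,
  });
  outputWithCleanup.output.addVideoTrack(video.videoSampleSource);
  using audio = await addAudioSampleSource({
    muted: false,
    output: outputWithCleanup.output,
  });
  await outputWithCleanup.output.start();
  // ...add video samples, and audio samples via audio?.audioSampleSource...
  await outputWithCleanup.output.finalize();
  // If anything above throws, the registered dispose hooks still run: the
  // sample sources are closed and the output is cancelled, unless it is
  // already finalized or canceled (the state check in makeOutputWithCleanup).
};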
package/package.json
CHANGED
@@ -3,7 +3,7 @@
     "url": "https://github.com/remotion-dev/remotion/tree/main/packages/web-renderer"
   },
   "name": "@remotion/web-renderer",
-  "version": "4.0.400",
+  "version": "4.0.401",
   "main": "dist/index.js",
   "type": "module",
   "sideEffects": false,
@@ -18,14 +18,14 @@
   "author": "Remotion <jonny@remotion.dev>",
   "license": "UNLICENSED",
   "dependencies": {
-    "@remotion/licensing": "4.0.
-    "remotion": "4.0.
+    "@remotion/licensing": "4.0.401",
+    "remotion": "4.0.401",
     "mediabunny": "1.27.3"
   },
   "devDependencies": {
-    "@remotion/eslint-config-internal": "4.0.
-    "@remotion/player": "4.0.
-    "@remotion/media": "4.0.
+    "@remotion/eslint-config-internal": "4.0.401",
+    "@remotion/player": "4.0.401",
+    "@remotion/media": "4.0.401",
     "@typescript/native-preview": "7.0.0-dev.20260105.1",
     "@vitejs/plugin-react": "4.1.0",
     "@vitest/browser-playwright": "4.0.9",