@remotion/webcodecs 4.0.325 → 4.0.329
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/esm/index.mjs +201 -21
- package/dist/extract-frames.d.ts +2 -1
- package/dist/extract-frames.js +41 -8
- package/dist/get-partial-audio-data.d.ts +8 -0
- package/dist/get-partial-audio-data.js +156 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +3 -1
- package/dist/on-frame.d.ts +1 -1
- package/dist/on-frame.js +3 -3
- package/dist/reencode-video-track.js +2 -2
- package/dist/rotate-and-resize-video-frame.js +25 -11
- package/package.json +5 -5
package/dist/esm/index.mjs
CHANGED

@@ -365,23 +365,33 @@ var rotateAndResizeVideoFrame = ({
   needsToBeMultipleOfTwo = false,
   resizeOperation
 }) => {
-  const normalized = (rotation
-
+  const normalized = normalizeVideoRotation(rotation);
+  const mustProcess = "rotation" in frame && frame.rotation !== 0;
+  if (normalized === 0 && resizeOperation === null && !mustProcess) {
     return frame;
   }
   if (normalized % 90 !== 0) {
     throw new Error("Only 90 degree rotations are supported");
   }
-  const
+  const tentativeDimensions = calculateNewDimensionsFromRotateAndScale({
     height: frame.displayHeight,
     width: frame.displayWidth,
     rotation,
     needsToBeMultipleOfTwo,
     resizeOperation
   });
-  if (normalized === 0 && height === frame.displayHeight && width === frame.displayWidth) {
+  if (normalized === 0 && tentativeDimensions.height === frame.displayHeight && tentativeDimensions.width === frame.displayWidth && !mustProcess) {
     return frame;
   }
+  const frameRotation = frame.rotation ?? 0;
+  const canvasRotationToApply = normalizeVideoRotation(normalized - frameRotation);
+  const { width, height } = calculateNewDimensionsFromRotateAndScale({
+    height: frame.displayHeight,
+    width: frame.displayWidth,
+    rotation: canvasRotationToApply,
+    needsToBeMultipleOfTwo,
+    resizeOperation
+  });
   const canvas = new OffscreenCanvas(width, height);
   const ctx = canvas.getContext("2d");
   if (!ctx) {

@@ -389,20 +399,20 @@ var rotateAndResizeVideoFrame = ({
   }
   canvas.width = width;
   canvas.height = height;
-  if (
+  if (canvasRotationToApply === 90) {
     ctx.translate(width, 0);
-  } else if (
+  } else if (canvasRotationToApply === 180) {
     ctx.translate(width, height);
-  } else if (
+  } else if (canvasRotationToApply === 270) {
     ctx.translate(0, height);
   }
-  if (
-    ctx.rotate(
+  if (canvasRotationToApply !== 0) {
+    ctx.rotate(canvasRotationToApply * (Math.PI / 180));
   }
   if (frame.displayHeight !== height || frame.displayWidth !== width) {
     const dimensionsAfterRotate = calculateNewDimensionsFromRotate({
       height: frame.displayHeight,
-      rotation,
+      rotation: canvasRotationToApply,
       width: frame.displayWidth
     });
     ctx.scale(width / dimensionsAfterRotate.width, height / dimensionsAfterRotate.height);

@@ -4872,7 +4882,7 @@ var convertToCorrectVideoFrame = ({
 };

 // src/on-frame.ts
-var
+var processFrame = async ({
   frame: unrotatedFrame,
   onVideoFrame,
   track,

@@ -5067,7 +5077,7 @@ var reencodeVideoTrack = async ({
   if (videoOperation.type !== "reencode") {
     throw new Error(`Video track with ID ${track.trackId} could not be resolved with a valid operation. Received ${JSON.stringify(videoOperation)}, but must be either "copy", "reencode", "drop" or "fail"`);
   }
-  const rotation = (videoOperation.rotate ?? rotate)
+  const rotation = (videoOperation.rotate ?? rotate) + track.rotation;
   const { height: newHeight, width: newWidth } = calculateNewDimensionsFromRotateAndScale({
     width: track.codedWidth,
     height: track.codedHeight,

@@ -5139,7 +5149,7 @@ var reencodeVideoTrack = async ({
   },
   onOutput: async (frame) => {
     await controller._internals._mediaParserController._internals.checkForAbortAndPause();
-    const processedFrame = await
+    const processedFrame = await processFrame({
      frame,
      track,
      onVideoFrame,

@@ -5592,7 +5602,7 @@ import {
 import { parseMediaOnWebWorker } from "@remotion/media-parser/worker";
 var internalExtractFrames = ({
   src,
-  onFrame
+  onFrame,
   signal,
   timestampsInSeconds,
   acknowledgeRemotionLicense,

@@ -5615,21 +5625,41 @@ var internalExtractFrames = ({
   onDurationInSeconds(durationInSeconds) {
     dur = durationInSeconds;
   },
-  onVideoTrack: async ({ track }) => {
+  onVideoTrack: async ({ track, container }) => {
     const timestampTargetsUnsorted = typeof timestampsInSeconds === "function" ? await timestampsInSeconds({
       track,
+      container,
       durationInSeconds: dur
     }) : timestampsInSeconds;
     const timestampTargets = timestampTargetsUnsorted.sort((a, b) => a - b);
+    if (timestampTargets.length === 0) {
+      throw new Error("expected at least one timestamp to extract but found zero");
+    }
     controller.seek(timestampTargets[0]);
+    let lastFrame;
     const decoder = createVideoDecoder({
       onFrame: (frame) => {
-
-
-        onFrame2(frame);
-      } else {
+        Log.trace(logLevel, "Received frame with timestamp", frame.timestamp);
+        if (expectedFrames.length === 0) {
           frame.close();
+          return;
+        }
+        if (frame.timestamp < expectedFrames[0] - 1) {
+          if (lastFrame) {
+            lastFrame.close();
+          }
+          lastFrame = frame;
+          return;
         }
+        if (expectedFrames[0] + 6667 < frame.timestamp && lastFrame) {
+          onFrame(lastFrame);
+          expectedFrames.shift();
+          lastFrame = frame;
+          return;
+        }
+        expectedFrames.shift();
+        onFrame(frame);
+        lastFrame = frame;
       },
       onError: (e) => {
         controller.abort();

@@ -5645,18 +5675,23 @@ var internalExtractFrames = ({
     expectedFrames.push(timestampTargets.shift() * WEBCODECS_TIMESCALE);
     while (queued.length > 0) {
       const sam = queued.shift();
+      if (!sam) {
+        throw new Error("Sample is undefined");
+      }
       await decoder.waitForQueueToBeLessThan(10);
+      Log.trace(logLevel, "Decoding sample", sam.timestamp);
       await decoder.decode(sam);
     }
   };
   return async (sample) => {
     const nextTimestampWeWant = timestampTargets[0];
+    Log.trace(logLevel, "Received sample with dts", sample.decodingTimestamp, "and cts", sample.timestamp);
     if (sample.type === "key") {
       await decoder.flush();
       queued.length = 0;
     }
     queued.push(sample);
-    if (sample.
+    if (sample.decodingTimestamp >= timestampTargets[timestampTargets.length - 1] * WEBCODECS_TIMESCALE) {
       await doProcess();
       await decoder.flush();
       controller.abort();

@@ -5665,7 +5700,7 @@ var internalExtractFrames = ({
     if (nextTimestampWeWant === undefined) {
       throw new Error("this should not happen");
     }
-    if (sample.
+    if (sample.decodingTimestamp >= nextTimestampWeWant * WEBCODECS_TIMESCALE) {
       await doProcess();
       if (timestampTargets.length === 0) {
         await decoder.flush();

@@ -5673,7 +5708,11 @@ var internalExtractFrames = ({
     }
   }
   return async () => {
+    await doProcess();
     await decoder.flush();
+    if (lastFrame) {
+      lastFrame.close();
+    }
   };
 };
}

@@ -5713,6 +5752,146 @@ var getAvailableAudioCodecs = ({
   }
   throw new Error(`Unsupported container: ${container}`);
 };
+// src/get-partial-audio-data.ts
+import {
+  hasBeenAborted as hasBeenAborted2,
+  mediaParserController as mediaParserController3,
+  parseMedia
+} from "@remotion/media-parser";
+var extractOverlappingAudioSamples = ({
+  sample,
+  fromSeconds,
+  toSeconds,
+  channelIndex,
+  timescale: timescale2
+}) => {
+  const chunkStartInSeconds = sample.timestamp / timescale2;
+  const chunkDuration = sample.numberOfFrames / sample.sampleRate;
+  const chunkEndInSeconds = chunkStartInSeconds + chunkDuration;
+  const overlapStartSecond = Math.max(chunkStartInSeconds, fromSeconds);
+  const overlapEndSecond = Math.min(chunkEndInSeconds, toSeconds);
+  if (overlapStartSecond >= overlapEndSecond) {
+    return null;
+  }
+  const { numberOfChannels } = sample;
+  const samplesPerChannel = sample.numberOfFrames;
+  let data;
+  if (numberOfChannels === 1) {
+    data = new Float32Array(sample.allocationSize({ format: "f32", planeIndex: 0 }));
+    sample.copyTo(data, { format: "f32", planeIndex: 0 });
+  } else {
+    const allChannelsData = new Float32Array(sample.allocationSize({ format: "f32", planeIndex: 0 }));
+    sample.copyTo(allChannelsData, { format: "f32", planeIndex: 0 });
+    data = new Float32Array(samplesPerChannel);
+    for (let i = 0; i < samplesPerChannel; i++) {
+      data[i] = allChannelsData[i * numberOfChannels + channelIndex];
+    }
+  }
+  const startSampleInChunk = Math.floor((overlapStartSecond - chunkStartInSeconds) * sample.sampleRate);
+  const endSampleInChunk = Math.ceil((overlapEndSecond - chunkStartInSeconds) * sample.sampleRate);
+  return data.slice(startSampleInChunk, endSampleInChunk);
+};
+var BUFFER_IN_SECONDS = 0.1;
+var getPartialAudioData = async ({
+  src,
+  fromSeconds,
+  toSeconds,
+  channelIndex,
+  signal
+}) => {
+  const controller = mediaParserController3();
+  const audioSamples = [];
+  if (signal.aborted) {
+    throw new Error("Operation was aborted");
+  }
+  const { resolve: resolveAudioDecode, promise: audioDecodePromise } = Promise.withResolvers();
+  const onAbort = () => {
+    controller.abort();
+    resolveAudioDecode();
+  };
+  signal.addEventListener("abort", onAbort, { once: true });
+  try {
+    if (fromSeconds > 0) {
+      controller.seek(fromSeconds);
+    }
+    await parseMedia({
+      acknowledgeRemotionLicense: true,
+      src,
+      controller,
+      onAudioTrack: ({ track }) => {
+        if (signal.aborted) {
+          return null;
+        }
+        const audioDecoder = createAudioDecoder({
+          track,
+          onFrame: (sample) => {
+            if (signal.aborted) {
+              sample.close();
+              return;
+            }
+            const trimmedData = extractOverlappingAudioSamples({
+              sample,
+              fromSeconds,
+              toSeconds,
+              channelIndex,
+              timescale: track.timescale
+            });
+            if (trimmedData) {
+              audioSamples.push(trimmedData);
+            }
+            sample.close();
+          },
+          onError(error) {
+            resolveAudioDecode();
+            throw error;
+          }
+        });
+        return async (sample) => {
+          if (signal.aborted) {
+            audioDecoder.close();
+            controller.abort();
+            return;
+          }
+          if (!audioDecoder) {
+            throw new Error("No audio decoder found");
+          }
+          const fromSecondsWithBuffer = fromSeconds === 0 ? fromSeconds : fromSeconds + BUFFER_IN_SECONDS;
+          const toSecondsWithBuffer = toSeconds - BUFFER_IN_SECONDS;
+          const time = sample.timestamp / track.timescale;
+          if (time < fromSecondsWithBuffer) {
+            return;
+          }
+          if (time >= toSecondsWithBuffer) {
+            audioDecoder.flush().then(() => {
+              audioDecoder.close();
+              resolveAudioDecode();
+            });
+            controller.abort();
+            return;
+          }
+          await audioDecoder.waitForQueueToBeLessThan(10);
+          audioDecoder.decode(sample);
+        };
+      }
+    });
+  } catch (err) {
+    const isAbortedByTimeCutoff = hasBeenAborted2(err);
+    if (!isAbortedByTimeCutoff && !signal.aborted) {
+      throw err;
+    }
+  } finally {
+    signal.removeEventListener("abort", onAbort);
+  }
+  await audioDecodePromise;
+  const totalSamples = audioSamples.reduce((sum, sample) => sum + sample.length, 0);
+  const result = new Float32Array(totalSamples);
+  let offset = 0;
+  for (const audioSample of audioSamples) {
+    result.set(audioSample, offset);
+    offset += audioSample.length;
+  }
+  return result;
+};

 // src/index.ts
 var WebCodecsInternals = {

@@ -5724,6 +5903,7 @@ setRemotionImported();
 export {
   webcodecsController,
   rotateAndResizeVideoFrame,
+  getPartialAudioData,
   getDefaultVideoCodec,
   getDefaultAudioCodec,
   getAvailableVideoCodecs,
package/dist/extract-frames.d.ts
CHANGED

@@ -1,6 +1,7 @@
-import type { MediaParserLogLevel, MediaParserVideoTrack } from '@remotion/media-parser';
+import type { MediaParserContainer, MediaParserLogLevel, MediaParserVideoTrack } from '@remotion/media-parser';
 export type ExtractFramesTimestampsInSecondsFn = (options: {
     track: MediaParserVideoTrack;
+    container: MediaParserContainer;
     durationInSeconds: number | null;
 }) => Promise<number[]> | number[];
 export declare const extractFrames: (options: {
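
`ExtractFramesTimestampsInSecondsFn` now also receives the `container` of the parsed media, so a timestamp callback can branch on the format. A hedged usage sketch, assuming the public `extractFrames` accepts the same options that `internalExtractFrames` destructures below (`src`, `onFrame`, `timestampsInSeconds`, `acknowledgeRemotionLicense`) and that `'webm'` is one of the `MediaParserContainer` values:

```ts
import {extractFrames} from '@remotion/webcodecs';

await extractFrames({
  acknowledgeRemotionLicense: true,
  src: 'https://example.com/video.webm', // placeholder URL
  timestampsInSeconds: ({track, container, durationInSeconds}) => {
    // `container` is new in 4.0.329; e.g. sample WebM files more coarsely.
    const count = container === 'webm' ? 3 : 5;
    const dur = durationInSeconds ?? 1;
    return new Array(count).fill(0).map((_, i) => (i * dur) / count);
  },
  onFrame: (frame) => {
    // Inspect or draw the frame, then release it.
    frame.close();
  },
});
```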
package/dist/extract-frames.js
CHANGED

@@ -5,6 +5,7 @@ const media_parser_1 = require("@remotion/media-parser");
 const worker_1 = require("@remotion/media-parser/worker");
 const create_video_decoder_1 = require("./create-video-decoder");
 const with_resolvers_1 = require("./create/with-resolvers");
+const log_1 = require("./log");
 const internalExtractFrames = ({ src, onFrame, signal, timestampsInSeconds, acknowledgeRemotionLicense, logLevel, }) => {
     const controller = (0, media_parser_1.mediaParserController)();
     const expectedFrames = [];

@@ -23,24 +24,46 @@ const internalExtractFrames = ({ src, onFrame, signal, timestampsInSeconds, ackn
         onDurationInSeconds(durationInSeconds) {
             dur = durationInSeconds;
         },
-        onVideoTrack: async ({ track }) => {
+        onVideoTrack: async ({ track, container }) => {
             const timestampTargetsUnsorted = typeof timestampsInSeconds === 'function'
                 ? await timestampsInSeconds({
                     track,
+                    container,
                     durationInSeconds: dur,
                 })
                 : timestampsInSeconds;
             const timestampTargets = timestampTargetsUnsorted.sort((a, b) => a - b);
+            if (timestampTargets.length === 0) {
+                throw new Error('expected at least one timestamp to extract but found zero');
+            }
             controller.seek(timestampTargets[0]);
+            let lastFrame;
             const decoder = (0, create_video_decoder_1.createVideoDecoder)({
                 onFrame: (frame) => {
-
-
-                    onFrame(frame);
-                }
-                else {
+                    log_1.Log.trace(logLevel, 'Received frame with timestamp', frame.timestamp);
+                    if (expectedFrames.length === 0) {
                         frame.close();
+                        return;
+                    }
+                    if (frame.timestamp < expectedFrames[0] - 1) {
+                        if (lastFrame) {
+                            lastFrame.close();
+                        }
+                        lastFrame = frame;
+                        return;
+                    }
+                    // A WebM might have a timestamp of 67000 but we request 66666
+                    // See a test with this problem in it-tests/rendering/frame-accuracy.test.ts
+                    // Solution: We allow a 10.000ms - 3.333ms = 6.667ms difference between the requested timestamp and the actual timestamp
+                    if (expectedFrames[0] + 6667 < frame.timestamp && lastFrame) {
+                        onFrame(lastFrame);
+                        expectedFrames.shift();
+                        lastFrame = frame;
+                        return;
                     }
+                    expectedFrames.shift();
+                    onFrame(frame);
+                    lastFrame = frame;
                 },
                 onError: (e) => {
                     controller.abort();

@@ -57,18 +80,23 @@ const internalExtractFrames = ({ src, onFrame, signal, timestampsInSeconds, ackn
             expectedFrames.push(timestampTargets.shift() * media_parser_1.WEBCODECS_TIMESCALE);
             while (queued.length > 0) {
                 const sam = queued.shift();
+                if (!sam) {
+                    throw new Error('Sample is undefined');
+                }
                 await decoder.waitForQueueToBeLessThan(10);
+                log_1.Log.trace(logLevel, 'Decoding sample', sam.timestamp);
                 await decoder.decode(sam);
             }
         };
         return async (sample) => {
             const nextTimestampWeWant = timestampTargets[0];
+            log_1.Log.trace(logLevel, 'Received sample with dts', sample.decodingTimestamp, 'and cts', sample.timestamp);
             if (sample.type === 'key') {
                 await decoder.flush();
                 queued.length = 0;
             }
             queued.push(sample);
-            if (sample.
+            if (sample.decodingTimestamp >=
                 timestampTargets[timestampTargets.length - 1] * media_parser_1.WEBCODECS_TIMESCALE) {
                 await doProcess();
                 await decoder.flush();

@@ -78,7 +106,8 @@ const internalExtractFrames = ({ src, onFrame, signal, timestampsInSeconds, ackn
             if (nextTimestampWeWant === undefined) {
                 throw new Error('this should not happen');
             }
-            if (sample.
+            if (sample.decodingTimestamp >=
+                nextTimestampWeWant * media_parser_1.WEBCODECS_TIMESCALE) {
                 await doProcess();
                 if (timestampTargets.length === 0) {
                     await decoder.flush();

@@ -86,7 +115,11 @@ const internalExtractFrames = ({ src, onFrame, signal, timestampsInSeconds, ackn
                 }
             }
             return async () => {
+                await doProcess();
                 await decoder.flush();
+                if (lastFrame) {
+                    lastFrame.close();
+                }
             };
         };
     },
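
The magic number `6667` in the hunk above is in WebCodecs timestamp units, i.e. microseconds: per the inline comment, a 10.000 ms − 3.333 ms = 6.667 ms tolerance between the requested and the actual frame timestamp. A standalone sketch of the matching rule, assuming `WEBCODECS_TIMESCALE` is 1,000,000 (microseconds):

```ts
// All values in microseconds.
const requested = 66_666; // target: frame 2 of a 30 fps video (2/30 s)
const webmFrame = 67_000; // WebM stores a rounded timestamp

// Within the 6_667 µs tolerance: treat it as the requested frame.
console.log(requested + 6_667 < webmFrame); // false -> deliver this frame

// Far past the target: fall back to the buffered `lastFrame`,
// the closest frame seen before the target.
const lateFrame = 100_000;
console.log(requested + 6_667 < lateFrame); // true -> emit lastFrame instead
```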
package/dist/get-partial-audio-data.d.ts
ADDED

@@ -0,0 +1,8 @@
+export type GetPartialAudioDataProps = {
+    src: string;
+    fromSeconds: number;
+    toSeconds: number;
+    channelIndex: number;
+    signal: AbortSignal;
+};
+export declare const getPartialAudioData: ({ src, fromSeconds, toSeconds, channelIndex, signal, }: GetPartialAudioDataProps) => Promise<Float32Array>;
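
This declaration is the whole public surface of the new API: pass a source, a time window, a channel index, and an `AbortSignal`, and get the de-interleaved samples back as a `Float32Array`. A minimal usage sketch based on the declaration above (the URL is a placeholder):

```ts
import {getPartialAudioData} from '@remotion/webcodecs';

const abortController = new AbortController();

// Samples of channel 0 between 2 s and 3 s.
const data = await getPartialAudioData({
  src: 'https://example.com/audio.mp4', // placeholder source
  fromSeconds: 2,
  toSeconds: 3,
  channelIndex: 0,
  signal: abortController.signal,
});

// Roughly sampleRate * (toSeconds - fromSeconds) mono samples.
console.log(data.length);

// abortController.abort() would cancel parsing and decoding mid-flight.
```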
package/dist/get-partial-audio-data.js
ADDED

@@ -0,0 +1,156 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getPartialAudioData = void 0;
+const media_parser_1 = require("@remotion/media-parser");
+const create_audio_decoder_1 = require("./create-audio-decoder");
+/**
+ * Extract the portion of an audio chunk that overlaps with the requested time window
+ */
+const extractOverlappingAudioSamples = ({ sample, fromSeconds, toSeconds, channelIndex, timescale, }) => {
+    const chunkStartInSeconds = sample.timestamp / timescale;
+    const chunkDuration = sample.numberOfFrames / sample.sampleRate;
+    const chunkEndInSeconds = chunkStartInSeconds + chunkDuration;
+    // Calculate overlap with the requested window
+    const overlapStartSecond = Math.max(chunkStartInSeconds, fromSeconds);
+    const overlapEndSecond = Math.min(chunkEndInSeconds, toSeconds);
+    // Skip if no overlap with requested window
+    if (overlapStartSecond >= overlapEndSecond) {
+        return null;
+    }
+    // For multi-channel audio, we need to handle channels properly
+    const { numberOfChannels } = sample;
+    const samplesPerChannel = sample.numberOfFrames;
+    let data;
+    if (numberOfChannels === 1) {
+        // Mono audio
+        data = new Float32Array(sample.allocationSize({ format: 'f32', planeIndex: 0 }));
+        sample.copyTo(data, { format: 'f32', planeIndex: 0 });
+    }
+    else {
+        // Multi-channel audio: extract specific channel
+        const allChannelsData = new Float32Array(sample.allocationSize({ format: 'f32', planeIndex: 0 }));
+        sample.copyTo(allChannelsData, { format: 'f32', planeIndex: 0 });
+        // Extract the specific channel (interleaved audio)
+        data = new Float32Array(samplesPerChannel);
+        for (let i = 0; i < samplesPerChannel; i++) {
+            data[i] = allChannelsData[i * numberOfChannels + channelIndex];
+        }
+    }
+    // Calculate which samples to keep from this chunk
+    const startSampleInChunk = Math.floor((overlapStartSecond - chunkStartInSeconds) * sample.sampleRate);
+    const endSampleInChunk = Math.ceil((overlapEndSecond - chunkStartInSeconds) * sample.sampleRate);
+    // Only keep the samples we need
+    return data.slice(startSampleInChunk, endSampleInChunk);
+};
+// Small buffer to ensure we capture chunks that span across boundaries.
+// We need this because the specified time window is not always aligned with the audio chunks,
+// so we fetch a bit more and then trim it down to the requested time window.
+const BUFFER_IN_SECONDS = 0.1;
+const getPartialAudioData = async ({ src, fromSeconds, toSeconds, channelIndex, signal, }) => {
+    const controller = (0, media_parser_1.mediaParserController)();
+    // Collect audio samples
+    const audioSamples = [];
+    // Abort if the signal is already aborted
+    if (signal.aborted) {
+        throw new Error('Operation was aborted');
+    }
+    // Forward abort signal immediately to the controller
+    const { resolve: resolveAudioDecode, promise: audioDecodePromise } = Promise.withResolvers();
+    const onAbort = () => {
+        controller.abort();
+        resolveAudioDecode();
+    };
+    signal.addEventListener('abort', onAbort, { once: true });
+    try {
+        if (fromSeconds > 0) {
+            controller.seek(fromSeconds);
+        }
+        await (0, media_parser_1.parseMedia)({
+            acknowledgeRemotionLicense: true,
+            src,
+            controller,
+            onAudioTrack: ({ track }) => {
+                if (signal.aborted) {
+                    return null;
+                }
+                const audioDecoder = (0, create_audio_decoder_1.createAudioDecoder)({
+                    track,
+                    onFrame: (sample) => {
+                        if (signal.aborted) {
+                            sample.close();
+                            return;
+                        }
+                        const trimmedData = extractOverlappingAudioSamples({
+                            sample,
+                            fromSeconds,
+                            toSeconds,
+                            channelIndex,
+                            timescale: track.timescale,
+                        });
+                        if (trimmedData) {
+                            audioSamples.push(trimmedData);
+                        }
+                        sample.close();
+                    },
+                    onError(error) {
+                        resolveAudioDecode();
+                        throw error;
+                    },
+                });
+                return async (sample) => {
+                    if (signal.aborted) {
+                        audioDecoder.close();
+                        controller.abort();
+                        return;
+                    }
+                    if (!audioDecoder) {
+                        throw new Error('No audio decoder found');
+                    }
+                    const fromSecondsWithBuffer = fromSeconds === 0 ? fromSeconds : fromSeconds + BUFFER_IN_SECONDS;
+                    const toSecondsWithBuffer = toSeconds - BUFFER_IN_SECONDS;
+                    // Convert timestamp using the track's timescale
+                    const time = sample.timestamp / track.timescale;
+                    // Skip samples that are before our requested start time (with buffer)
+                    if (time < fromSecondsWithBuffer) {
+                        return;
+                    }
+                    // Stop immediately when we reach our target time (with buffer)
+                    if (time >= toSecondsWithBuffer) {
+                        // wait until decoder is done
+                        audioDecoder.flush().then(() => {
+                            audioDecoder.close();
+                            resolveAudioDecode();
+                        });
+                        controller.abort();
+                        return;
+                    }
+                    await audioDecoder.waitForQueueToBeLessThan(10);
+                    // we're waiting for the queue above anyway, enqueue in sync mode
+                    audioDecoder.decode(sample);
+                };
+            },
+        });
+    }
+    catch (err) {
+        const isAbortedByTimeCutoff = (0, media_parser_1.hasBeenAborted)(err);
+        // Don't throw if we stopped the parsing ourselves
+        if (!isAbortedByTimeCutoff && !signal.aborted) {
+            throw err;
+        }
+    }
+    finally {
+        // Clean up the event listener
+        signal.removeEventListener('abort', onAbort);
+    }
+    await audioDecodePromise;
+    // Simply concatenate all audio data since we've already trimmed each chunk
+    const totalSamples = audioSamples.reduce((sum, sample) => sum + sample.length, 0);
+    const result = new Float32Array(totalSamples);
+    let offset = 0;
+    for (const audioSample of audioSamples) {
+        result.set(audioSample, offset);
+        offset += audioSample.length;
+    }
+    return result;
+};
+exports.getPartialAudioData = getPartialAudioData;
package/dist/index.d.ts
CHANGED

@@ -23,6 +23,7 @@ export { getAvailableVideoCodecs } from './get-available-video-codecs';
 export type { ConvertMediaVideoCodec } from './get-available-video-codecs';
 export { getDefaultAudioCodec } from './get-default-audio-codec';
 export { getDefaultVideoCodec } from './get-default-video-codec';
+export { getPartialAudioData, GetPartialAudioDataProps, } from './get-partial-audio-data';
 export type { AudioOperation, ConvertMediaOnAudioTrackHandler, } from './on-audio-track-handler';
 export type { ConvertMediaOnVideoTrackHandler, VideoOperation, } from './on-video-track-handler';
 export type { ResizeOperation } from './resizing/mode';
package/dist/index.js
CHANGED

@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.WebCodecsInternals = exports.webcodecsController = exports.createVideoEncoder = exports.rotateAndResizeVideoFrame = exports.getDefaultVideoCodec = exports.getDefaultAudioCodec = exports.getAvailableVideoCodecs = exports.getAvailableContainers = exports.getAvailableAudioCodecs = exports.extractFrames = exports.defaultOnVideoTrackHandler = exports.defaultOnAudioTrackHandler = exports.createVideoDecoder = exports.createAudioDecoder = exports.convertMedia = exports.convertAudioData = exports.canReencodeVideoTrack = exports.canReencodeAudioTrack = exports.canCopyVideoTrack = exports.canCopyAudioTrack = exports.createAudioEncoder = void 0;
+exports.WebCodecsInternals = exports.webcodecsController = exports.createVideoEncoder = exports.rotateAndResizeVideoFrame = exports.getPartialAudioData = exports.getDefaultVideoCodec = exports.getDefaultAudioCodec = exports.getAvailableVideoCodecs = exports.getAvailableContainers = exports.getAvailableAudioCodecs = exports.extractFrames = exports.defaultOnVideoTrackHandler = exports.defaultOnAudioTrackHandler = exports.createVideoDecoder = exports.createAudioDecoder = exports.convertMedia = exports.convertAudioData = exports.canReencodeVideoTrack = exports.canReencodeAudioTrack = exports.canCopyVideoTrack = exports.canCopyAudioTrack = exports.createAudioEncoder = void 0;
 const rotate_and_resize_video_frame_1 = require("./rotate-and-resize-video-frame");
 const rotation_1 = require("./rotation");
 const set_remotion_imported_1 = require("./set-remotion-imported");

@@ -38,6 +38,8 @@ var get_default_audio_codec_1 = require("./get-default-audio-codec");
 Object.defineProperty(exports, "getDefaultAudioCodec", { enumerable: true, get: function () { return get_default_audio_codec_1.getDefaultAudioCodec; } });
 var get_default_video_codec_1 = require("./get-default-video-codec");
 Object.defineProperty(exports, "getDefaultVideoCodec", { enumerable: true, get: function () { return get_default_video_codec_1.getDefaultVideoCodec; } });
+var get_partial_audio_data_1 = require("./get-partial-audio-data");
+Object.defineProperty(exports, "getPartialAudioData", { enumerable: true, get: function () { return get_partial_audio_data_1.getPartialAudioData; } });
 var rotate_and_resize_video_frame_2 = require("./rotate-and-resize-video-frame");
 Object.defineProperty(exports, "rotateAndResizeVideoFrame", { enumerable: true, get: function () { return rotate_and_resize_video_frame_2.rotateAndResizeVideoFrame; } });
 var video_encoder_1 = require("./video-encoder");
package/dist/on-frame.d.ts
CHANGED

@@ -2,7 +2,7 @@ import type { MediaParserVideoTrack } from '@remotion/media-parser';
 import type { ConvertMediaOnVideoFrame } from './convert-media';
 import type { ConvertMediaVideoCodec } from './get-available-video-codecs';
 import type { ResizeOperation } from './resizing/mode';
-export declare const
+export declare const processFrame: ({ frame: unrotatedFrame, onVideoFrame, track, outputCodec, rotation, resizeOperation, }: {
     frame: VideoFrame;
     onVideoFrame: ConvertMediaOnVideoFrame | null;
     track: MediaParserVideoTrack;
package/dist/on-frame.js
CHANGED

@@ -1,10 +1,10 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.processFrame = void 0;
 const browser_quirks_1 = require("./browser-quirks");
 const convert_to_correct_videoframe_1 = require("./convert-to-correct-videoframe");
 const rotate_and_resize_video_frame_1 = require("./rotate-and-resize-video-frame");
-const
+const processFrame = async ({ frame: unrotatedFrame, onVideoFrame, track, outputCodec, rotation, resizeOperation, }) => {
     const rotated = (0, rotate_and_resize_video_frame_1.rotateAndResizeVideoFrame)({
         rotation,
         frame: unrotatedFrame,

@@ -43,4 +43,4 @@ const onFrame = async ({ frame: unrotatedFrame, onVideoFrame, track, outputCodec
     }
     return fixedFrame;
 };
-exports.
+exports.processFrame = processFrame;
package/dist/reencode-video-track.js
CHANGED

@@ -16,7 +16,7 @@ const reencodeVideoTrack = async ({ videoOperation, rotate, track, logLevel, abo
     if (videoOperation.type !== 'reencode') {
         throw new Error(`Video track with ID ${track.trackId} could not be resolved with a valid operation. Received ${JSON.stringify(videoOperation)}, but must be either "copy", "reencode", "drop" or "fail"`);
     }
-    const rotation = (videoOperation.rotate ?? rotate)
+    const rotation = (videoOperation.rotate ?? rotate) + track.rotation;
     const { height: newHeight, width: newWidth } = (0, rotation_1.calculateNewDimensionsFromRotateAndScale)({
         width: track.codedWidth,
         height: track.codedHeight,

@@ -88,7 +88,7 @@ const reencodeVideoTrack = async ({ videoOperation, rotate, track, logLevel, abo
     },
     onOutput: async (frame) => {
         await controller._internals._mediaParserController._internals.checkForAbortAndPause();
-        const processedFrame = await (0, on_frame_1.
+        const processedFrame = await (0, on_frame_1.processFrame)({
             frame,
             track,
             onVideoFrame,
package/dist/rotate-and-resize-video-frame.js
CHANGED

@@ -7,15 +7,18 @@ const normalizeVideoRotation = (rotation) => {
 };
 exports.normalizeVideoRotation = normalizeVideoRotation;
 const rotateAndResizeVideoFrame = ({ frame, rotation, needsToBeMultipleOfTwo = false, resizeOperation, }) => {
-    const normalized = ((rotation
+    const normalized = (0, exports.normalizeVideoRotation)(rotation);
+    // In Chrome, there is "rotation", but we cannot put frames with VideoEncoder if they have a rotation.
+    // We have to draw them to a canvas and make a new frame without video rotation.
+    const mustProcess = 'rotation' in frame && frame.rotation !== 0;
     // No resize, no rotation
-    if (normalized === 0 && resizeOperation === null) {
+    if (normalized === 0 && resizeOperation === null && !mustProcess) {
         return frame;
     }
     if (normalized % 90 !== 0) {
         throw new Error('Only 90 degree rotations are supported');
     }
-    const
+    const tentativeDimensions = (0, rotation_1.calculateNewDimensionsFromRotateAndScale)({
         height: frame.displayHeight,
         width: frame.displayWidth,
         rotation,

@@ -24,10 +27,21 @@ const rotateAndResizeVideoFrame = ({ frame, rotation, needsToBeMultipleOfTwo = f
     });
     // No rotation, and resize turned out to be same dimensions
     if (normalized === 0 &&
-        height === frame.displayHeight &&
-        width === frame.displayWidth
+        tentativeDimensions.height === frame.displayHeight &&
+        tentativeDimensions.width === frame.displayWidth &&
+        !mustProcess) {
         return frame;
     }
+    // @ts-expect-error
+    const frameRotation = frame.rotation ?? 0;
+    const canvasRotationToApply = (0, exports.normalizeVideoRotation)(normalized - frameRotation);
+    const { width, height } = (0, rotation_1.calculateNewDimensionsFromRotateAndScale)({
+        height: frame.displayHeight,
+        width: frame.displayWidth,
+        rotation: canvasRotationToApply,
+        needsToBeMultipleOfTwo,
+        resizeOperation,
+    });
     const canvas = new OffscreenCanvas(width, height);
     const ctx = canvas.getContext('2d');
     if (!ctx) {

@@ -35,22 +49,22 @@ const rotateAndResizeVideoFrame = ({ frame, rotation, needsToBeMultipleOfTwo = f
     }
     canvas.width = width;
     canvas.height = height;
-    if (
+    if (canvasRotationToApply === 90) {
         ctx.translate(width, 0);
     }
-    else if (
+    else if (canvasRotationToApply === 180) {
         ctx.translate(width, height);
     }
-    else if (
+    else if (canvasRotationToApply === 270) {
         ctx.translate(0, height);
     }
-    if (
-        ctx.rotate(
+    if (canvasRotationToApply !== 0) {
+        ctx.rotate(canvasRotationToApply * (Math.PI / 180));
     }
     if (frame.displayHeight !== height || frame.displayWidth !== width) {
         const dimensionsAfterRotate = (0, rotation_1.calculateNewDimensionsFromRotate)({
             height: frame.displayHeight,
-            rotation,
+            rotation: canvasRotationToApply,
             width: frame.displayWidth,
         });
         ctx.scale(width / dimensionsAfterRotate.width, height / dimensionsAfterRotate.height);
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@remotion/webcodecs",
-  "version": "4.0.325",
+  "version": "4.0.329",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "module": "dist/esm/index.mjs",

@@ -19,8 +19,8 @@
   "author": "Jonny Burger <jonny@remotion.dev>",
   "license": "Remotion License (See https://remotion.dev/docs/webcodecs#license)",
   "dependencies": {
-    "@remotion/licensing": "4.0.325",
-    "@remotion/media-parser": "4.0.325"
+    "@remotion/licensing": "4.0.329",
+    "@remotion/media-parser": "4.0.329"
   },
   "peerDependencies": {},
   "devDependencies": {

@@ -29,8 +29,8 @@
     "vite": "5.4.19",
     "@playwright/test": "1.51.1",
     "eslint": "9.19.0",
-    "@remotion/example-videos": "4.0.325",
-    "@remotion/eslint-config-internal": "4.0.325"
+    "@remotion/example-videos": "4.0.329",
+    "@remotion/eslint-config-internal": "4.0.329"
   },
   "keywords": [],
   "publishConfig": {