@remotion/webcodecs 4.0.330 → 4.0.332
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/create/matroska/create-matroska-media.js +15 -4
- package/dist/esm/index.mjs +36 -16
- package/dist/esm/worker.mjs +503 -0
- package/dist/extract-frames-on-web-worker.d.ts +12 -0
- package/dist/extract-frames-on-web-worker.js +15 -0
- package/dist/extract-frames-on-worker.d.ts +0 -0
- package/dist/extract-frames-on-worker.js +1 -0
- package/dist/extract-frames.d.ts +6 -8
- package/dist/extract-frames.js +3 -145
- package/dist/index.d.ts +2 -2
- package/dist/internal-extract-frames.d.ts +16 -0
- package/dist/internal-extract-frames.js +155 -0
- package/dist/worker.d.ts +1 -0
- package/dist/worker.js +5 -0
- package/package.json +14 -5

package/dist/create/matroska/create-matroska-media.js
CHANGED
@@ -95,7 +95,11 @@ const createMatroskaMedia = async ({ writer, onBytesProgress, onMillisecondsProg
 isVideo,
 chunk,
 })) {
-return {
+return {
+cluster: currentCluster,
+isNew: false,
+smallestProgress,
+};
 }
 currentCluster = await (0, cluster_1.makeCluster)({
 writer: w,
@@ -103,7 +107,11 @@ const createMatroskaMedia = async ({ writer, onBytesProgress, onMillisecondsProg
 timescale,
 logLevel,
 });
-return {
+return {
+cluster: currentCluster,
+isNew: true,
+smallestProgress,
+};
 };
 const updateDuration = async (newDuration) => {
 const blocks = (0, make_duration_with_padding_1.makeDurationWithPadding)(newDuration);
@@ -111,6 +119,7 @@ const createMatroskaMedia = async ({ writer, onBytesProgress, onMillisecondsProg
 onBytesProgress(w.getWrittenByteCount());
 };
 const addSample = async ({ chunk, trackNumber, isVideo, }) => {
+const offset = w.getWrittenByteCount();
 const { cluster, isNew, smallestProgress } = await getClusterOrMakeNew({
 chunk,
 isVideo,
@@ -119,11 +128,13 @@ const createMatroskaMedia = async ({ writer, onBytesProgress, onMillisecondsProg
 await updateDuration(newDuration);
 const { timecodeRelativeToCluster } = await cluster.addSample(chunk, trackNumber);
 if (isNew) {
-
+if (offset === null) {
+throw new Error('offset is null');
+}
 cues.push({
 time: (0, cluster_1.timestampToClusterTimestamp)(smallestProgress, timescale) +
 timecodeRelativeToCluster,
-clusterPosition:
+clusterPosition: offset - seekHeadOffset,
 trackNumber,
 });
 }
package/dist/esm/index.mjs
CHANGED
@@ -3751,7 +3751,11 @@ var createMatroskaMedia = async ({
 isVideo,
 chunk
 })) {
-return {
+return {
+cluster: currentCluster,
+isNew: false,
+smallestProgress
+};
 }
 currentCluster = await makeCluster({
 writer: w,
@@ -3759,7 +3763,11 @@ var createMatroskaMedia = async ({
 timescale,
 logLevel
 });
-return {
+return {
+cluster: currentCluster,
+isNew: true,
+smallestProgress
+};
 };
 const updateDuration = async (newDuration) => {
 const blocks = makeDurationWithPadding(newDuration);
@@ -3771,6 +3779,7 @@ var createMatroskaMedia = async ({
 trackNumber,
 isVideo
 }) => {
+const offset = w.getWrittenByteCount();
 const { cluster, isNew, smallestProgress } = await getClusterOrMakeNew({
 chunk,
 isVideo
@@ -3779,10 +3788,12 @@ var createMatroskaMedia = async ({
 await updateDuration(newDuration);
 const { timecodeRelativeToCluster } = await cluster.addSample(chunk, trackNumber);
 if (isNew) {
-
+if (offset === null) {
+throw new Error("offset is null");
+}
 cues.push({
 time: timestampToClusterTimestamp(smallestProgress, timescale) + timecodeRelativeToCluster,
-clusterPosition:
+clusterPosition: offset - seekHeadOffset,
 trackNumber
 });
 }
@@ -5593,20 +5604,23 @@ var convertMedia = async function({
 });
 };
 // src/extract-frames.ts
+import { parseMedia } from "@remotion/media-parser";
+
+// src/internal-extract-frames.ts
 import {
-hasBeenAborted,
 MediaParserAbortError as MediaParserAbortError4,
-
-
+WEBCODECS_TIMESCALE,
+hasBeenAborted,
+mediaParserController as mediaParserController2
 } from "@remotion/media-parser";
-import { parseMediaOnWebWorker } from "@remotion/media-parser/worker";
 var internalExtractFrames = ({
 src,
 onFrame,
 signal,
 timestampsInSeconds,
 acknowledgeRemotionLicense,
-logLevel
+logLevel,
+parseMediaImplementation
 }) => {
 const controller = mediaParserController2();
 const expectedFrames = [];
@@ -5619,7 +5633,7 @@ var internalExtractFrames = ({
 let dur = null;
 let lastFrame;
 let lastFrameEmitted;
-
+parseMediaImplementation({
 src: new URL(src, window.location.href),
 acknowledgeRemotionLicense,
 controller,
@@ -5652,7 +5666,7 @@ var internalExtractFrames = ({
 lastFrame = frame;
 return;
 }
-if (expectedFrames[0] + 6667 < frame.timestamp && lastFrame) {
+if (expectedFrames[0] + 6667 < frame.timestamp && lastFrame && lastFrame !== lastFrameEmitted) {
 onFrame(lastFrame);
 lastFrameEmitted = lastFrame;
 expectedFrames.shift();
@@ -5661,6 +5675,9 @@ var internalExtractFrames = ({
 }
 expectedFrames.shift();
 onFrame(frame);
+if (lastFrame && lastFrame !== lastFrameEmitted) {
+lastFrame.close();
+}
 lastFrameEmitted = frame;
 lastFrame = frame;
 },
@@ -5681,14 +5698,14 @@ var internalExtractFrames = ({
 if (!sam) {
 throw new Error("Sample is undefined");
 }
-await decoder.waitForQueueToBeLessThan(
+await decoder.waitForQueueToBeLessThan(20);
 Log.trace(logLevel, "Decoding sample", sam.timestamp);
 await decoder.decode(sam);
 }
 };
 return async (sample) => {
 const nextTimestampWeWant = timestampTargets[0];
-Log.trace(logLevel,
+Log.trace(logLevel, `Received ${sample.type} sample with dts`, sample.decodingTimestamp, "and cts", sample.timestamp);
 if (sample.type === "key") {
 await decoder.flush();
 queued.length = 0;
@@ -5735,12 +5752,15 @@ var internalExtractFrames = ({
 });
 return resolvers.promise;
 };
+
+// src/extract-frames.ts
 var extractFrames = (options) => {
 return internalExtractFrames({
 ...options,
 signal: options.signal ?? null,
 acknowledgeRemotionLicense: options.acknowledgeRemotionLicense ?? false,
-logLevel: options.logLevel ?? "info"
+logLevel: options.logLevel ?? "info",
+parseMediaImplementation: parseMedia
 });
 };
 // src/get-available-audio-codecs.ts
@@ -5762,7 +5782,7 @@ var getAvailableAudioCodecs = ({
 import {
 hasBeenAborted as hasBeenAborted2,
 mediaParserController as mediaParserController3,
-parseMedia
+parseMedia as parseMedia2
 } from "@remotion/media-parser";
 var extractOverlappingAudioSamples = ({
 sample,
@@ -5820,7 +5840,7 @@ var getPartialAudioData = async ({
 if (fromSeconds > 0) {
 controller.seek(fromSeconds);
 }
-await
+await parseMedia2({
 acknowledgeRemotionLicense: true,
 src,
 controller,

package/dist/esm/worker.mjs
ADDED
@@ -0,0 +1,503 @@
+var __create = Object.create;
+var __getProtoOf = Object.getPrototypeOf;
+var __defProp = Object.defineProperty;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __toESM = (mod, isNodeMode, target) => {
+target = mod != null ? __create(__getProtoOf(mod)) : {};
+const to = isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target;
+for (let key of __getOwnPropNames(mod))
+if (!__hasOwnProp.call(to, key))
+__defProp(to, key, {
+get: () => mod[key],
+enumerable: true
+});
+return to;
+};
+var __commonJS = (cb, mod) => () => (mod || cb((mod = { exports: {} }).exports, mod), mod.exports);
+
+// src/extract-frames-on-web-worker.ts
+import { parseMediaOnWebWorker } from "@remotion/media-parser/worker";
+
+// src/internal-extract-frames.ts
+import {
+MediaParserAbortError,
+WEBCODECS_TIMESCALE,
+hasBeenAborted,
+mediaParserController
+} from "@remotion/media-parser";
+
+// src/create/with-resolvers.ts
+var withResolvers = function() {
+let resolve;
+let reject;
+const promise = new Promise((res, rej) => {
+resolve = res;
+reject = rej;
+});
+return { promise, resolve, reject };
+};
+var withResolversAndWaitForReturn = () => {
+const { promise, reject, resolve } = withResolvers();
+const { promise: returnPromise, resolve: resolveReturn } = withResolvers();
+return {
+getPromiseToImmediatelyReturn: () => {
+resolveReturn(undefined);
+return promise;
+},
+reject: (reason) => {
+returnPromise.then(() => reject(reason));
+},
+resolve
+};
+};
+
+// src/flush-pending.ts
+var makeFlushPending = () => {
+const { promise, resolve, reject } = withResolvers();
+return {
+promise,
+resolve,
+reject
+};
+};
+
+// src/create/event-emitter.ts
+class IoEventEmitter {
+listeners = {
+input: [],
+output: [],
+processed: [],
+progress: []
+};
+addEventListener(name, callback) {
+this.listeners[name].push(callback);
+}
+removeEventListener(name, callback) {
+this.listeners[name] = this.listeners[name].filter((l) => l !== callback);
+}
+dispatchEvent(dispatchName, context) {
+this.listeners[dispatchName].forEach((callback) => {
+callback({ detail: context });
+});
+}
+}
+
+// src/log.ts
+import { MediaParserInternals } from "@remotion/media-parser";
+var { Log } = MediaParserInternals;
+
+// src/io-manager/make-timeout-promise.ts
+var makeTimeoutPromise = ({
+label,
+ms,
+controller
+}) => {
+const { promise, reject, resolve } = withResolvers();
+let timeout = null;
+const set = () => {
+timeout = setTimeout(() => {
+reject(new Error(`${label()} (timed out after ${ms}ms)`));
+}, ms);
+};
+set();
+const onPause = () => {
+if (timeout) {
+clearTimeout(timeout);
+}
+};
+const onResume = () => {
+set();
+};
+if (controller) {
+controller.addEventListener("pause", onPause);
+controller.addEventListener("resume", onResume);
+}
+return {
+timeoutPromise: promise,
+clear: () => {
+if (timeout) {
+clearTimeout(timeout);
+}
+resolve();
+if (controller) {
+controller.removeEventListener("pause", onPause);
+controller.removeEventListener("resume", onResume);
+}
+}
+};
+};
+
+// src/io-manager/io-synchronizer.ts
+var makeIoSynchronizer = ({
+logLevel,
+label,
+controller
+}) => {
+const eventEmitter = new IoEventEmitter;
+let lastInput = 0;
+let lastOutput = 0;
+let inputsSinceLastOutput = 0;
+let inputs = [];
+let resolvers = [];
+const getQueuedItems = () => {
+inputs = inputs.filter((input) => Math.floor(input) > Math.floor(lastOutput) + 1);
+return inputs.length;
+};
+const printState = (prefix) => {
+Log.trace(logLevel, `[${label}] ${prefix}, state: Last input = ${lastInput} Last output = ${lastOutput} Inputs since last output = ${inputsSinceLastOutput}, Queue = ${getQueuedItems()}`);
+};
+const inputItem = (timestamp) => {
+lastInput = timestamp;
+inputsSinceLastOutput++;
+inputs.push(timestamp);
+eventEmitter.dispatchEvent("input", {
+timestamp
+});
+printState("Input item");
+};
+const onOutput = (timestamp) => {
+lastOutput = timestamp;
+inputsSinceLastOutput = 0;
+eventEmitter.dispatchEvent("output", {
+timestamp
+});
+printState("Got output");
+};
+const waitForOutput = () => {
+const { promise, resolve } = withResolvers();
+const on = () => {
+eventEmitter.removeEventListener("output", on);
+resolve();
+resolvers = resolvers.filter((resolver) => resolver !== resolve);
+};
+eventEmitter.addEventListener("output", on);
+resolvers.push(resolve);
+return promise;
+};
+const makeErrorBanner = () => {
+return [
+`Waited too long for ${label} to finish:`,
+`${getQueuedItems()} queued items`,
+`inputs: ${JSON.stringify(inputs)}`,
+`last output: ${lastOutput}`
+];
+};
+const waitForQueueSize = async (queueSize) => {
+if (getQueuedItems() <= queueSize) {
+return Promise.resolve();
+}
+const { timeoutPromise, clear } = makeTimeoutPromise({
+label: () => [
+...makeErrorBanner(),
+`wanted: <${queueSize} queued items`,
+`Report this at https://remotion.dev/report`
+].join(`
+`),
+ms: 1e4,
+controller
+});
+if (controller) {
+controller._internals._mediaParserController._internals.signal.addEventListener("abort", clear);
+}
+await Promise.race([
+timeoutPromise,
+(async () => {
+while (getQueuedItems() > queueSize) {
+await waitForOutput();
+}
+})()
+]).finally(() => clear());
+if (controller) {
+controller._internals._mediaParserController._internals.signal.removeEventListener("abort", clear);
+}
+};
+const clearQueue = () => {
+inputs.length = 0;
+lastInput = 0;
+lastOutput = 0;
+inputsSinceLastOutput = 0;
+resolvers.forEach((resolver) => {
+return resolver();
+});
+resolvers.length = 0;
+inputs.length = 0;
+};
+return {
+inputItem,
+onOutput,
+waitForQueueSize,
+clearQueue
+};
+};
+
+// src/create-video-decoder.ts
+var internalCreateVideoDecoder = ({
+onFrame,
+onError,
+controller,
+config,
+logLevel
+}) => {
+if (controller && controller._internals._mediaParserController._internals.signal.aborted) {
+throw new Error("Not creating audio decoder, already aborted");
+}
+const ioSynchronizer = makeIoSynchronizer({
+logLevel,
+label: "Video decoder",
+controller
+});
+let mostRecentSampleReceived = null;
+const videoDecoder = new VideoDecoder({
+async output(frame) {
+try {
+await onFrame(frame);
+} catch (err) {
+onError(err);
+frame.close();
+}
+ioSynchronizer.onOutput(frame.timestamp);
+},
+error(error) {
+onError(error);
+}
+});
+const close = () => {
+if (controller) {
+controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
+}
+if (videoDecoder.state === "closed") {
+return;
+}
+videoDecoder.close();
+};
+const onAbort = () => {
+close();
+};
+if (controller) {
+controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
+}
+videoDecoder.configure(config);
+const decode = async (sample) => {
+if (videoDecoder.state === "closed") {
+return;
+}
+try {
+await controller?._internals._mediaParserController._internals.checkForAbortAndPause();
+} catch (err) {
+onError(err);
+return;
+}
+mostRecentSampleReceived = sample.timestamp;
+const encodedChunk = sample instanceof EncodedVideoChunk ? sample : new EncodedVideoChunk(sample);
+videoDecoder.decode(encodedChunk);
+ioSynchronizer.inputItem(sample.timestamp);
+};
+let flushPending = null;
+let lastReset = null;
+return {
+decode,
+close,
+flush: () => {
+if (flushPending) {
+throw new Error("Flush already pending");
+}
+const pendingFlush = makeFlushPending();
+flushPending = pendingFlush;
+Promise.resolve().then(() => {
+return videoDecoder.flush();
+}).catch(() => {}).finally(() => {
+pendingFlush.resolve();
+flushPending = null;
+});
+return pendingFlush.promise;
+},
+waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize,
+reset: () => {
+lastReset = Date.now();
+flushPending?.resolve();
+ioSynchronizer.clearQueue();
+videoDecoder.reset();
+videoDecoder.configure(config);
+},
+checkReset: () => {
+const initTime = Date.now();
+return {
+wasReset: () => lastReset !== null && lastReset > initTime
+};
+},
+getMostRecentSampleInput() {
+return mostRecentSampleReceived;
+}
+};
+};
+var createVideoDecoder = ({
+onFrame,
+onError,
+controller,
+track,
+logLevel
+}) => {
+return internalCreateVideoDecoder({
+onFrame,
+onError,
+controller: controller ?? null,
+config: track,
+logLevel: logLevel ?? "info"
+});
+};
+
+// src/internal-extract-frames.ts
+var internalExtractFrames = ({
+src,
+onFrame,
+signal,
+timestampsInSeconds,
+acknowledgeRemotionLicense,
+logLevel,
+parseMediaImplementation
+}) => {
+const controller = mediaParserController();
+const expectedFrames = [];
+const resolvers = withResolvers();
+const abortListener = () => {
+controller.abort();
+resolvers.reject(new MediaParserAbortError("Aborted by user"));
+};
+signal?.addEventListener("abort", abortListener, { once: true });
+let dur = null;
+let lastFrame;
+let lastFrameEmitted;
+parseMediaImplementation({
+src: new URL(src, window.location.href),
+acknowledgeRemotionLicense,
+controller,
+logLevel,
+onDurationInSeconds(durationInSeconds) {
+dur = durationInSeconds;
+},
+onVideoTrack: async ({ track, container }) => {
+const timestampTargetsUnsorted = typeof timestampsInSeconds === "function" ? await timestampsInSeconds({
+track,
+container,
+durationInSeconds: dur
+}) : timestampsInSeconds;
+const timestampTargets = timestampTargetsUnsorted.sort((a, b) => a - b);
+if (timestampTargets.length === 0) {
+throw new Error("expected at least one timestamp to extract but found zero");
+}
+controller.seek(timestampTargets[0]);
+const decoder = createVideoDecoder({
+onFrame: (frame) => {
+Log.trace(logLevel, "Received frame with timestamp", frame.timestamp);
+if (expectedFrames.length === 0) {
+frame.close();
+return;
+}
+if (frame.timestamp < expectedFrames[0] - 1) {
+if (lastFrame) {
+lastFrame.close();
+}
+lastFrame = frame;
+return;
+}
+if (expectedFrames[0] + 6667 < frame.timestamp && lastFrame && lastFrame !== lastFrameEmitted) {
+onFrame(lastFrame);
+lastFrameEmitted = lastFrame;
+expectedFrames.shift();
+lastFrame = frame;
+return;
+}
+expectedFrames.shift();
+onFrame(frame);
+if (lastFrame && lastFrame !== lastFrameEmitted) {
+lastFrame.close();
+}
+lastFrameEmitted = frame;
+lastFrame = frame;
+},
+onError: (e) => {
+controller.abort();
+try {
+decoder.close();
+} catch {}
+resolvers.reject(e);
+},
+track
+});
+const queued = [];
+const doProcess = async () => {
+expectedFrames.push(timestampTargets.shift() * WEBCODECS_TIMESCALE);
+while (queued.length > 0) {
+const sam = queued.shift();
+if (!sam) {
+throw new Error("Sample is undefined");
+}
+await decoder.waitForQueueToBeLessThan(20);
+Log.trace(logLevel, "Decoding sample", sam.timestamp);
+await decoder.decode(sam);
+}
+};
+return async (sample) => {
+const nextTimestampWeWant = timestampTargets[0];
+Log.trace(logLevel, `Received ${sample.type} sample with dts`, sample.decodingTimestamp, "and cts", sample.timestamp);
+if (sample.type === "key") {
+await decoder.flush();
+queued.length = 0;
+}
+queued.push(sample);
+if (sample.decodingTimestamp >= timestampTargets[timestampTargets.length - 1] * WEBCODECS_TIMESCALE) {
+await doProcess();
+await decoder.flush();
+controller.abort();
+return;
+}
+if (nextTimestampWeWant === undefined) {
+throw new Error("this should not happen");
+}
+if (sample.decodingTimestamp >= nextTimestampWeWant * WEBCODECS_TIMESCALE) {
+await doProcess();
+if (timestampTargets.length === 0) {
+await decoder.flush();
+controller.abort();
+}
+}
+return async () => {
+await doProcess();
+await decoder.flush();
+if (lastFrame && lastFrameEmitted !== lastFrame) {
+lastFrame.close();
+}
+};
+};
+}
+}).then(() => {
+resolvers.resolve();
+}).catch((e) => {
+if (!hasBeenAborted(e)) {
+resolvers.reject(e);
+} else {
+resolvers.resolve();
+}
+}).finally(() => {
+if (lastFrame && lastFrameEmitted !== lastFrame) {
+lastFrame.close();
+}
+signal?.removeEventListener("abort", abortListener);
+});
+return resolvers.promise;
+};
+
+// src/extract-frames-on-web-worker.ts
+var extractFramesOnWebWorker = (options) => {
+return internalExtractFrames({
+...options,
+signal: options.signal ?? null,
+acknowledgeRemotionLicense: options.acknowledgeRemotionLicense ?? false,
+logLevel: options.logLevel ?? "info",
+parseMediaImplementation: parseMediaOnWebWorker
+});
+};
+export {
+extractFramesOnWebWorker
+};

package/dist/extract-frames-on-web-worker.d.ts
ADDED
@@ -0,0 +1,12 @@
+import { type MediaParserLogLevel } from '@remotion/media-parser';
+import { type ExtractFramesTimestampsInSecondsFn } from './internal-extract-frames';
+export type ExtractFramesOnWebWorkerProps = {
+src: string;
+timestampsInSeconds: number[] | ExtractFramesTimestampsInSecondsFn;
+onFrame: (frame: VideoFrame) => void;
+signal?: AbortSignal;
+acknowledgeRemotionLicense?: boolean;
+logLevel?: MediaParserLogLevel;
+};
+export type ExtractFramesOnWebWorker = (options: ExtractFramesOnWebWorkerProps) => Promise<void>;
+export declare const extractFramesOnWebWorker: ExtractFramesOnWebWorker;

package/dist/extract-frames-on-web-worker.js
ADDED
@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.extractFramesOnWebWorker = void 0;
+const worker_1 = require("@remotion/media-parser/worker");
+const internal_extract_frames_1 = require("./internal-extract-frames");
+const extractFramesOnWebWorker = (options) => {
+return (0, internal_extract_frames_1.internalExtractFrames)({
+...options,
+signal: options.signal ?? null,
+acknowledgeRemotionLicense: options.acknowledgeRemotionLicense ?? false,
+logLevel: options.logLevel ?? 'info',
+parseMediaImplementation: worker_1.parseMediaOnWebWorker,
+});
+};
+exports.extractFramesOnWebWorker = extractFramesOnWebWorker;

package/dist/extract-frames-on-worker.d.ts
File without changes

package/dist/extract-frames-on-worker.js
CHANGED
@@ -0,0 +1 @@
+"use strict";
package/dist/extract-frames.d.ts
CHANGED
@@ -1,14 +1,12 @@
-import
-
-
-container: MediaParserContainer;
-durationInSeconds: number | null;
-}) => Promise<number[]> | number[];
-export declare const extractFrames: (options: {
+import { type MediaParserLogLevel } from '@remotion/media-parser';
+import { type ExtractFramesTimestampsInSecondsFn } from './internal-extract-frames';
+export type ExtractFramesProps = {
 src: string;
 timestampsInSeconds: number[] | ExtractFramesTimestampsInSecondsFn;
 onFrame: (frame: VideoFrame) => void;
 signal?: AbortSignal;
 acknowledgeRemotionLicense?: boolean;
 logLevel?: MediaParserLogLevel;
-}
+};
+export type ExtractFrames = (options: ExtractFramesProps) => Promise<void>;
+export declare const extractFrames: ExtractFrames;
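
For context, a minimal usage sketch of extractFrames, inferred from the ExtractFramesProps declaration above (the video URL is a placeholder; optional fields keep their documented defaults when omitted):

import { extractFrames } from '@remotion/webcodecs';

// Pull three frames out of a video; the URL below is a placeholder.
await extractFrames({
  src: 'https://example.com/video.mp4',
  timestampsInSeconds: [0, 1, 2],
  onFrame: (frame) => {
    // frame is a WebCodecs VideoFrame; use it (e.g. draw it to a canvas),
    // then release it once you are done with it.
    frame.close();
  },
  acknowledgeRemotionLicense: true,
});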
package/dist/extract-frames.js
CHANGED
@@ -2,156 +2,14 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.extractFrames = void 0;
 const media_parser_1 = require("@remotion/media-parser");
-const
-const create_video_decoder_1 = require("./create-video-decoder");
-const with_resolvers_1 = require("./create/with-resolvers");
-const log_1 = require("./log");
-const internalExtractFrames = ({ src, onFrame, signal, timestampsInSeconds, acknowledgeRemotionLicense, logLevel, }) => {
-const controller = (0, media_parser_1.mediaParserController)();
-const expectedFrames = [];
-const resolvers = (0, with_resolvers_1.withResolvers)();
-const abortListener = () => {
-controller.abort();
-resolvers.reject(new media_parser_1.MediaParserAbortError('Aborted by user'));
-};
-signal?.addEventListener('abort', abortListener, { once: true });
-let dur = null;
-let lastFrame;
-let lastFrameEmitted;
-(0, worker_1.parseMediaOnWebWorker)({
-src: new URL(src, window.location.href),
-acknowledgeRemotionLicense,
-controller,
-logLevel,
-onDurationInSeconds(durationInSeconds) {
-dur = durationInSeconds;
-},
-onVideoTrack: async ({ track, container }) => {
-const timestampTargetsUnsorted = typeof timestampsInSeconds === 'function'
-? await timestampsInSeconds({
-track,
-container,
-durationInSeconds: dur,
-})
-: timestampsInSeconds;
-const timestampTargets = timestampTargetsUnsorted.sort((a, b) => a - b);
-if (timestampTargets.length === 0) {
-throw new Error('expected at least one timestamp to extract but found zero');
-}
-controller.seek(timestampTargets[0]);
-const decoder = (0, create_video_decoder_1.createVideoDecoder)({
-onFrame: (frame) => {
-log_1.Log.trace(logLevel, 'Received frame with timestamp', frame.timestamp);
-if (expectedFrames.length === 0) {
-frame.close();
-return;
-}
-if (frame.timestamp < expectedFrames[0] - 1) {
-if (lastFrame) {
-lastFrame.close();
-}
-lastFrame = frame;
-return;
-}
-// A WebM might have a timestamp of 67000 but we request 66666
-// See a test with this problem in it-tests/rendering/frame-accuracy.test.ts
-// Solution: We allow a 10.000ms - 3.333ms = 6.667ms difference between the requested timestamp and the actual timestamp
-if (expectedFrames[0] + 6667 < frame.timestamp && lastFrame) {
-onFrame(lastFrame);
-lastFrameEmitted = lastFrame;
-expectedFrames.shift();
-lastFrame = frame;
-return;
-}
-expectedFrames.shift();
-onFrame(frame);
-lastFrameEmitted = frame;
-lastFrame = frame;
-},
-onError: (e) => {
-controller.abort();
-try {
-decoder.close();
-}
-catch { }
-resolvers.reject(e);
-},
-track,
-});
-const queued = [];
-const doProcess = async () => {
-expectedFrames.push(timestampTargets.shift() * media_parser_1.WEBCODECS_TIMESCALE);
-while (queued.length > 0) {
-const sam = queued.shift();
-if (!sam) {
-throw new Error('Sample is undefined');
-}
-await decoder.waitForQueueToBeLessThan(10);
-log_1.Log.trace(logLevel, 'Decoding sample', sam.timestamp);
-await decoder.decode(sam);
-}
-};
-return async (sample) => {
-const nextTimestampWeWant = timestampTargets[0];
-log_1.Log.trace(logLevel, 'Received sample with dts', sample.decodingTimestamp, 'and cts', sample.timestamp);
-if (sample.type === 'key') {
-await decoder.flush();
-queued.length = 0;
-}
-queued.push(sample);
-if (sample.decodingTimestamp >=
-timestampTargets[timestampTargets.length - 1] * media_parser_1.WEBCODECS_TIMESCALE) {
-await doProcess();
-await decoder.flush();
-controller.abort();
-return;
-}
-if (nextTimestampWeWant === undefined) {
-throw new Error('this should not happen');
-}
-if (sample.decodingTimestamp >=
-nextTimestampWeWant * media_parser_1.WEBCODECS_TIMESCALE) {
-await doProcess();
-if (timestampTargets.length === 0) {
-await decoder.flush();
-controller.abort();
-}
-}
-return async () => {
-await doProcess();
-await decoder.flush();
-if (lastFrame && lastFrameEmitted !== lastFrame) {
-lastFrame.close();
-}
-};
-};
-},
-})
-.then(() => {
-resolvers.resolve();
-})
-.catch((e) => {
-if (!(0, media_parser_1.hasBeenAborted)(e)) {
-resolvers.reject(e);
-}
-else {
-resolvers.resolve();
-}
-})
-.finally(() => {
-if (lastFrame && lastFrameEmitted !== lastFrame) {
-lastFrame.close();
-}
-signal?.removeEventListener('abort', abortListener);
-});
-return resolvers.promise;
-};
+const internal_extract_frames_1 = require("./internal-extract-frames");
 const extractFrames = (options) => {
-return internalExtractFrames({
+return (0, internal_extract_frames_1.internalExtractFrames)({
 ...options,
 signal: options.signal ?? null,
 acknowledgeRemotionLicense: options.acknowledgeRemotionLicense ?? false,
 logLevel: options.logLevel ?? 'info',
+parseMediaImplementation: media_parser_1.parseMedia,
 });
 };
 exports.extractFrames = extractFrames;
package/dist/index.d.ts
CHANGED
@@ -13,8 +13,7 @@ export { createVideoDecoder } from './create-video-decoder';
 export type { WebCodecsVideoDecoder } from './create-video-decoder';
 export { defaultOnAudioTrackHandler } from './default-on-audio-track-handler';
 export { defaultOnVideoTrackHandler } from './default-on-video-track-handler';
-export { extractFrames } from './extract-frames';
-export type { ExtractFramesTimestampsInSecondsFn } from './extract-frames';
+export { extractFrames, ExtractFrames, ExtractFramesProps, } from './extract-frames';
 export { getAvailableAudioCodecs } from './get-available-audio-codecs';
 export type { ConvertMediaAudioCodec } from './get-available-audio-codecs';
 export { getAvailableContainers } from './get-available-containers';
@@ -24,6 +23,7 @@ export type { ConvertMediaVideoCodec } from './get-available-video-codecs';
 export { getDefaultAudioCodec } from './get-default-audio-codec';
 export { getDefaultVideoCodec } from './get-default-video-codec';
 export { getPartialAudioData, GetPartialAudioDataProps, } from './get-partial-audio-data';
+export type { ExtractFramesTimestampsInSecondsFn } from './internal-extract-frames';
 export type { AudioOperation, ConvertMediaOnAudioTrackHandler, } from './on-audio-track-handler';
 export type { ConvertMediaOnVideoTrackHandler, VideoOperation, } from './on-video-track-handler';
 export type { ResizeOperation } from './resizing/mode';

package/dist/internal-extract-frames.d.ts
ADDED
@@ -0,0 +1,16 @@
+import type { MediaParserContainer, MediaParserLogLevel, MediaParserVideoTrack, ParseMedia } from '@remotion/media-parser';
+import type { ParseMediaOnWorker } from '@remotion/media-parser/worker';
+export type ExtractFramesTimestampsInSecondsFn = (options: {
+track: MediaParserVideoTrack;
+container: MediaParserContainer;
+durationInSeconds: number | null;
+}) => Promise<number[]> | number[];
+export declare const internalExtractFrames: ({ src, onFrame, signal, timestampsInSeconds, acknowledgeRemotionLicense, logLevel, parseMediaImplementation, }: {
+timestampsInSeconds: number[] | ExtractFramesTimestampsInSecondsFn;
+src: string;
+onFrame: (frame: VideoFrame) => void;
+signal: AbortSignal | null;
+acknowledgeRemotionLicense: boolean;
+logLevel: MediaParserLogLevel;
+parseMediaImplementation: ParseMediaOnWorker | ParseMedia;
+}) => Promise<void>;

package/dist/internal-extract-frames.js
ADDED
@@ -0,0 +1,155 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.internalExtractFrames = void 0;
+const media_parser_1 = require("@remotion/media-parser");
+const create_video_decoder_1 = require("./create-video-decoder");
+const with_resolvers_1 = require("./create/with-resolvers");
+const log_1 = require("./log");
+const internalExtractFrames = ({ src, onFrame, signal, timestampsInSeconds, acknowledgeRemotionLicense, logLevel, parseMediaImplementation, }) => {
+const controller = (0, media_parser_1.mediaParserController)();
+const expectedFrames = [];
+const resolvers = (0, with_resolvers_1.withResolvers)();
+const abortListener = () => {
+controller.abort();
+resolvers.reject(new media_parser_1.MediaParserAbortError('Aborted by user'));
+};
+signal?.addEventListener('abort', abortListener, { once: true });
+let dur = null;
+let lastFrame;
+let lastFrameEmitted;
+parseMediaImplementation({
+src: new URL(src, window.location.href),
+acknowledgeRemotionLicense,
+controller,
+logLevel,
+onDurationInSeconds(durationInSeconds) {
+dur = durationInSeconds;
+},
+onVideoTrack: async ({ track, container }) => {
+const timestampTargetsUnsorted = typeof timestampsInSeconds === 'function'
+? await timestampsInSeconds({
+track,
+container,
+durationInSeconds: dur,
+})
+: timestampsInSeconds;
+const timestampTargets = timestampTargetsUnsorted.sort((a, b) => a - b);
+if (timestampTargets.length === 0) {
+throw new Error('expected at least one timestamp to extract but found zero');
+}
+controller.seek(timestampTargets[0]);
+const decoder = (0, create_video_decoder_1.createVideoDecoder)({
+onFrame: (frame) => {
+log_1.Log.trace(logLevel, 'Received frame with timestamp', frame.timestamp);
+if (expectedFrames.length === 0) {
+frame.close();
+return;
+}
+if (frame.timestamp < expectedFrames[0] - 1) {
+if (lastFrame) {
+lastFrame.close();
+}
+lastFrame = frame;
+return;
+}
+// A WebM might have a timestamp of 67000 but we request 66666
+// See a test with this problem in it-tests/rendering/frame-accuracy.test.ts
+// Solution: We allow a 10.000ms - 3.333ms = 6.667ms difference between the requested timestamp and the actual timestamp
+if (expectedFrames[0] + 6667 < frame.timestamp &&
+lastFrame &&
+lastFrame !== lastFrameEmitted) {
+onFrame(lastFrame);
+lastFrameEmitted = lastFrame;
+expectedFrames.shift();
+lastFrame = frame;
+return;
+}
+expectedFrames.shift();
+onFrame(frame);
+if (lastFrame && lastFrame !== lastFrameEmitted) {
+lastFrame.close();
+}
+lastFrameEmitted = frame;
+lastFrame = frame;
+},
+onError: (e) => {
+controller.abort();
+try {
+decoder.close();
+}
+catch {
+// Ignore
+}
+resolvers.reject(e);
+},
+track,
+});
+const queued = [];
+const doProcess = async () => {
+expectedFrames.push(timestampTargets.shift() * media_parser_1.WEBCODECS_TIMESCALE);
+while (queued.length > 0) {
+const sam = queued.shift();
+if (!sam) {
+throw new Error('Sample is undefined');
+}
+await decoder.waitForQueueToBeLessThan(20);
+log_1.Log.trace(logLevel, 'Decoding sample', sam.timestamp);
+await decoder.decode(sam);
+}
+};
+return async (sample) => {
+const nextTimestampWeWant = timestampTargets[0];
+log_1.Log.trace(logLevel, `Received ${sample.type} sample with dts`, sample.decodingTimestamp, 'and cts', sample.timestamp);
+if (sample.type === 'key') {
+await decoder.flush();
+queued.length = 0;
+}
+queued.push(sample);
+if (sample.decodingTimestamp >=
+timestampTargets[timestampTargets.length - 1] * media_parser_1.WEBCODECS_TIMESCALE) {
+await doProcess();
+await decoder.flush();
+controller.abort();
+return;
+}
+if (nextTimestampWeWant === undefined) {
+throw new Error('this should not happen');
+}
+if (sample.decodingTimestamp >=
+nextTimestampWeWant * media_parser_1.WEBCODECS_TIMESCALE) {
+await doProcess();
+if (timestampTargets.length === 0) {
+await decoder.flush();
+controller.abort();
+}
+}
+return async () => {
+await doProcess();
+await decoder.flush();
+if (lastFrame && lastFrameEmitted !== lastFrame) {
+lastFrame.close();
+}
+};
+};
+},
+})
+.then(() => {
+resolvers.resolve();
+})
+.catch((e) => {
+if (!(0, media_parser_1.hasBeenAborted)(e)) {
+resolvers.reject(e);
+}
+else {
+resolvers.resolve();
+}
+})
+.finally(() => {
+if (lastFrame && lastFrameEmitted !== lastFrame) {
+lastFrame.close();
+}
+signal?.removeEventListener('abort', abortListener);
+});
+return resolvers.promise;
+};
+exports.internalExtractFrames = internalExtractFrames;
package/dist/worker.d.ts
ADDED
@@ -0,0 +1 @@
+export { ExtractFramesOnWebWorker, ExtractFramesOnWebWorkerProps, extractFramesOnWebWorker, } from './extract-frames-on-web-worker';
package/dist/worker.js
ADDED
@@ -0,0 +1,5 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.extractFramesOnWebWorker = void 0;
+var extract_frames_on_web_worker_1 = require("./extract-frames-on-web-worker");
+Object.defineProperty(exports, "extractFramesOnWebWorker", { enumerable: true, get: function () { return extract_frames_on_web_worker_1.extractFramesOnWebWorker; } });
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@remotion/webcodecs",
-"version": "4.0.
+"version": "4.0.332",
 "main": "dist/index.js",
 "types": "dist/index.d.ts",
 "module": "dist/esm/index.mjs",
@@ -19,8 +19,8 @@
 "author": "Jonny Burger <jonny@remotion.dev>",
 "license": "Remotion License (See https://remotion.dev/docs/webcodecs#license)",
 "dependencies": {
-"@remotion/media-parser": "4.0.
-"@remotion/licensing": "4.0.
+"@remotion/media-parser": "4.0.332",
+"@remotion/licensing": "4.0.332"
 },
 "peerDependencies": {},
 "devDependencies": {
@@ -29,8 +29,8 @@
 "vite": "5.4.19",
 "@playwright/test": "1.51.1",
 "eslint": "9.19.0",
-"@remotion/
-"@remotion/
+"@remotion/example-videos": "4.0.332",
+"@remotion/eslint-config-internal": "4.0.332"
 },
 "keywords": [],
 "publishConfig": {
@@ -55,6 +55,12 @@
 "module": "./dist/esm/buffer.mjs",
 "import": "./dist/esm/buffer.mjs"
 },
+"./worker": {
+"types": "./dist/worker.d.ts",
+"require": "./dist/worker.js",
+"module": "./dist/esm/worker.mjs",
+"import": "./dist/esm/worker.mjs"
+},
 "./package.json": "./package.json"
 },
 "typesVersions": {
@@ -62,6 +68,9 @@
 "web-fs": [
 "dist/writers/web-fs.d.ts"
 ],
+"worker": [
+"dist/worker.d.ts"
+],
 "buffer": [
 "dist/writers/buffer.d.ts"
 ]