@remotion/media 4.0.364 → 4.0.366

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,171 +1,57 @@
- import { ALL_FORMATS, AudioBufferSink, CanvasSink, Input, UrlSource, } from 'mediabunny';
+ import { ALL_FORMATS, Input, UrlSource } from 'mediabunny';
  import { Internals } from 'remotion';
- import { HEALTHY_BUFFER_THRESHOLD_SECONDS, makeAudioIterator, } from './audio/audio-preview-iterator';
+ import { audioIteratorManager, } from './audio-iterator-manager';
+ import { calculatePlaybackTime } from './calculate-playbacktime';
  import { drawPreviewOverlay } from './debug-overlay/preview-overlay';
  import { getTimeInSeconds } from './get-time-in-seconds';
  import { isNetworkError } from './is-network-error';
- import { sleep, TimeoutError, withTimeout } from './video/timeout-utils';
- import { createVideoIterator, } from './video/video-preview-iterator';
- const AUDIO_BUFFER_TOLERANCE_THRESHOLD = 0.1;
+ import { makeNonceManager } from './nonce-manager';
+ import { videoIteratorManager } from './video-iterator-manager';
  export class MediaPlayer {
- constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, audioStreamIndex, fps, debugOverlay, bufferState, }) {
- this.canvasSink = null;
- this.videoFrameIterator = null;
- this.debugStats = {
- videoIteratorsCreated: 0,
- framesRendered: 0,
- };
- this.audioSink = null;
- this.audioBufferIterator = null;
- this.gainNode = null;
- this.currentVolume = 1;
+ constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, globalPlaybackRate, audioStreamIndex, fps, debugOverlay, bufferState, isPremounting, isPostmounting, }) {
+ this.audioIteratorManager = null;
+ this.videoIteratorManager = null;
  // this is the time difference between Web Audio timeline
  // and media file timeline
  this.audioSyncAnchor = 0;
  this.playing = false;
- this.muted = false;
  this.loop = false;
- this.initialized = false;
- // for remotion buffer state
- this.isBuffering = false;
- this.mediaEnded = false;
  this.debugOverlay = false;
+ this.onVideoFrameCallback = null;
  this.initializationPromise = null;
- this.input = null;
- this.currentSeekNonce = 0;
  this.seekPromiseChain = Promise.resolve();
- this.drawFrame = (frame) => {
- if (!this.context) {
- throw new Error('Context not initialized');
- }
- this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
- this.context.drawImage(frame.canvas, 0, 0);
- this.debugStats.framesRendered++;
- this.drawDebugOverlay();
- if (this.onVideoFrameCallback && this.canvas) {
- this.onVideoFrameCallback(this.canvas);
- }
- Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Drew frame ${frame.timestamp.toFixed(3)}s`);
- };
- this.startAudioIterator = (startFromSecond) => {
- if (!this.hasAudio())
- return;
- // Clean up existing audio iterator
- this.audioBufferIterator?.destroy();
- try {
- const iterator = makeAudioIterator(this.audioSink, startFromSecond);
- this.audioBufferIterator = iterator;
- this.runAudioIterator(startFromSecond, iterator);
- }
- catch (error) {
- if (this.isDisposalError()) {
- return;
- }
- Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to start audio iterator', error);
+ this.delayPlaybackHandleIfNotPremounting = () => {
+ if (this.isPremounting || this.isPostmounting) {
+ return {
+ unblock: () => { },
+ };
  }
+ return this.bufferState.delayPlayback();
  };
- this.startVideoIterator = async (timeToSeek, nonce) => {
- if (!this.canvasSink) {
- return;
- }
- this.videoFrameIterator?.destroy();
- const iterator = createVideoIterator(timeToSeek, this.canvasSink);
- this.debugStats.videoIteratorsCreated++;
- this.videoFrameIterator = iterator;
- const delayHandle = this.bufferState?.delayPlayback();
- const frameResult = await iterator.getNext();
- delayHandle?.unblock();
- if (iterator.isDestroyed()) {
- return;
- }
- if (nonce !== this.currentSeekNonce) {
- return;
- }
- if (this.videoFrameIterator.isDestroyed()) {
- return;
- }
- if (frameResult.value) {
- this.audioSyncAnchor =
- this.sharedAudioContext.currentTime - frameResult.value.timestamp;
- this.drawFrame(frameResult.value);
+ this.scheduleAudioNode = (node, mediaTimestamp) => {
+ const currentTime = this.getPlaybackTime();
+ const delayWithoutPlaybackRate = mediaTimestamp - currentTime;
+ const delay = delayWithoutPlaybackRate / (this.playbackRate * this.globalPlaybackRate);
+ if (delay >= 0) {
+ node.start(this.sharedAudioContext.currentTime + delay);
  }
  else {
- // media ended
+ node.start(this.sharedAudioContext.currentTime, -delay);
  }
  };
- this.bufferingStartedAtMs = null;
- this.minBufferingTimeoutMs = 500;
- this.runAudioIterator = async (startFromSecond, audioIterator) => {
- if (!this.hasAudio())
+ this.drawDebugOverlay = () => {
+ if (!this.debugOverlay)
  return;
- try {
- let totalBufferDuration = 0;
- let isFirstBuffer = true;
- audioIterator.setAudioIteratorStarted(true);
- while (true) {
- if (audioIterator.isDestroyed()) {
- return;
- }
- const BUFFERING_TIMEOUT_MS = 50;
- let result;
- try {
- result = await withTimeout(audioIterator.getNext(), BUFFERING_TIMEOUT_MS, 'Iterator timeout');
- }
- catch (error) {
- if (error instanceof TimeoutError && !this.mediaEnded) {
- this.setBufferingState(true);
- }
- await sleep(10);
- continue;
- }
- // media has ended
- if (result.done || !result.value) {
- this.mediaEnded = true;
- break;
- }
- const { buffer, timestamp, duration } = result.value;
- totalBufferDuration += duration;
- audioIterator.setAudioBufferHealth(Math.max(0, totalBufferDuration / this.playbackRate));
- this.maybeResumeFromBuffering(totalBufferDuration / this.playbackRate);
- if (this.playing) {
- if (isFirstBuffer) {
- this.audioSyncAnchor =
- this.sharedAudioContext.currentTime - timestamp;
- isFirstBuffer = false;
- }
- // if timestamp is less than timeToSeek, skip
- // context: for some reason, mediabunny returns buffer at 9.984s, when requested at 10s
- if (timestamp < startFromSecond - AUDIO_BUFFER_TOLERANCE_THRESHOLD) {
- continue;
- }
- this.scheduleAudioChunk(buffer, timestamp);
- }
- const playbackTime = this.getPlaybackTime();
- if (playbackTime === null) {
- continue;
- }
- if (timestamp - playbackTime >= 1) {
- await new Promise((resolve) => {
- const check = () => {
- const currentPlaybackTime = this.getPlaybackTime();
- if (currentPlaybackTime !== null &&
- timestamp - currentPlaybackTime < 1) {
- resolve();
- }
- else {
- requestAnimationFrame(check);
- }
- };
- check();
- });
- }
- }
- }
- catch (error) {
- if (this.isDisposalError()) {
- return;
- }
- Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to run audio iterator', error);
+ if (this.context && this.canvas) {
+ drawPreviewOverlay({
+ context: this.context,
+ audioTime: this.sharedAudioContext.currentTime,
+ audioContextState: this.sharedAudioContext.state,
+ audioSyncAnchor: this.audioSyncAnchor,
+ audioIteratorManager: this.audioIteratorManager,
+ playing: this.playing,
+ videoIteratorManager: this.videoIteratorManager,
+ });
  }
  };
  this.canvas = canvas ?? null;
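The new `scheduleAudioNode` callback above converts the gap between a chunk's media timestamp and the current playback time into a wall-clock delay by dividing by `playbackRate * globalPlaybackRate`, and starts late chunks immediately with an offset. A minimal standalone sketch of that scheduling math, using only standard Web Audio APIs (the node creation and destination routing here are illustrative assumptions; in the package the node is created and routed by the audio iterator manager):

```ts
// Illustrative sketch of the delay math behind scheduleAudioNode.
// mediaTimestamp and playbackTime are in media seconds; the AudioContext
// runs in wall-clock seconds, so the gap is divided by the effective rate.
const scheduleChunk = (
  ctx: AudioContext,
  buffer: AudioBuffer,
  mediaTimestamp: number,
  playbackTime: number,
  rate: number, // playbackRate * globalPlaybackRate
) => {
  const node = ctx.createBufferSource();
  node.buffer = buffer;
  node.playbackRate.value = rate;
  node.connect(ctx.destination);

  const delay = (mediaTimestamp - playbackTime) / rate;
  if (delay >= 0) {
    // Chunk lies in the future: start it `delay` wall-clock seconds from now.
    node.start(ctx.currentTime + delay);
  } else {
    // Chunk is already late: start right away with an offset, as the diff does.
    node.start(ctx.currentTime, -delay);
  }
};
```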
@@ -173,6 +59,7 @@ export class MediaPlayer {
  this.logLevel = logLevel ?? window.remotion_logLevel;
  this.sharedAudioContext = sharedAudioContext;
  this.playbackRate = playbackRate;
+ this.globalPlaybackRate = globalPlaybackRate;
  this.loop = loop;
  this.trimBefore = trimBefore;
  this.trimAfter = trimAfter;
@@ -180,6 +67,13 @@ export class MediaPlayer {
  this.fps = fps;
  this.debugOverlay = debugOverlay;
  this.bufferState = bufferState;
+ this.isPremounting = isPremounting;
+ this.isPostmounting = isPostmounting;
+ this.nonceManager = makeNonceManager();
+ this.input = new Input({
+ source: new UrlSource(this.src),
+ formats: ALL_FORMATS,
+ });
  if (canvas) {
  const context = canvas.getContext('2d', {
  alpha: true,
@@ -194,19 +88,8 @@ export class MediaPlayer {
  this.context = null;
  }
  }
- isReady() {
- return (this.initialized &&
- Boolean(this.sharedAudioContext) &&
- !this.input?.disposed);
- }
- hasAudio() {
- return Boolean(this.audioSink && this.sharedAudioContext && this.gainNode);
- }
- isCurrentlyBuffering() {
- return this.isBuffering && Boolean(this.bufferingStartedAtMs);
- }
  isDisposalError() {
- return this.input?.disposed === true;
+ return this.input.disposed === true;
  }
  initialize(startTimeUnresolved) {
  const promise = this._initialize(startTimeUnresolved);
@@ -215,17 +98,11 @@ export class MediaPlayer {
  }
  async _initialize(startTimeUnresolved) {
  try {
- const urlSource = new UrlSource(this.src);
- const input = new Input({
- source: urlSource,
- formats: ALL_FORMATS,
- });
- this.input = input;
- if (input.disposed) {
+ if (this.input.disposed) {
  return { type: 'disposed' };
  }
  try {
- await input.getFormat();
+ await this.input.getFormat();
  }
  catch (error) {
  if (this.isDisposalError()) {
@@ -239,10 +116,13 @@ export class MediaPlayer {
  return { type: 'unknown-container-format' };
  }
  const [durationInSeconds, videoTrack, audioTracks] = await Promise.all([
- input.computeDuration(),
- input.getPrimaryVideoTrack(),
- input.getAudioTracks(),
+ this.input.computeDuration(),
+ this.input.getPrimaryVideoTrack(),
+ this.input.getAudioTracks(),
  ]);
+ if (this.input.disposed) {
+ return { type: 'disposed' };
+ }
  this.totalDuration = durationInSeconds;
  const audioTrack = audioTracks[this.audioStreamIndex] ?? null;
  if (!videoTrack && !audioTrack) {
@@ -253,18 +133,18 @@ export class MediaPlayer {
  if (!canDecode) {
  return { type: 'cannot-decode' };
  }
- this.canvasSink = new CanvasSink(videoTrack, {
- poolSize: 2,
- fit: 'contain',
- alpha: true,
+ if (this.input.disposed) {
+ return { type: 'disposed' };
+ }
+ this.videoIteratorManager = videoIteratorManager({
+ videoTrack,
+ delayPlaybackHandleIfNotPremounting: this.delayPlaybackHandleIfNotPremounting,
+ context: this.context,
+ canvas: this.canvas,
+ getOnVideoFrameCallback: () => this.onVideoFrameCallback,
+ logLevel: this.logLevel,
+ drawDebugOverlay: this.drawDebugOverlay,
  });
- this.canvas.width = videoTrack.displayWidth;
- this.canvas.height = videoTrack.displayHeight;
- }
- if (audioTrack && this.sharedAudioContext) {
- this.audioSink = new AudioBufferSink(audioTrack);
- this.gainNode = this.sharedAudioContext.createGain();
- this.gainNode.connect(this.sharedAudioContext.destination);
  }
  const startTime = getTimeInSeconds({
  unloopedTimeInSeconds: startTimeUnresolved,
@@ -278,16 +158,29 @@ export class MediaPlayer {
  src: this.src,
  });
  if (startTime === null) {
- this.clearCanvas();
- return { type: 'success', durationInSeconds: this.totalDuration };
+ throw new Error(`should have asserted that the time is not null`);
  }
- if (this.sharedAudioContext) {
- this.audioSyncAnchor = this.sharedAudioContext.currentTime - startTime;
+ this.setPlaybackTime(startTime, this.playbackRate * this.globalPlaybackRate);
+ if (audioTrack) {
+ this.audioIteratorManager = audioIteratorManager({
+ audioTrack,
+ delayPlaybackHandleIfNotPremounting: this.delayPlaybackHandleIfNotPremounting,
+ sharedAudioContext: this.sharedAudioContext,
+ });
  }
- this.initialized = true;
+ const nonce = this.nonceManager.createAsyncOperation();
  try {
- this.startAudioIterator(startTime);
- await this.startVideoIterator(startTime, this.currentSeekNonce);
+ // intentionally not awaited
+ if (this.audioIteratorManager) {
+ this.audioIteratorManager.startAudioIterator({
+ nonce,
+ playbackRate: this.playbackRate * this.globalPlaybackRate,
+ startFromSecond: startTime,
+ getIsPlaying: () => this.playing,
+ scheduleAudioNode: this.scheduleAudioNode,
+ });
+ }
+ await this.videoIteratorManager?.startVideoIterator(startTime, nonce);
  }
  catch (error) {
  if (this.isDisposalError()) {
@@ -307,24 +200,7 @@ export class MediaPlayer {
  throw error;
  }
  }
- clearCanvas() {
- if (this.context && this.canvas) {
- this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
- }
- }
  async seekTo(time) {
- this.currentSeekNonce++;
- const nonce = this.currentSeekNonce;
- await this.seekPromiseChain;
- this.seekPromiseChain = this.seekToDoNotCallDirectly(time, nonce);
- await this.seekPromiseChain;
- }
- async seekToDoNotCallDirectly(time, nonce) {
- if (nonce !== this.currentSeekNonce) {
- return;
- }
- if (!this.isReady())
- return;
  const newTime = getTimeInSeconds({
  unloopedTimeInSeconds: time,
  playbackRate: this.playbackRate,
@@ -337,75 +213,129 @@ export class MediaPlayer {
  src: this.src,
  });
  if (newTime === null) {
- // invalidate in-flight video operations
- this.videoFrameIterator?.destroy();
- this.videoFrameIterator = null;
- this.clearCanvas();
- this.audioBufferIterator?.destroy();
- this.audioBufferIterator = null;
+ throw new Error(`should have asserted that the time is not null`);
+ }
+ const nonce = this.nonceManager.createAsyncOperation();
+ await this.seekPromiseChain;
+ this.seekPromiseChain = this.seekToDoNotCallDirectly(newTime, nonce);
+ await this.seekPromiseChain;
+ }
+ async seekToDoNotCallDirectly(newTime, nonce) {
+ if (nonce.isStale()) {
  return;
  }
  const currentPlaybackTime = this.getPlaybackTime();
  if (currentPlaybackTime === newTime) {
  return;
  }
- const satisfyResult = await this.videoFrameIterator?.tryToSatisfySeek(newTime);
- if (satisfyResult?.type === 'satisfied') {
- this.drawFrame(satisfyResult.frame);
- return;
- }
- if (this.currentSeekNonce !== nonce) {
- return;
+ const newAudioSyncAnchor = this.sharedAudioContext.currentTime -
+ newTime / (this.playbackRate * this.globalPlaybackRate);
+ const diff = Math.abs(newAudioSyncAnchor - this.audioSyncAnchor);
+ if (diff > 0.04) {
+ this.setPlaybackTime(newTime, this.playbackRate * this.globalPlaybackRate);
  }
- this.mediaEnded = false;
- this.audioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
- this.startAudioIterator(newTime);
- this.startVideoIterator(newTime, nonce);
+ await this.videoIteratorManager?.seek({
+ newTime,
+ nonce,
+ });
+ await this.audioIteratorManager?.seek({
+ newTime,
+ nonce,
+ fps: this.fps,
+ playbackRate: this.playbackRate * this.globalPlaybackRate,
+ getIsPlaying: () => this.playing,
+ scheduleAudioNode: this.scheduleAudioNode,
+ });
  }
- async play() {
- if (!this.isReady())
- return;
- if (!this.playing) {
- if (this.sharedAudioContext.state === 'suspended') {
- await this.sharedAudioContext.resume();
- }
- this.playing = true;
+ async play(time) {
+ const newTime = getTimeInSeconds({
+ unloopedTimeInSeconds: time,
+ playbackRate: this.playbackRate,
+ loop: this.loop,
+ trimBefore: this.trimBefore,
+ trimAfter: this.trimAfter,
+ mediaDurationInSeconds: this.totalDuration ?? null,
+ fps: this.fps,
+ ifNoMediaDuration: 'infinity',
+ src: this.src,
+ });
+ if (newTime === null) {
+ throw new Error(`should have asserted that the time is not null`);
  }
+ this.setPlaybackTime(newTime, this.playbackRate * this.globalPlaybackRate);
+ this.playing = true;
+ if (this.audioIteratorManager) {
+ this.audioIteratorManager.resumeScheduledAudioChunks({
+ playbackRate: this.playbackRate * this.globalPlaybackRate,
+ scheduleAudioNode: this.scheduleAudioNode,
+ });
+ }
+ if (this.sharedAudioContext.state === 'suspended') {
+ await this.sharedAudioContext.resume();
+ }
+ this.drawDebugOverlay();
  }
  pause() {
  this.playing = false;
- this.audioBufferIterator?.cleanupAudioQueue();
+ this.audioIteratorManager?.pausePlayback();
+ this.drawDebugOverlay();
  }
  setMuted(muted) {
- this.muted = muted;
- if (this.gainNode) {
- this.gainNode.gain.value = muted ? 0 : this.currentVolume;
- }
+ this.audioIteratorManager?.setMuted(muted);
  }
  setVolume(volume) {
- if (!this.gainNode) {
+ if (!this.audioIteratorManager) {
  return;
  }
- const appliedVolume = Math.max(0, volume);
- this.currentVolume = appliedVolume;
- if (!this.muted) {
- this.gainNode.gain.value = appliedVolume;
- }
+ this.audioIteratorManager.setVolume(volume);
+ }
+ setTrimBefore(trimBefore) {
+ this.trimBefore = trimBefore;
+ }
+ setTrimAfter(trimAfter) {
+ this.trimAfter = trimAfter;
  }
  setDebugOverlay(debugOverlay) {
  this.debugOverlay = debugOverlay;
  }
+ updateAfterPlaybackRateChange() {
+ if (!this.audioIteratorManager) {
+ return;
+ }
+ this.setPlaybackTime(this.getPlaybackTime(), this.playbackRate * this.globalPlaybackRate);
+ const iterator = this.audioIteratorManager.getAudioBufferIterator();
+ if (!iterator) {
+ return;
+ }
+ iterator.moveQueuedChunksToPauseQueue();
+ if (this.playing) {
+ this.audioIteratorManager.resumeScheduledAudioChunks({
+ playbackRate: this.playbackRate * this.globalPlaybackRate,
+ scheduleAudioNode: this.scheduleAudioNode,
+ });
+ }
+ }
  setPlaybackRate(rate) {
  this.playbackRate = rate;
+ this.updateAfterPlaybackRateChange();
+ }
+ setGlobalPlaybackRate(rate) {
+ this.globalPlaybackRate = rate;
+ this.updateAfterPlaybackRateChange();
  }
  setFps(fps) {
  this.fps = fps;
  }
+ setIsPremounting(isPremounting) {
+ this.isPremounting = isPremounting;
+ }
+ setIsPostmounting(isPostmounting) {
+ this.isPostmounting = isPostmounting;
+ }
  setLoop(loop) {
  this.loop = loop;
  }
  async dispose() {
- this.initialized = false;
  if (this.initializationPromise) {
  try {
  // wait for the init to finished
@@ -417,80 +347,24 @@ export class MediaPlayer {
  // Ignore initialization errors during disposal
  }
  }
- this.input?.dispose();
- this.videoFrameIterator?.destroy();
- this.videoFrameIterator = null;
- this.audioBufferIterator?.destroy();
- this.audioBufferIterator = null;
+ // Mark all async operations as stale
+ this.nonceManager.createAsyncOperation();
+ this.videoIteratorManager?.destroy();
+ this.audioIteratorManager?.destroy();
+ this.input.dispose();
  }
  getPlaybackTime() {
- return this.sharedAudioContext.currentTime - this.audioSyncAnchor;
- }
- scheduleAudioChunk(buffer, mediaTimestamp) {
- const targetTime = mediaTimestamp + this.audioSyncAnchor;
- const delay = targetTime - this.sharedAudioContext.currentTime;
- const node = this.sharedAudioContext.createBufferSource();
- node.buffer = buffer;
- node.playbackRate.value = this.playbackRate;
- node.connect(this.gainNode);
- if (delay >= 0) {
- node.start(targetTime);
- }
- else {
- node.start(this.sharedAudioContext.currentTime, -delay);
- }
- this.audioBufferIterator?.addQueuedAudioNode(node);
- node.onended = () => this.audioBufferIterator?.removeQueuedAudioNode(node);
+ return calculatePlaybackTime({
+ audioSyncAnchor: this.audioSyncAnchor,
+ currentTime: this.sharedAudioContext.currentTime,
+ playbackRate: this.playbackRate * this.globalPlaybackRate,
+ });
  }
- onBufferingChange(callback) {
- this.onBufferingChangeCallback = callback;
- return () => {
- if (this.onBufferingChangeCallback === callback) {
- this.onBufferingChangeCallback = undefined;
- }
- };
+ setPlaybackTime(time, playbackRate) {
+ this.audioSyncAnchor =
+ this.sharedAudioContext.currentTime - time / playbackRate;
  }
- onVideoFrame(callback) {
+ setVideoFrameCallback(callback) {
  this.onVideoFrameCallback = callback;
- if (this.initialized && callback && this.canvas) {
- callback(this.canvas);
- }
- return () => {
- if (this.onVideoFrameCallback === callback) {
- this.onVideoFrameCallback = undefined;
- }
- };
- }
- drawDebugOverlay() {
- if (!this.debugOverlay)
- return;
- if (this.context && this.canvas) {
- drawPreviewOverlay(this.context, this.debugStats, this.sharedAudioContext.state, this.sharedAudioContext.currentTime);
- }
- }
- setBufferingState(isBuffering) {
- if (this.isBuffering !== isBuffering) {
- this.isBuffering = isBuffering;
- if (isBuffering) {
- this.bufferingStartedAtMs = performance.now();
- this.onBufferingChangeCallback?.(true);
- }
- else {
- this.bufferingStartedAtMs = null;
- this.onBufferingChangeCallback?.(false);
- }
- }
- }
- maybeResumeFromBuffering(currentBufferDuration) {
- if (!this.isCurrentlyBuffering())
- return;
- const now = performance.now();
- const bufferingDuration = now - this.bufferingStartedAtMs;
- const minTimeElapsed = bufferingDuration >= this.minBufferingTimeoutMs;
- const bufferHealthy = currentBufferDuration >= HEALTHY_BUFFER_THRESHOLD_SECONDS;
- if (minTimeElapsed && bufferHealthy) {
- Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Resuming from buffering after ${bufferingDuration}ms - buffer recovered`);
- this.setBufferingState(false);
- }
  }
  }
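The `calculatePlaybackTime` helper imported from './calculate-playbacktime' is not included in this diff. Based on how `setPlaybackTime` above stores the anchor (`audioSyncAnchor = currentTime - time / playbackRate`), a plausible sketch of the inverse mapping follows; this is an assumption for illustration, not the published implementation:

```ts
// Hypothetical sketch: inverting setPlaybackTime.
// setPlaybackTime stores: audioSyncAnchor = currentTime - time / playbackRate
// so solving for the current media time gives:
const calculatePlaybackTime = ({
  audioSyncAnchor,
  currentTime,
  playbackRate,
}: {
  audioSyncAnchor: number; // anchor, in AudioContext (wall-clock) seconds
  currentTime: number;     // AudioContext.currentTime
  playbackRate: number;    // playbackRate * globalPlaybackRate
}): number => {
  return (currentTime - audioSyncAnchor) * playbackRate;
};
```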
@@ -0,0 +1,9 @@
+ export type Nonce = {
+ isStale: () => boolean;
+ };
+ export type NonceManager = ReturnType<typeof makeNonceManager>;
+ export declare const makeNonceManager: () => {
+ createAsyncOperation: () => {
+ isStale: () => boolean;
+ };
+ };
@@ -0,0 +1,13 @@
+ export const makeNonceManager = () => {
+ let nonce = 0;
+ const createAsyncOperation = () => {
+ nonce++;
+ const currentNonce = nonce;
+ return {
+ isStale: () => nonce !== currentNonce,
+ };
+ };
+ return {
+ createAsyncOperation,
+ };
+ };
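The nonce manager added above replaces the previous `currentSeekNonce` counter: every new async operation bumps a shared counter, and earlier operations can check `isStale()` after each await and bail out. A short usage sketch mirroring how `seekToDoNotCallDirectly` and `dispose` use it in this diff (`decodeFramesAround` and `drawFrame` are hypothetical placeholders for the async work):

```ts
import { makeNonceManager } from './nonce-manager';

const manager = makeNonceManager();

const seek = async (time: number) => {
  // Creating a new operation marks every earlier one as stale.
  const nonce = manager.createAsyncOperation();
  await decodeFramesAround(time); // hypothetical async work
  if (nonce.isStale()) {
    return; // a newer seek (or dispose) started in the meantime; drop this result
  }
  drawFrame(time); // hypothetical
};

// dispose() simply creates one more operation, invalidating everything in flight:
const dispose = () => {
  manager.createAsyncOperation();
};
```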