@remotion/media 4.0.451 → 4.0.453

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -37,15 +37,15 @@ var __callDispose = (stack, error, hasError) => {
  };
 
  // src/audio/audio.tsx
- import { Internals as Internals19, Sequence, useRemotionEnvironment as useRemotionEnvironment2 } from "remotion";
+ import { Internals as Internals17, Sequence, useRemotionEnvironment as useRemotionEnvironment2 } from "remotion";
 
  // src/audio/audio-for-preview.tsx
  import { useContext as useContext3, useEffect as useEffect2, useMemo as useMemo2, useRef, useState as useState2 } from "react";
  import {
- Internals as Internals10,
+ Internals as Internals8,
  Audio as RemotionAudio,
  useBufferState,
- useCurrentFrame as useCurrentFrame2,
+ useCurrentFrame,
  useVideoConfig as useVideoConfig2
  } from "remotion";
 
@@ -102,219 +102,123 @@ var calculateEndTime = ({
 
  // src/media-player.ts
  import { ALL_FORMATS, Input, UrlSource } from "mediabunny";
- import { Internals as Internals6 } from "remotion";
+ import { Internals as Internals4 } from "remotion";
 
  // src/audio-iterator-manager.ts
  import { AudioBufferSink, InputDisposedError } from "mediabunny";
- import { Internals as Internals4 } from "remotion";
-
- // src/audio/audio-preview-iterator.ts
- import { Internals as Internals3 } from "remotion";
-
- // src/helpers/round-to-4-digits.ts
- var roundTo4Digits = (timestamp) => {
- return Math.round(timestamp * 1000) / 1000;
- };
-
- // src/set-global-time-anchor.ts
  import { Internals as Internals2 } from "remotion";
- var ALLOWED_GLOBAL_TIME_ANCHOR_SHIFT = 0.1;
- var setGlobalTimeAnchor = ({
- audioContext,
- audioSyncAnchor,
- absoluteTimeInSeconds,
- globalPlaybackRate,
- debugAudioScheduling,
- logLevel
- }) => {
- const newAnchor = audioContext.currentTime - absoluteTimeInSeconds / globalPlaybackRate;
- const shift = (newAnchor - audioSyncAnchor.value) * globalPlaybackRate;
- if (Math.abs(shift) < ALLOWED_GLOBAL_TIME_ANCHOR_SHIFT) {
- return;
+
+ // src/make-iterator-with-priming.ts
+ var AUDIO_PRIMING_SECONDS = 0.5;
+ async function* makeIteratorWithPrimingInner(audioSink, timeToSeek, maximumTimestamp) {
+ const primingStart = Math.max(0, timeToSeek - AUDIO_PRIMING_SECONDS);
+ const iterator = audioSink.buffers(primingStart, maximumTimestamp);
+ for await (const buffer of iterator) {
+ if (buffer.timestamp + buffer.duration <= timeToSeek) {
+ continue;
+ }
+ yield {
+ buffer,
+ timestamp: buffer.timestamp
+ };
  }
- if (debugAudioScheduling) {
- Internals2.Log.info({ logLevel, tag: "audio-scheduling" }, "Anchor changed from %s to %s with shift %s", audioSyncAnchor.value, newAnchor, shift);
+ }
+ async function* makeLoopingIterator({
+ audioSink,
+ segmentStartInSeconds,
+ segmentEndInSeconds,
+ playbackRate,
+ sequenceDurationInSeconds
+ }) {
+ const duration = segmentEndInSeconds - segmentStartInSeconds;
+ let iteration = 0;
+ let broken = false;
+ while (true) {
+ for await (const item of makeIteratorWithPrimingInner(audioSink, segmentStartInSeconds, segmentEndInSeconds)) {
+ const timestamp = item.timestamp + iteration * duration;
+ const endTimestamp = duration * iteration + (item.timestamp - segmentStartInSeconds + item.buffer.duration);
+ if (endTimestamp > sequenceDurationInSeconds * playbackRate) {
+ broken = true;
+ break;
+ }
+ yield {
+ buffer: item.buffer,
+ timestamp
+ };
+ }
+ if (broken) {
+ break;
+ }
+ iteration++;
  }
- audioSyncAnchor.value = newAnchor;
+ }
+ var makeIteratorWithPriming = ({
+ audioSink,
+ timeToSeek,
+ maximumTimestamp,
+ loop,
+ playbackRate,
+ sequenceDurationInSeconds
+ }) => {
+ if (loop) {
+ return makeLoopingIterator({
+ audioSink,
+ segmentStartInSeconds: timeToSeek,
+ segmentEndInSeconds: maximumTimestamp,
+ playbackRate,
+ sequenceDurationInSeconds
+ });
+ }
+ return makeIteratorWithPrimingInner(audioSink, timeToSeek, maximumTimestamp);
  };
 
  // src/audio/audio-preview-iterator.ts
+ var ALLOWED_GLOBAL_TIME_ANCHOR_SHIFT = 0.1;
  var makeAudioIterator = ({
  startFromSecond,
  maximumTimestamp,
- cache,
- debugAudioScheduling
+ audioSink,
+ loop,
+ playbackRate,
+ sequenceDurationInSeconds,
+ unscheduleAudioNode
  }) => {
  let destroyed = false;
- const iterator = cache.makeIteratorOrUsePrewarmed(startFromSecond, maximumTimestamp);
+ const iterator = makeIteratorWithPriming({
+ audioSink,
+ timeToSeek: startFromSecond,
+ maximumTimestamp,
+ loop,
+ playbackRate,
+ sequenceDurationInSeconds
+ });
  const queuedAudioNodes = [];
- const audioChunksForAfterResuming = [];
  let mostRecentTimestamp = -Infinity;
- let pendingNext = null;
- const cleanupAudioQueue = (audioContext) => {
+ const cleanupAudioQueue = () => {
  for (const node of queuedAudioNodes) {
+ unscheduleAudioNode(node.node);
  try {
- const isAlreadyPlaying = node.scheduledTime - ALLOWED_GLOBAL_TIME_ANCHOR_SHIFT < audioContext.audioContext.currentTime;
- const wasScheduledForThisAnchor = node.scheduledAtAnchor === audioContext.audioSyncAnchor.value;
- if (isAlreadyPlaying && wasScheduledForThisAnchor) {
- continue;
- }
- if (debugAudioScheduling) {
- const currentlyHearing = audioContext.audioContext.getOutputTimestamp().contextTime;
- const nodeEndTime = node.scheduledTime + node.buffer.duration / node.playbackRate;
- Internals3.Log.info({ logLevel: "trace", tag: "audio-scheduling" }, `Stopping node ${node.timestamp.toFixed(3)}, currently hearing = ${currentlyHearing.toFixed(3)} currentTime = ${audioContext.audioContext.currentTime.toFixed(3)} nodeEndTime = ${nodeEndTime.toFixed(3)} scheduledTime = ${node.scheduledTime.toFixed(3)}`);
- }
  node.node.stop();
  } catch {}
  }
  queuedAudioNodes.length = 0;
  };
- const getNextOrNullIfNotAvailable = async () => {
- let next = pendingNext;
- if (!next) {
- next = iterator.next();
- }
- pendingNext = null;
- const result = await Promise.race([
- next,
- new Promise((resolve) => {
- Promise.resolve().then(() => resolve());
- })
- ]);
- if (!result) {
- pendingNext = next;
- return {
- type: "need-to-wait-for-it",
- waitPromise: async () => {
- const res = await next;
- return res.value;
- }
- };
+ const getNextFn = async () => {
+ const next = await iterator.next();
+ if (next.value) {
+ mostRecentTimestamp = Math.max(mostRecentTimestamp, next.value.timestamp + next.value.buffer.duration);
  }
- if (result.value) {
- mostRecentTimestamp = Math.max(mostRecentTimestamp, result.value.timestamp + result.value.duration);
- pendingNext = iterator.next();
- return {
- type: "got-buffer",
- buffer: result.value
- };
- }
- return {
- type: "got-end",
- mostRecentTimestamp
- };
- };
- const tryToSatisfySeek = async (time, onBufferScheduled) => {
- if (time < startFromSecond) {
- return {
- type: "not-satisfied",
- reason: `time requested is before the start of the iterator`
- };
- }
- while (true) {
- const buffer = await getNextOrNullIfNotAvailable();
- if (buffer.type === "need-to-wait-for-it") {
- return {
- type: "not-satisfied",
- reason: "iterator did not have buffer ready"
- };
- }
- if (buffer.type === "got-end") {
- if (time >= mostRecentTimestamp) {
- return {
- type: "ended"
- };
- }
- return {
- type: "not-satisfied",
- reason: `iterator ended before the requested time`
- };
- }
- if (buffer.type === "got-buffer") {
- const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);
- const bufferEndTimestamp = roundTo4Digits(buffer.buffer.timestamp + buffer.buffer.duration);
- const timestamp = roundTo4Digits(time);
- if (timestamp < bufferTimestamp) {
- return {
- type: "not-satisfied",
- reason: `iterator is too far, most recently returned ${bufferTimestamp}-${bufferEndTimestamp}, requested ${timestamp}`
- };
- }
- if (bufferTimestamp <= timestamp && bufferEndTimestamp >= timestamp) {
- onBufferScheduled(buffer.buffer);
- return {
- type: "satisfied"
- };
- }
- onBufferScheduled(buffer.buffer);
- continue;
- }
- throw new Error("Unreachable");
- }
- };
- const bufferAsFarAsPossible = async (onBufferScheduled, maxTimestamp) => {
- while (true) {
- if (mostRecentTimestamp >= maxTimestamp) {
- return { type: "max-reached" };
- }
- const buffer = await getNextOrNullIfNotAvailable();
- if (buffer.type === "need-to-wait-for-it") {
- return { type: "waiting" };
- }
- if (buffer.type === "got-end") {
- return { type: "ended" };
- }
- if (buffer.type === "got-buffer") {
- onBufferScheduled(buffer.buffer);
- continue;
- }
- throw new Error("Unreachable");
- }
- };
- const removeAndReturnAllQueuedAudioNodes = () => {
- const nodes = queuedAudioNodes.slice();
- for (const node of nodes) {
- try {
- node.node.stop();
- } catch {}
- }
- queuedAudioNodes.length = 0;
- return nodes;
- };
- const addChunkForAfterResuming = (buffer, timestamp) => {
- audioChunksForAfterResuming.push({
- buffer,
- timestamp
- });
- };
- const moveQueuedChunksToPauseQueue = () => {
- const toQueue = removeAndReturnAllQueuedAudioNodes();
- for (const chunk of toQueue) {
- addChunkForAfterResuming(chunk.buffer, chunk.timestamp);
- }
- if (debugAudioScheduling && toQueue.length > 0) {
- Internals3.Log.trace({ logLevel: "trace", tag: "audio-scheduling" }, `Moved ${toQueue.length} ${toQueue.length === 1 ? "chunk" : "chunks"} to pause queue (${toQueue[0].timestamp.toFixed(3)}-${toQueue[toQueue.length - 1].timestamp + toQueue[toQueue.length - 1].buffer.duration.toFixed(3)})`);
- }
- };
- const getNumberOfChunksAfterResuming = () => {
- return audioChunksForAfterResuming.length;
+ return next;
  };
  return {
- destroy: (audioContext) => {
- cleanupAudioQueue(audioContext);
+ destroy: () => {
+ cleanupAudioQueue();
  destroyed = true;
  iterator.return().catch(() => {
  return;
  });
- audioChunksForAfterResuming.length = 0;
- },
- getNext: async () => {
- const next = await iterator.next();
- if (next.value) {
- mostRecentTimestamp = Math.max(mostRecentTimestamp, next.value.timestamp + next.value.duration);
- }
- return next;
  },
+ getNextFn,
  isDestroyed: () => {
  return destroyed;
  },
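Note on the hunk above: the new src/make-iterator-with-priming.ts starts decoding AUDIO_PRIMING_SECONDS (0.5s) before the seek target and discards every buffer that ends before the target, so the decoder is already warm when the first audible buffer is needed; in loop mode, makeLoopingIterator re-runs the same segment and shifts each buffer's timestamp by iteration * duration until the sequence duration is filled. A minimal TypeScript sketch of the priming idea, with a hypothetical TimedBuffer type standing in for mediabunny's audio buffers:

// Sketch only: hypothetical types, not the package's API.
type TimedBuffer = { timestamp: number; duration: number };

const PRIMING_SECONDS = 0.5; // mirrors AUDIO_PRIMING_SECONDS above

async function* withPriming(
  source: (from: number, to: number) => AsyncIterable<TimedBuffer>,
  seekTime: number,
  end: number,
): AsyncGenerator<TimedBuffer> {
  // Begin decoding slightly before the target...
  const primedStart = Math.max(0, seekTime - PRIMING_SECONDS);
  for await (const buf of source(primedStart, end)) {
    // ...but never emit buffers that end before it; they only serve to
    // warm up the decoder.
    if (buf.timestamp + buf.duration <= seekTime) {
      continue;
    }
    yield buf;
  }
}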
@@ -323,7 +227,6 @@ var makeAudioIterator = ({
  timestamp,
  buffer,
  scheduledTime,
- playbackRate,
  scheduledAtAnchor
  }) => {
  queuedAudioNodes.push({
@@ -335,16 +238,8 @@ var makeAudioIterator = ({
  scheduledAtAnchor
  });
  },
- removeQueuedAudioNode: (node) => {
- const index = queuedAudioNodes.findIndex((n) => n.node === node);
- if (index !== -1) {
- queuedAudioNodes.splice(index, 1);
- }
- },
- getAndClearAudioChunksForAfterResuming: () => {
- const chunks = audioChunksForAfterResuming.slice();
- audioChunksForAfterResuming.length = 0;
- return chunks;
+ guessNextTimestamp: () => {
+ return !Number.isFinite(mostRecentTimestamp) ? startFromSecond : mostRecentTimestamp;
  },
  getQueuedPeriod: () => {
  let until = -Infinity;
@@ -353,10 +248,6 @@ var makeAudioIterator = ({
  until = Math.max(until, node.timestamp + node.buffer.duration);
  from = Math.min(from, node.timestamp);
  }
- for (const chunk of audioChunksForAfterResuming) {
- until = Math.max(until, chunk.timestamp + chunk.buffer.duration);
- from = Math.min(from, chunk.timestamp);
- }
  if (!Number.isFinite(from) || !Number.isFinite(until)) {
  return null;
  }
@@ -364,12 +255,7 @@ var makeAudioIterator = ({
  from,
  until
  };
- },
- tryToSatisfySeek,
- bufferAsFarAsPossible,
- addChunkForAfterResuming,
- moveQueuedChunksToPauseQueue,
- getNumberOfChunksAfterResuming
+ }
  };
  };
  var isAlreadyQueued = (time, queuedPeriod) => {
@@ -379,224 +265,183 @@ var isAlreadyQueued = (time, queuedPeriod) => {
  return time >= queuedPeriod.from && time < queuedPeriod.until;
  };
 
- // src/make-iterator-with-priming.ts
- var AUDIO_PRIMING_SECONDS = 0.5;
- var PREDECODE_AHEAD_SECONDS = 2;
- function makePredecodingIterator(inner) {
- const buffer = [];
- let consumerEndTime = 0;
- let innerDone = false;
- let returned = false;
- let fetching = false;
- let waiter = null;
- const prefetch = () => {
- if (fetching || returned || innerDone) {
- return;
- }
- const lastBuffered = buffer.length > 0 ? buffer[buffer.length - 1] : null;
- const bufferedEndTime = lastBuffered ? lastBuffered.timestamp + lastBuffered.duration : consumerEndTime;
- if (bufferedEndTime >= consumerEndTime + PREDECODE_AHEAD_SECONDS) {
- return;
- }
- fetching = true;
- inner.next().then((result) => {
- fetching = false;
- if (returned) {
- return;
- }
- if (result.done) {
- innerDone = true;
- if (waiter) {
- const w = waiter;
- waiter = null;
- w({ value: undefined, done: true });
- }
- return;
- }
- if (waiter) {
- const w = waiter;
- waiter = null;
- const buf = result.value;
- consumerEndTime = buf.timestamp + buf.duration;
- w({ value: buf, done: false });
- prefetch();
- return;
- }
- buffer.push(result.value);
- prefetch();
- }, () => {
- fetching = false;
- innerDone = true;
- if (waiter) {
- const w = waiter;
- waiter = null;
- w({ value: undefined, done: true });
- }
- });
- };
- prefetch();
- const _return = () => {
- returned = true;
- buffer.length = 0;
- if (waiter) {
- const w = waiter;
- waiter = null;
- w({ value: undefined, done: true });
- }
- inner.return(undefined);
- return Promise.resolve({ value: undefined, done: true });
- };
- const iterator = {
- next() {
- if (buffer.length > 0) {
- const buf = buffer.shift();
- consumerEndTime = buf.timestamp + buf.duration;
- prefetch();
- return Promise.resolve({ value: buf, done: false });
- }
- if (innerDone) {
- return Promise.resolve({
- value: undefined,
- done: true
- });
- }
- return new Promise((resolve) => {
- waiter = resolve;
- prefetch();
- });
- },
- return: _return,
- throw(e) {
- returned = true;
- buffer.length = 0;
- return inner.throw(e);
- },
- [Symbol.asyncIterator]() {
- return iterator;
- }
- };
- return iterator;
- }
- async function* makeIteratorWithPrimingInner(audioSink, timeToSeek, maximumTimestamp) {
- const primingStart = Math.max(0, timeToSeek - AUDIO_PRIMING_SECONDS);
- const iterator = audioSink.buffers(primingStart, maximumTimestamp);
- for await (const buffer of iterator) {
- if (buffer.timestamp + buffer.duration <= timeToSeek) {
- continue;
- }
- yield buffer;
- }
- }
- var makeIteratorWithPriming = ({
- audioSink,
- timeToSeek,
- maximumTimestamp
+ // src/audio/get-scheduled-time.ts
+ var getScheduledTime = ({
+ mediaTimestamp,
+ targetTime,
+ currentTime,
+ sequenceStartTime
  }) => {
- return makePredecodingIterator(makeIteratorWithPrimingInner(audioSink, timeToSeek, maximumTimestamp));
+ const needsTrimStart = mediaTimestamp < sequenceStartTime;
+ const offsetBecauseOfTrim = needsTrimStart ? sequenceStartTime - mediaTimestamp : 0;
+ const offsetBecauseOfTooLate = targetTime < 0 ? -targetTime : 0;
+ const offset = offsetBecauseOfTrim + offsetBecauseOfTooLate;
+ const scheduledTime = targetTime + currentTime + offset;
+ return scheduledTime;
  };
-
- // src/prewarm-iterator-for-looping.ts
- var makePrewarmedVideoIteratorCache = (videoSink) => {
- const prewarmedVideoIterators = new Map;
- const prewarmIteratorForLooping = ({ timeToSeek }) => {
- if (!prewarmedVideoIterators.has(timeToSeek)) {
- prewarmedVideoIterators.set(timeToSeek, videoSink.canvases(timeToSeek));
- }
- };
- const makeIteratorOrUsePrewarmed = (timeToSeek) => {
- const prewarmedIterator = prewarmedVideoIterators.get(timeToSeek);
- if (prewarmedIterator) {
- prewarmedVideoIterators.delete(timeToSeek);
- return prewarmedIterator;
- }
- const iterator = videoSink.canvases(timeToSeek);
- return iterator;
- };
- const destroy = () => {
- for (const iterator of prewarmedVideoIterators.values()) {
- iterator.return();
- }
- prewarmedVideoIterators.clear();
- };
- return {
- prewarmIteratorForLooping,
- makeIteratorOrUsePrewarmed,
- destroy
- };
+ var getDurationOfNode = ({
+ bufferDuration,
+ loopSegmentMediaEndTimestamp,
+ offset,
+ originalUnloopedMediaTimestamp
+ }) => {
+ const originalUnloopedMediaEndTime = originalUnloopedMediaTimestamp + bufferDuration;
+ const needsTrimEnd = originalUnloopedMediaEndTime > loopSegmentMediaEndTimestamp;
+ const durationMinusOffset = bufferDuration - offset;
+ const duration = needsTrimEnd ? durationMinusOffset - Math.max(0, originalUnloopedMediaEndTime - loopSegmentMediaEndTimestamp) : durationMinusOffset;
+ return duration;
  };
- var makeKey = (timeToSeek, maximumTimestamp) => {
- return `${timeToSeek}-${maximumTimestamp}`;
+ var getOffset = ({
+ mediaTimestamp,
+ targetTime,
+ sequenceStartTime
+ }) => {
+ const needsTrimStart = mediaTimestamp < sequenceStartTime;
+ const offsetBecauseOfTrim = needsTrimStart ? sequenceStartTime - mediaTimestamp : 0;
+ const offsetBecauseOfTooLate = targetTime < 0 ? -targetTime : 0;
+ const offset = offsetBecauseOfTrim + offsetBecauseOfTooLate;
+ return offset;
  };
- var makePrewarmedAudioIteratorCache = (audioSink) => {
- const prewarmedAudioIterators = new Map;
- const prewarmIteratorForLooping = ({
- timeToSeek,
- maximumTimestamp
- }) => {
- if (!prewarmedAudioIterators.has(makeKey(timeToSeek, maximumTimestamp))) {
- prewarmedAudioIterators.set(makeKey(timeToSeek, maximumTimestamp), makeIteratorWithPriming({ audioSink, timeToSeek, maximumTimestamp }));
+
+ // src/audio/sort-by-priority.ts
+ class StaleWaiterError extends Error {
+ constructor() {
+ super("Waiter became stale before it got its turn");
+ this.name = "StaleWaiterError";
+ }
+ }
+ var CONCURRENCY = 1;
+ var waiters = [];
+ var running = 0;
+ var processNext = () => {
+ if (running >= CONCURRENCY) {
+ return;
+ }
+ const staleWaiters = [];
+ for (let i = waiters.length - 1;i >= 0; i--) {
+ if (waiters[i].getPriority() === null) {
+ const [stale] = waiters.splice(i, 1);
+ staleWaiters.push(stale);
  }
- };
- const makeIteratorOrUsePrewarmed = (timeToSeek, maximumTimestamp) => {
- const prewarmedIterator = prewarmedAudioIterators.get(makeKey(timeToSeek, maximumTimestamp));
- if (prewarmedIterator) {
- prewarmedAudioIterators.delete(makeKey(timeToSeek, maximumTimestamp));
- return prewarmedIterator;
+ }
+ for (const stale of staleWaiters) {
+ stale.onError(new StaleWaiterError);
+ }
+ if (waiters.length === 0) {
+ return;
+ }
+ let bestIndex = 0;
+ let bestPriority = waiters[0].getPriority();
+ if (bestPriority === null) {
+ throw new Error("Stale waiter should have been removed");
+ }
+ for (let i = 1;i < waiters.length; i++) {
+ const priority = waiters[i].getPriority();
+ if (priority === null) {
+ throw new Error("Stale waiter should have been removed");
  }
- const iterator = makeIteratorWithPriming({
- audioSink,
- timeToSeek,
- maximumTimestamp
- });
- return iterator;
- };
- const destroy = () => {
- for (const iterator of prewarmedAudioIterators.values()) {
- iterator.return();
+ if (priority < bestPriority) {
+ bestPriority = priority;
+ bestIndex = i;
  }
- prewarmedAudioIterators.clear();
- };
- return {
- prewarmIteratorForLooping,
- makeIteratorOrUsePrewarmed,
- destroy
- };
+ }
+ if (bestPriority > 2) {
+ return;
+ }
+ const [next] = waiters.splice(bestIndex, 1);
+ running++;
+ next.fn().then((value) => {
+ running--;
+ next.onDone(value, processNext);
+ }, (err) => {
+ running--;
+ next.onError(err);
+ });
+ };
+ var waitForTurn = ({
+ getPriority,
+ fn,
+ onDone,
+ onError
+ }) => {
+ waiters.push({
+ getPriority,
+ fn,
+ onDone,
+ onError
+ });
+ processNext();
  };
 
  // src/audio-iterator-manager.ts
- var MAX_BUFFER_AHEAD_SECONDS = 2;
  var audioIteratorManager = ({
  audioTrack,
  delayPlaybackHandleIfNotPremounting,
  sharedAudioContext,
- getIsLooping,
- getEndTime,
+ getSequenceEndTimestamp,
+ getSequenceDurationInSeconds,
+ getMediaEndTimestamp,
  getStartTime,
  initialMuted,
- drawDebugOverlay
+ drawDebugOverlay,
+ initialPlaybackRate,
+ initialTrimBefore,
+ initialTrimAfter,
+ initialSequenceOffset,
+ initialSequenceDurationInFrames,
+ initialLoop,
+ initialFps
  }) => {
  let muted = initialMuted;
  let currentVolume = 1;
+ let currentSeek = {
+ time: -1,
+ playbackRate: initialPlaybackRate,
+ trimBefore: initialTrimBefore,
+ trimAfter: initialTrimAfter,
+ sequenceOffset: initialSequenceOffset,
+ sequenceDurationInFrames: initialSequenceDurationInFrames,
+ loop: initialLoop,
+ fps: initialFps
+ };
  const gainNode = sharedAudioContext.audioContext.createGain();
- gainNode.connect(sharedAudioContext.audioContext.destination);
+ gainNode.connect(sharedAudioContext.gainNode);
  const audioSink = new AudioBufferSink(audioTrack);
- const prewarmedAudioIteratorCache = makePrewarmedAudioIteratorCache(audioSink);
  let audioBufferIterator = null;
  let audioIteratorsCreated = 0;
+ let totalAudioScheduledInSeconds = 0;
  let currentDelayHandle = null;
+ const pendingScheduleWaiters = [];
+ const notifyNodeScheduled = () => {
+ for (let i = pendingScheduleWaiters.length - 1;i >= 0; i--) {
+ const waiter = pendingScheduleWaiters[i];
+ waiter.remaining--;
+ if (waiter.remaining <= 0) {
+ waiter.resolve();
+ pendingScheduleWaiters.splice(i, 1);
+ }
+ }
+ };
+ const waitForNScheduledNodes = (n) => {
+ if (n <= 0) {
+ return Promise.resolve();
+ }
+ return new Promise((resolve) => {
+ pendingScheduleWaiters.push({ remaining: n, resolve });
+ });
+ };
  const scheduleAudioChunk = ({
  buffer,
  mediaTimestamp,
+ originalUnloopedMediaTimestamp,
  playbackRate,
  scheduleAudioNode,
- debugAudioScheduling
+ logLevel,
+ currentTime
  }) => {
  if (!audioBufferIterator) {
  throw new Error("Audio buffer iterator not found");
  }
- if (sharedAudioContext.audioContext.state !== "running") {
- throw new Error("Tried to schedule node while audio context is not running");
- }
  if (muted) {
  return;
  }
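Note on the hunk above: src/audio/sort-by-priority.ts replaces the old eager buffering with a module-level turn queue. Each waiter's getPriority() reports how far in the future its next buffer would be scheduled (null once its iterator is destroyed); with CONCURRENCY = 1 only the most urgent waiter may pull a buffer, and nothing runs while the best priority is more than 2 seconds out. A reduced TypeScript sketch of that gating pattern (simplified names, not the shipped code):

// Sketch only: a single-slot priority gate over competing audio iterators.
type Waiter = {
  getPriority: () => number | null; // null = stale, evict
  run: () => Promise<void>;
};

const queue: Waiter[] = [];
let busy = false;

function pump(): void {
  if (busy) return;
  // Evict waiters that became stale while queued.
  for (let i = queue.length - 1; i >= 0; i--) {
    if (queue[i].getPriority() === null) queue.splice(i, 1);
  }
  if (queue.length === 0) return;
  const priorityOf = (w: Waiter) => w.getPriority() ?? Infinity;
  // Pick the waiter whose next buffer is due soonest.
  let best = 0;
  for (let i = 1; i < queue.length; i++) {
    if (priorityOf(queue[i]) < priorityOf(queue[best])) best = i;
  }
  if (priorityOf(queue[best]) > 2) return; // nothing due within ~2 seconds
  const [next] = queue.splice(best, 1);
  busy = true;
  next.run().finally(() => {
    busy = false;
    pump(); // hand the slot to the next most urgent waiter
  });
}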
@@ -604,252 +449,247 @@ var audioIteratorManager = ({
  node.buffer = buffer;
  node.playbackRate.value = playbackRate;
  node.connect(gainNode);
- const started = scheduleAudioNode(node, mediaTimestamp);
+ const started = scheduleAudioNode(node, mediaTimestamp, originalUnloopedMediaTimestamp, currentTime);
  if (started.type === "not-started") {
- if (debugAudioScheduling) {
- Internals4.Log.info({ logLevel: "trace", tag: "audio-scheduling" }, "not started, disconnected: %s %s", mediaTimestamp.toFixed(3), buffer.duration.toFixed(3));
- }
+ Internals2.Log.verbose({ logLevel, tag: "audio-scheduling" }, "not started, disconnected: %s %s", mediaTimestamp.toFixed(3), buffer.duration.toFixed(3));
  node.disconnect();
  return;
  }
- const iterator = audioBufferIterator;
- iterator.addQueuedAudioNode({
+ audioBufferIterator.addQueuedAudioNode({
  node,
  timestamp: mediaTimestamp,
  buffer,
  scheduledTime: started.scheduledTime,
- playbackRate,
  scheduledAtAnchor: sharedAudioContext.audioSyncAnchor.value
  });
- node.onended = () => {
- setTimeout(() => {
- iterator.removeQueuedAudioNode(node);
- }, 30);
- };
- };
- const resumeScheduledAudioChunks = ({
- playbackRate,
- scheduleAudioNode,
- debugAudioScheduling
- }) => {
- if (muted) {
- return;
- }
- if (!audioBufferIterator) {
- return;
- }
- for (const chunk of audioBufferIterator.getAndClearAudioChunksForAfterResuming()) {
- scheduleAudioChunk({
- buffer: chunk.buffer,
- mediaTimestamp: chunk.timestamp,
- playbackRate,
- scheduleAudioNode,
- debugAudioScheduling
- });
- }
  };
  const onAudioChunk = ({
- getIsPlaying,
  buffer,
  playbackRate,
  scheduleAudioNode,
- debugAudioScheduling
+ logLevel,
+ currentTime
  }) => {
  if (muted) {
  return;
  }
  const startTime = getStartTime();
- const endTime = getEndTime();
- if (buffer.timestamp + buffer.duration <= startTime) {
+ const sequenceEndTime = getSequenceEndTimestamp();
+ if (buffer.timestamp + buffer.buffer.duration <= startTime) {
  return;
  }
- if (buffer.timestamp >= endTime) {
+ if (buffer.timestamp >= sequenceEndTime) {
  return;
  }
- if (getIsPlaying() && sharedAudioContext.audioContext.state === "running" && (sharedAudioContext.audioContext.getOutputTimestamp().contextTime ?? 0) > 0) {
- resumeScheduledAudioChunks({
- playbackRate,
- scheduleAudioNode,
- debugAudioScheduling
- });
- scheduleAudioChunk({
- buffer: buffer.buffer,
- mediaTimestamp: buffer.timestamp,
- playbackRate,
- scheduleAudioNode,
- debugAudioScheduling
- });
- } else {
- if (!audioBufferIterator) {
- throw new Error("Audio buffer iterator not found");
- }
- if (debugAudioScheduling) {
- Internals4.Log.info({ logLevel: "trace", tag: "audio-scheduling" }, "not ready, added to queue: %s %s", buffer.timestamp.toFixed(3), buffer.duration.toFixed(3));
- }
- audioBufferIterator.addChunkForAfterResuming(buffer.buffer, buffer.timestamp);
- }
+ const scheduledStart = Math.max(buffer.timestamp, startTime);
+ const scheduledEnd = Math.min(buffer.timestamp + buffer.buffer.duration, sequenceEndTime);
+ totalAudioScheduledInSeconds += Math.max(0, scheduledEnd - scheduledStart);
+ scheduleAudioChunk({
+ buffer: buffer.buffer.buffer,
+ mediaTimestamp: buffer.timestamp,
+ playbackRate,
+ scheduleAudioNode,
+ logLevel,
+ originalUnloopedMediaTimestamp: buffer.buffer.timestamp,
+ currentTime
+ });
  drawDebugOverlay();
  };
- const startAudioIterator = async ({
+ const proceedScheduling = ({
+ iterator,
  nonce,
+ getTargetTime,
  playbackRate,
- startFromSecond,
- getIsPlaying,
  scheduleAudioNode,
- debugAudioScheduling
+ onScheduled,
+ onDestroyed,
+ onDone,
+ logLevel,
+ currentTime,
+ getAudioContextCurrentTimeMockedInTest
  }) => {
- let __stack = [];
- try {
- if (muted) {
- return;
- }
- audioBufferIterator?.destroy(sharedAudioContext);
- const delayHandle = __using(__stack, delayPlaybackHandleIfNotPremounting(), 0);
- currentDelayHandle = delayHandle;
- const iterator = makeAudioIterator({
- startFromSecond,
- maximumTimestamp: getEndTime(),
- cache: prewarmedAudioIteratorCache,
- debugAudioScheduling
- });
- audioIteratorsCreated++;
- audioBufferIterator = iterator;
- try {
- for (let i = 0;i < 6; i++) {
- const result = await iterator.getNext();
- if (iterator.isDestroyed()) {
- return;
- }
- if (nonce.isStale()) {
- return;
- }
- if (!result.value) {
- return;
- }
- onAudioChunk({
- getIsPlaying,
- buffer: result.value,
- playbackRate,
- scheduleAudioNode,
- debugAudioScheduling
- });
+ waitForTurn({
+ getPriority: () => {
+ if (iterator.isDestroyed()) {
+ return null;
  }
- await iterator.bufferAsFarAsPossible((buffer) => {
- if (!nonce.isStale()) {
- onAudioChunk({
- getIsPlaying,
- buffer,
- playbackRate,
- scheduleAudioNode,
- debugAudioScheduling
- });
- }
- }, Math.min(startFromSecond + MAX_BUFFER_AHEAD_SECONDS, getEndTime()));
- } catch (e) {
+ const guessedNextTimestamp = iterator.guessNextTimestamp();
+ const targetTime = getTargetTime(guessedNextTimestamp, currentTime);
+ if (targetTime === null) {
+ return null;
+ }
+ const scheduledTime = getScheduledTime({
+ mediaTimestamp: guessedNextTimestamp,
+ targetTime,
+ currentTime,
+ sequenceStartTime: getStartTime()
+ });
+ return scheduledTime - getAudioContextCurrentTimeMockedInTest();
+ },
+ fn: () => iterator.getNextFn(),
+ onDone: (result, next) => {
+ if (iterator.isDestroyed()) {
+ next();
+ onDestroyed();
+ return;
+ }
+ if (!result.value) {
+ next();
+ return;
+ }
+ onScheduled(result.value.timestamp);
+ notifyNodeScheduled();
+ onAudioChunk({
+ buffer: result.value,
+ playbackRate,
+ scheduleAudioNode,
+ logLevel,
+ currentTime
+ });
+ proceedScheduling({
+ iterator,
+ nonce,
+ getTargetTime,
+ playbackRate,
+ scheduleAudioNode,
+ onScheduled,
+ onDestroyed,
+ onDone,
+ logLevel,
+ currentTime,
+ getAudioContextCurrentTimeMockedInTest
+ });
+ next();
+ },
+ onError: (e) => {
  if (e instanceof InputDisposedError) {
  return;
  }
- throw e;
- }
- } catch (_catch) {
- var _err = _catch, _hasErr = 1;
- } finally {
- __callDispose(__stack, _err, _hasErr);
- }
+ if (e instanceof StaleWaiterError) {
+ return;
+ }
+ throw e;
+ }
+ });
  };
- const pausePlayback = () => {
- if (!audioBufferIterator) {
+ const startAudioIterator = ({
+ nonce,
+ playbackRate,
+ startFromSecond,
+ scheduleAudioNode,
+ getTargetTime,
+ logLevel,
+ loop,
+ unscheduleAudioNode,
+ getAudioContextCurrentTimeMockedInTest
+ }) => {
+ if (muted) {
  return;
  }
- audioBufferIterator.moveQueuedChunksToPauseQueue();
+ const maximumTimestamp = getMediaEndTimestamp();
+ if (startFromSecond >= maximumTimestamp) {
+ return;
+ }
+ audioBufferIterator?.destroy();
+ const delayHandle = delayPlaybackHandleIfNotPremounting();
+ currentDelayHandle = delayHandle;
+ const iterator = makeAudioIterator({
+ startFromSecond,
+ maximumTimestamp,
+ audioSink,
+ logLevel,
+ loop,
+ playbackRate,
+ sequenceDurationInSeconds: getSequenceDurationInSeconds(),
+ unscheduleAudioNode
+ });
+ audioIteratorsCreated++;
+ audioBufferIterator = iterator;
+ proceedScheduling({
+ iterator,
+ nonce,
+ getTargetTime,
+ playbackRate,
+ scheduleAudioNode,
+ onScheduled: () => {
+ delayHandle.unblock();
+ },
+ onDestroyed: () => {
+ delayHandle.unblock();
+ },
+ onDone: () => {
+ delayHandle.unblock();
+ },
+ logLevel,
+ currentTime: sharedAudioContext.audioContext.currentTime,
+ getAudioContextCurrentTimeMockedInTest
+ });
  };
- const seek = async ({
+ const seek = ({
  newTime,
  nonce,
  playbackRate,
- getIsPlaying,
  scheduleAudioNode,
- debugAudioScheduling
+ getTargetTime,
+ logLevel,
+ loop,
+ trimBefore,
+ trimAfter,
+ sequenceOffset,
+ sequenceDurationInFrames,
+ fps,
+ getAudioContextCurrentTimeMockedInTest
  }) => {
- if (muted) {
+ if (currentSeek.time === newTime && currentSeek.playbackRate === playbackRate && currentSeek.trimBefore === trimBefore && currentSeek.trimAfter === trimAfter && currentSeek.sequenceOffset === sequenceOffset && currentSeek.sequenceDurationInFrames === sequenceDurationInFrames && currentSeek.loop === loop && currentSeek.fps === fps) {
  return;
  }
- if (getIsLooping()) {
- if (getEndTime() - newTime < 1) {
- prewarmedAudioIteratorCache.prewarmIteratorForLooping({
- timeToSeek: getStartTime(),
- maximumTimestamp: getEndTime()
- });
- }
- }
- if (!audioBufferIterator) {
- await startAudioIterator({
- nonce,
- playbackRate,
- startFromSecond: newTime,
- getIsPlaying,
- scheduleAudioNode,
- debugAudioScheduling
- });
+ currentSeek = {
+ time: newTime,
+ playbackRate,
+ trimBefore,
+ trimAfter,
+ sequenceOffset,
+ sequenceDurationInFrames,
+ loop,
+ fps
+ };
+ if (muted) {
  return;
  }
- const queuedPeriod = audioBufferIterator.getQueuedPeriod();
- const queuedPeriodMinusLatency = queuedPeriod ? {
- from: queuedPeriod.from - ALLOWED_GLOBAL_TIME_ANCHOR_SHIFT - sharedAudioContext.audioContext.baseLatency - sharedAudioContext.audioContext.outputLatency,
- until: queuedPeriod.until
- } : null;
- const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, queuedPeriodMinusLatency);
- if (!currentTimeIsAlreadyQueued) {
- const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(newTime, (buffer) => {
- if (!nonce.isStale()) {
- onAudioChunk({
- getIsPlaying,
- buffer,
- playbackRate,
- scheduleAudioNode,
- debugAudioScheduling
- });
- }
- });
- if (nonce.isStale()) {
- return;
- }
- if (audioSatisfyResult.type === "ended") {
+ if (audioBufferIterator && !audioBufferIterator.isDestroyed()) {
+ const queuedPeriod = audioBufferIterator.getQueuedPeriod();
+ const queuedPeriodMinusLatency = queuedPeriod ? {
+ from: queuedPeriod.from - ALLOWED_GLOBAL_TIME_ANCHOR_SHIFT - sharedAudioContext.audioContext.baseLatency - sharedAudioContext.audioContext.outputLatency,
+ until: queuedPeriod.until
+ } : null;
+ const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, queuedPeriodMinusLatency);
+ if (currentTimeIsAlreadyQueued) {
+ processNext();
  return;
  }
- if (audioSatisfyResult.type === "not-satisfied") {
- await startAudioIterator({
- nonce,
- playbackRate,
- startFromSecond: newTime,
- getIsPlaying,
- scheduleAudioNode,
- debugAudioScheduling
- });
+ const currentIteratorTimestamp = audioBufferIterator.guessNextTimestamp();
+ if (currentIteratorTimestamp < newTime && Math.abs(currentIteratorTimestamp - newTime) < 1) {
+ processNext();
  return;
  }
- if (audioSatisfyResult.type === "satisfied") {}
  }
- await audioBufferIterator.bufferAsFarAsPossible((buffer) => {
- if (!nonce.isStale()) {
- onAudioChunk({
- getIsPlaying,
- buffer,
- playbackRate,
- scheduleAudioNode,
- debugAudioScheduling
- });
- }
- }, Math.min(newTime + MAX_BUFFER_AHEAD_SECONDS, getEndTime()));
+ startAudioIterator({
+ nonce,
+ playbackRate,
+ startFromSecond: newTime,
+ scheduleAudioNode,
+ getTargetTime,
+ logLevel,
+ loop,
+ unscheduleAudioNode: sharedAudioContext.unscheduleAudioNode,
+ getAudioContextCurrentTimeMockedInTest
+ });
  };
  return {
  startAudioIterator,
- resumeScheduledAudioChunks,
- pausePlayback,
  getAudioBufferIterator: () => audioBufferIterator,
  destroyIterator: () => {
- prewarmedAudioIteratorCache.destroy();
- audioBufferIterator?.destroy(sharedAudioContext);
+ audioBufferIterator?.destroy();
  audioBufferIterator = null;
  if (currentDelayHandle) {
  currentDelayHandle.unblock();
@@ -858,6 +698,7 @@ var audioIteratorManager = ({
  },
  seek,
  getAudioIteratorsCreated: () => audioIteratorsCreated,
+ getTotalAudioScheduledInSeconds: () => totalAudioScheduledInSeconds,
  setMuted: (newMuted) => {
  muted = newMuted;
  gainNode.gain.value = muted ? 0 : currentVolume;
@@ -866,7 +707,8 @@ var audioIteratorManager = ({
  currentVolume = Math.max(0, volume);
  gainNode.gain.value = muted ? 0 : currentVolume;
  },
- scheduleAudioChunk
+ scheduleAudioChunk,
+ waitForNScheduledNodes
  };
  };
 
@@ -886,6 +728,7 @@ var drawPreviewOverlay = ({
  "Debug overlay",
  `Video iterators created: ${videoIteratorManager?.getVideoIteratorsCreated()}`,
  `Audio iterators created: ${audioIteratorManager2?.getAudioIteratorsCreated()}`,
+ `Audio scheduled: ${(audioIteratorManager2?.getTotalAudioScheduledInSeconds() ?? 0).toFixed(3)}s`,
  `Frames rendered: ${videoIteratorManager?.getFramesRendered()}`,
  `Audio context state: ${audioContextState}`,
  audioTime ? `Audio time: ${((audioTime - anchorValue) * playbackRate).toFixed(3)}s` : null
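Note on the hunks above: the manager now exposes waitForNScheduledNodes, a countdown latch. notifyNodeScheduled() decrements every pending waiter and resolves those that reach zero, so a caller can await the scheduling of the next n audio nodes; the new totalAudioScheduledInSeconds counter feeds the extra debug-overlay line. A self-contained sketch of the same latch (illustrative names, not the package's exports):

// Sketch only: a countdown latch over "node scheduled" events.
type Latch = { remaining: number; resolve: () => void };
const latches: Latch[] = [];

// Call whenever one more audio node has been scheduled.
function nodeScheduled(): void {
  for (let i = latches.length - 1; i >= 0; i--) {
    if (--latches[i].remaining <= 0) {
      latches[i].resolve();
      latches.splice(i, 1);
    }
  }
}

// Resolves once n further nodes have been scheduled.
function waitForN(n: number): Promise<void> {
  if (n <= 0) return Promise.resolve();
  return new Promise<void>((resolve) => latches.push({ remaining: n, resolve }));
}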
@@ -943,55 +786,149 @@ var makeNonceManager = () => {
 
  // src/video-iterator-manager.ts
  import { CanvasSink } from "mediabunny";
- import { Internals as Internals5 } from "remotion";
+ import { Internals as Internals3 } from "remotion";
+
+ // src/canvas-ahead-of-time.ts
+ var canvasesAheadOfTime = (videoSink, startTimestamp) => {
+ const iterator = videoSink.canvases(startTimestamp);
+ let inFlight = iterator.next();
+ let resolved = null;
+ const trackResolution = () => {
+ const captured = inFlight;
+ captured.then((result) => {
+ if (captured === inFlight) {
+ resolved = result;
+ }
+ }, () => {
+ return;
+ });
+ };
+ trackResolution();
+ const advance = () => {
+ inFlight = iterator.next();
+ resolved = null;
+ trackResolution();
+ };
+ const next = () => {
+ if (resolved) {
+ if (resolved.done) {
+ return { type: "ready", frame: null };
+ }
+ const frame = resolved.value;
+ advance();
+ return { type: "ready", frame };
+ }
+ const captured = inFlight;
+ return {
+ type: "pending",
+ wait: async () => {
+ const result = await captured;
+ if (captured === inFlight && !result.done) {
+ advance();
+ }
+ return result.done ? null : result.value;
+ }
+ };
+ };
+ const closeFrame = (frame) => {
+ frame.close?.();
+ };
+ const closeIterator = async () => {
+ if (resolved) {
+ if (!resolved.done) {
+ closeFrame(resolved.value);
+ }
+ } else {
+ const captured = inFlight;
+ captured.then((result) => {
+ if (!result.done) {
+ closeFrame(result.value);
+ }
+ }, () => {
+ return;
+ });
+ }
+ await iterator.return();
+ };
+ return { next, closeIterator };
+ };
+
+ // src/prewarm-iterator-for-looping.ts
+ var makePrewarmedVideoIteratorCache = (videoSink) => {
+ const prewarmedVideoIterators = new Map;
+ const prewarmIteratorForLooping = ({ timeToSeek }) => {
+ if (!prewarmedVideoIterators.has(timeToSeek)) {
+ prewarmedVideoIterators.set(timeToSeek, canvasesAheadOfTime(videoSink, timeToSeek));
+ }
+ };
+ const makeIteratorOrUsePrewarmed = (timeToSeek) => {
+ const prewarmedIterator = prewarmedVideoIterators.get(timeToSeek);
+ if (prewarmedIterator) {
+ prewarmedVideoIterators.delete(timeToSeek);
+ return prewarmedIterator;
+ }
+ return canvasesAheadOfTime(videoSink, timeToSeek);
+ };
+ const destroy = () => {
+ for (const iterator of prewarmedVideoIterators.values()) {
+ iterator.closeIterator();
+ }
+ prewarmedVideoIterators.clear();
+ };
+ return {
+ prewarmIteratorForLooping,
+ makeIteratorOrUsePrewarmed,
+ destroy
+ };
+ };
+
+ // src/helpers/round-to-4-digits.ts
+ var roundTo4Digits = (timestamp) => {
+ return Math.round(timestamp * 1000) / 1000;
+ };
 
  // src/video/video-preview-iterator.ts
  var createVideoIterator = async (timeToSeek, cache) => {
  let destroyed = false;
  const iterator = cache.makeIteratorOrUsePrewarmed(timeToSeek);
  let iteratorEnded = false;
- const initialFrame = (await iterator.next())?.value ?? null;
+ const firstAwait = iterator.next();
+ const initialFrame = firstAwait && firstAwait.type === "ready" ? firstAwait.frame : await firstAwait.wait();
  let lastReturnedFrame = initialFrame;
- const getNextOrNullIfNotAvailable = async () => {
+ const getNextOrNullIfNotAvailable = () => {
  const next = iterator.next();
- const result = await Promise.race([
- next,
- new Promise((resolve) => {
- Promise.resolve().then(() => resolve());
- })
- ]);
- if (!result) {
+ if (next.type === "pending") {
  return {
  type: "need-to-wait-for-it",
  waitPromise: async () => {
- const res = await next;
- if (res.value) {
- lastReturnedFrame = res.value;
+ const res = await next.wait();
+ if (res) {
+ lastReturnedFrame = res;
  } else {
  iteratorEnded = true;
  }
- return res.value;
+ return res;
  }
  };
  }
- if (result.value) {
- lastReturnedFrame = result.value;
+ if (next.frame) {
+ lastReturnedFrame = next.frame;
  } else {
  iteratorEnded = true;
  }
  return {
  type: "got-frame-or-end",
- frame: result.value ?? null
+ frame: next.frame ?? null
  };
  };
  const destroy = () => {
  destroyed = true;
  lastReturnedFrame = null;
- iterator.return().catch(() => {
+ iterator.closeIterator().catch(() => {
  return;
  });
  };
- const tryToSatisfySeek = async (time) => {
+ const tryToSatisfySeek = (time) => {
  if (lastReturnedFrame) {
  const frameTimestamp = roundTo4Digits(lastReturnedFrame.timestamp);
  if (roundTo4Digits(time) < frameTimestamp) {
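Note on the hunk above: src/canvas-ahead-of-time.ts wraps mediabunny's canvases() async iterator so exactly one next() promise is in flight at a time. When that promise has already resolved, the consumer receives the frame synchronously (type: "ready") and the following fetch starts immediately; otherwise it receives a type: "pending" handle to await. That is what lets tryToSatisfySeek and getNextOrNullIfNotAvailable drop their async signatures in these hunks. A generic TypeScript sketch of the same one-slot lookahead (illustrative, over any AsyncIterator):

// Sketch only: a one-slot lookahead over any async iterator.
function aheadOfTime<T>(it: AsyncIterator<T>) {
  let inFlight = it.next();
  let resolved: IteratorResult<T> | null = null;
  const track = (p: Promise<IteratorResult<T>>) => {
    // Remember the result only if no newer fetch superseded this one.
    p.then((r) => {
      if (p === inFlight) resolved = r;
    }, () => {});
  };
  track(inFlight);
  return {
    poll():
      | { ready: true; value: T | null }
      | { ready: false; wait: Promise<IteratorResult<T>> } {
      if (resolved) {
        if (resolved.done) return { ready: true, value: null };
        const value = resolved.value;
        resolved = null;
        inFlight = it.next(); // eagerly start fetching the next item
        track(inFlight);
        return { ready: true, value };
      }
      return { ready: false, wait: inFlight };
    },
  };
}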
@@ -1030,7 +967,7 @@ var createVideoIterator = async (timeToSeek, cache) => {
  };
  }
  while (true) {
- const frame = await getNextOrNullIfNotAvailable();
+ const frame = getNextOrNullIfNotAvailable();
  if (frame.type === "need-to-wait-for-it") {
  return {
  type: "not-satisfied",
@@ -1084,7 +1021,7 @@ var videoIteratorManager = ({
  logLevel,
  getOnVideoFrameCallback,
  videoTrack,
- getEndTime,
+ getLoopSegmentMediaEndTimestamp,
  getStartTime,
  getIsLooping
  }) => {
@@ -1115,7 +1052,7 @@ var videoIteratorManager = ({
  if (callback) {
  callback(frame.canvas);
  }
- Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, `[MediaPlayer] Drew frame ${frame.timestamp.toFixed(3)}s`);
+ Internals3.Log.trace({ logLevel, tag: "@remotion/media" }, `[MediaPlayer] Drew frame ${frame.timestamp.toFixed(3)}s`);
  };
  const startVideoIterator = async (timeToSeek, nonce) => {
  let __stack = [];
@@ -1150,13 +1087,13 @@ var videoIteratorManager = ({
  return;
  }
  if (getIsLooping()) {
- if (getEndTime() - newTime < 1) {
+ if (getLoopSegmentMediaEndTimestamp() - newTime < 1) {
  prewarmedVideoIteratorCache.prewarmIteratorForLooping({
  timeToSeek: getStartTime()
  });
  }
  }
- const videoSatisfyResult = await videoFrameIterator.tryToSatisfySeek(newTime);
+ const videoSatisfyResult = videoFrameIterator.tryToSatisfySeek(newTime);
  if (videoSatisfyResult.type === "satisfied") {
  drawFrame(videoSatisfyResult.frame);
  return;
@@ -1190,6 +1127,7 @@ var videoIteratorManager = ({
 
  // src/media-player.ts
  class MediaPlayer {
+ tagType;
  canvas;
  context;
  src;
@@ -1200,16 +1138,15 @@ class MediaPlayer {
  sharedAudioContext;
  audioIteratorManager = null;
  videoIteratorManager = null;
- sequenceOffset;
  playing = false;
  loop = false;
  fps;
  trimBefore;
  trimAfter;
- durationInFrames;
+ sequenceDurationInFrames;
+ sequenceOffset;
  totalDuration;
  debugOverlay = false;
- debugAudioScheduling = false;
  nonceManager;
  onVideoFrameCallback = null;
  initializationPromise = null;
@@ -1230,7 +1167,6 @@ class MediaPlayer {
  audioStreamIndex,
  fps,
  debugOverlay,
- debugAudioScheduling,
  bufferState,
  isPremounting,
  isPostmounting,
@@ -1238,7 +1174,8 @@ class MediaPlayer {
  onVideoFrameCallback,
  playing,
  sequenceOffset,
- credentials
+ credentials,
+ tagType
  }) {
  this.canvas = canvas ?? null;
  this.src = src;
@@ -1252,11 +1189,10 @@ class MediaPlayer {
  this.audioStreamIndex = audioStreamIndex ?? 0;
  this.fps = fps;
  this.debugOverlay = debugOverlay;
- this.debugAudioScheduling = debugAudioScheduling;
  this.bufferState = bufferState;
  this.isPremounting = isPremounting;
  this.isPostmounting = isPostmounting;
- this.durationInFrames = durationInFrames;
+ this.sequenceDurationInFrames = durationInFrames;
  this.nonceManager = makeNonceManager();
  this.onVideoFrameCallback = onVideoFrameCallback;
  this.playing = playing;
@@ -1267,6 +1203,7 @@ class MediaPlayer {
  } : undefined),
  formats: ALL_FORMATS
  });
+ this.tagType = tagType;
  if (canvas) {
  const context = canvas.getContext("2d", {
  alpha: true,
@@ -1293,8 +1230,14 @@ class MediaPlayer {
  getStartTime() {
  return (this.trimBefore ?? 0) / this.fps;
  }
- getEndTime() {
- const mediaEndTime = calculateEndTime({
+ getSequenceEndTimestamp() {
+ return this.sequenceDurationInFrames / this.fps * this.playbackRate + this.getStartTime();
+ }
+ getSequenceDurationInSeconds() {
+ return this.sequenceDurationInFrames / this.fps;
+ }
+ getMediaEndTimestamp() {
+ return calculateEndTime({
  mediaDurationInSeconds: this.totalDuration,
  ifNoMediaDuration: "fail",
  src: this.src,
@@ -1302,11 +1245,9 @@ class MediaPlayer {
  trimBefore: this.trimBefore,
  fps: this.fps
  });
- if (this.loop) {
- return mediaEndTime;
- }
- const sequenceEndMediaTime = this.durationInFrames / this.fps * this.playbackRate + (this.trimBefore ?? 0) / this.fps;
- return Math.min(mediaEndTime, sequenceEndMediaTime);
+ }
+ getLoopSegmentMediaEndTimestamp() {
+ return Math.min(this.getMediaEndTimestamp(), this.getSequenceEndTimestamp());
  }
  async _initialize(startTimeUnresolved, initialMuted) {
  let __stack = [];
@@ -1326,7 +1267,7 @@ class MediaPlayer {
  if (isNetworkError(err)) {
  throw error;
  }
- Internals6.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
+ Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
  return { type: "unknown-container-format" };
  }
  const [durationInSeconds, videoTrack, audioTracks] = await Promise.all([
@@ -1342,7 +1283,7 @@ class MediaPlayer {
  if (!videoTrack && !audioTrack) {
  return { type: "no-tracks" };
  }
- if (videoTrack) {
+ if (videoTrack && this.tagType === "video") {
  const canDecode = await videoTrack.canDecode();
  if (!canDecode) {
  return { type: "cannot-decode" };
@@ -1358,7 +1299,7 @@ class MediaPlayer {
  getOnVideoFrameCallback: () => this.onVideoFrameCallback,
  logLevel: this.logLevel,
  drawDebugOverlay: this.drawDebugOverlay,
- getEndTime: () => this.getEndTime(),
+ getLoopSegmentMediaEndTimestamp: () => this.getLoopSegmentMediaEndTimestamp(),
  getStartTime: () => this.getStartTime(),
  getIsLooping: () => this.loop
  });
@@ -1379,11 +1320,19 @@ class MediaPlayer {
  audioTrack,
  delayPlaybackHandleIfNotPremounting: this.delayPlaybackHandleIfNotPremounting,
  sharedAudioContext: this.sharedAudioContext,
- getIsLooping: () => this.loop,
- getEndTime: () => this.getEndTime(),
+ getMediaEndTimestamp: () => this.getMediaEndTimestamp(),
+ getSequenceEndTimestamp: () => this.getSequenceEndTimestamp(),
  getStartTime: () => this.getStartTime(),
  initialMuted,
- drawDebugOverlay: this.drawDebugOverlay
+ drawDebugOverlay: this.drawDebugOverlay,
+ initialPlaybackRate: this.playbackRate * this.globalPlaybackRate,
+ getSequenceDurationInSeconds: () => this.getSequenceDurationInSeconds(),
+ initialTrimBefore: this.trimBefore,
+ initialTrimAfter: this.trimAfter,
+ initialSequenceOffset: this.sequenceOffset,
+ initialSequenceDurationInFrames: this.sequenceDurationInFrames,
+ initialLoop: this.loop,
+ initialFps: this.fps
  });
  }
  const nonce = this.nonceManager.createAsyncOperation();
@@ -1393,9 +1342,12 @@ class MediaPlayer {
  nonce,
  playbackRate: this.playbackRate * this.globalPlaybackRate,
  startFromSecond: startTime,
- getIsPlaying: () => this.playing,
  scheduleAudioNode: this.scheduleAudioNode,
- debugAudioScheduling: this.debugAudioScheduling
+ getTargetTime: this.getTargetTime,
+ logLevel: this.logLevel,
+ loop: this.loop,
+ unscheduleAudioNode: this.sharedAudioContext.unscheduleAudioNode,
+ getAudioContextCurrentTimeMockedInTest: () => this.sharedAudioContext.audioContext.currentTime
  }) : Promise.resolve(),
  this.videoIteratorManager ? this.videoIteratorManager.startVideoIterator(startTime, nonce) : Promise.resolve()
  ]);
@@ -1403,16 +1355,16 @@ class MediaPlayer {
  if (this.isDisposalError()) {
  return { type: "disposed" };
  }
- Internals6.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start audio and video iterators", error);
+ Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start audio and video iterators", error);
  }
  return { type: "success", durationInSeconds };
  } catch (error) {
  const err = error;
  if (isNetworkError(err)) {
- Internals6.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);
+ Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);
  return { type: "network-error" };
  }
- Internals6.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to initialize", error);
+ Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to initialize", error);
  throw error;
  }
  } catch (_catch) {
@@ -1438,21 +1390,27 @@ class MediaPlayer {
  if (nonce.isStale()) {
  return;
  }
- const shouldSeekAudio = this.audioIteratorManager && this.getAudioPlaybackTime(this.sharedAudioContext?.audioContext.currentTime ?? 0) !== newTime;
  try {
  await Promise.all([
  this.videoIteratorManager?.seek({
  newTime,
  nonce
  }),
- shouldSeekAudio ? this.audioIteratorManager?.seek({
+ this.audioIteratorManager?.seek({
  newTime,
  nonce,
  playbackRate: this.playbackRate * this.globalPlaybackRate,
- getIsPlaying: () => this.playing,
+ getTargetTime: this.getTargetTime,
+ logLevel: this.logLevel,
+ loop: this.loop,
+ trimBefore: this.trimBefore,
+ trimAfter: this.trimAfter,
+ sequenceOffset: this.sequenceOffset,
+ sequenceDurationInFrames: this.sequenceDurationInFrames,
+ fps: this.fps,
  scheduleAudioNode: this.scheduleAudioNode,
- debugAudioScheduling: this.debugAudioScheduling
- }) : null
+ getAudioContextCurrentTimeMockedInTest: () => this.sharedAudioContext.audioContext.currentTime
+ })
  ]);
  } catch (error) {
  if (this.isDisposalError()) {
@@ -1461,17 +1419,7 @@ class MediaPlayer {
  throw error;
  }
  }
- playAudio() {
- if (this.audioIteratorManager && this.sharedAudioContext?.audioContext.state === "running" && (this.sharedAudioContext?.audioContext?.getOutputTimestamp().contextTime ?? 0) > 0) {
- this.audioIteratorManager.resumeScheduledAudioChunks({
- playbackRate: this.playbackRate * this.globalPlaybackRate,
- scheduleAudioNode: this.scheduleAudioNode,
- debugAudioScheduling: this.debugAudioScheduling
- });
- }
- }
  play() {
- this.playAudio();
  if (this.playing) {
  return;
  }
@@ -1498,7 +1446,6 @@ class MediaPlayer {
  return;
  }
  this.playing = false;
- this.audioIteratorManager?.pausePlayback();
  this.drawDebugOverlay();
  }
  setMuted(muted) {
@@ -1523,73 +1470,46 @@ class MediaPlayer {
  src: this.src
  });
  }
- async updateAfterTrimChange(unloopedTimeInSeconds) {
- if (!this.audioIteratorManager && !this.videoIteratorManager) {
- return;
- }
- const newMediaTime = this.getTrimmedTime(unloopedTimeInSeconds);
- this.audioIteratorManager?.destroyIterator();
- if (newMediaTime !== null) {
- if (!this.playing && this.videoIteratorManager) {
- await this.seekToWithQueue(newMediaTime);
- }
- }
- }
  async setTrimBefore(trimBefore, unloopedTimeInSeconds) {
  if (this.trimBefore !== trimBefore) {
  this.trimBefore = trimBefore;
- await this.updateAfterTrimChange(unloopedTimeInSeconds);
+ this.audioIteratorManager?.destroyIterator();
+ await this.seekTo(unloopedTimeInSeconds);
  }
  }
  async setTrimAfter(trimAfter, unloopedTimeInSeconds) {
  if (this.trimAfter !== trimAfter) {
  this.trimAfter = trimAfter;
- await this.updateAfterTrimChange(unloopedTimeInSeconds);
+ this.audioIteratorManager?.destroyIterator();
+ await this.seekTo(unloopedTimeInSeconds);
  }
  }
  setDebugOverlay(debugOverlay) {
  this.debugOverlay = debugOverlay;
  }
- setDebugAudioScheduling(debugAudioScheduling) {
- this.debugAudioScheduling = debugAudioScheduling;
- }
- rescheduleAudioChunks() {
- if (!this.audioIteratorManager) {
- return;
- }
- if (!this.sharedAudioContext) {
- return;
- }
- const iterator = this.audioIteratorManager.getAudioBufferIterator();
- if (!iterator) {
- return;
- }
- iterator.moveQueuedChunksToPauseQueue();
- if (this.playing && this.sharedAudioContext.audioContext.state === "running" && (this.sharedAudioContext.audioContext?.getOutputTimestamp().contextTime ?? 0) > 0) {
- this.audioIteratorManager.resumeScheduledAudioChunks({
- playbackRate: this.playbackRate * this.globalPlaybackRate,
- scheduleAudioNode: this.scheduleAudioNode,
- debugAudioScheduling: this.debugAudioScheduling
- });
- }
- }
  async setPlaybackRate(rate, unloopedTimeInSeconds) {
  const previousRate = this.playbackRate;
  if (previousRate !== rate) {
  this.playbackRate = rate;
- this.rescheduleAudioChunks();
+ this.audioIteratorManager?.destroyIterator();
  await this.seekTo(unloopedTimeInSeconds);
  }
  }
- setGlobalPlaybackRate(rate) {
+ async setGlobalPlaybackRate(rate, unloopedTimeInSeconds) {
  const previousRate = this.globalPlaybackRate;
  if (previousRate !== rate) {
  this.globalPlaybackRate = rate;
- this.rescheduleAudioChunks();
+ this.audioIteratorManager?.destroyIterator();
+ await this.seekTo(unloopedTimeInSeconds);
  }
  }
- setFps(fps) {
- this.fps = fps;
+ async setFps(fps, unloopedTimeInSeconds) {
+ const previousFps = this.fps;
+ if (previousFps !== fps) {
+ this.fps = fps;
+ this.audioIteratorManager?.destroyIterator();
+ await this.seekTo(unloopedTimeInSeconds);
+ }
  }
  setIsPremounting(isPremounting) {
  this.isPremounting = isPremounting;
@@ -1597,14 +1517,28 @@ class MediaPlayer {
  setIsPostmounting(isPostmounting) {
  this.isPostmounting = isPostmounting;
  }
- setLoop(loop) {
- this.loop = loop;
+ async setLoop(loop, unloopedTimeInSeconds) {
+ const previousLoop = this.loop;
+ if (previousLoop !== loop) {
+ this.loop = loop;
+ this.audioIteratorManager?.destroyIterator();
+ await this.seekTo(unloopedTimeInSeconds);
+ }
  }
- setSequenceOffset(offset) {
1604
- this.sequenceOffset = offset;
1528
+ async setSequenceOffset(offset, unloopedTimeInSeconds) {
1529
+ const previousOffset = this.sequenceOffset;
1530
+ if (previousOffset !== offset) {
1531
+ this.sequenceOffset = offset;
1532
+ this.audioIteratorManager?.destroyIterator();
1533
+ await this.seekTo(unloopedTimeInSeconds);
1534
+ }
1605
1535
  }
1606
- setDurationInFrames(durationInFrames) {
1607
- this.durationInFrames = durationInFrames;
1536
+ async setSequenceDurationInFrames(sequenceDurationInFrames, unloopedTimeInSeconds) {
1537
+ const previousDuration = this.sequenceDurationInFrames;
1538
+ if (previousDuration !== sequenceDurationInFrames) {
1539
+ this.sequenceDurationInFrames = sequenceDurationInFrames;
1540
+ await this.seekTo(unloopedTimeInSeconds);
1541
+ }
1608
1542
  }
1609
1543
  async dispose() {
1610
1544
  if (this.initializationPromise) {
@@ -1617,41 +1551,59 @@ class MediaPlayer {
1617
1551
  this.audioIteratorManager?.destroyIterator();
1618
1552
  this.input.dispose();
1619
1553
  }
1620
- scheduleAudioNode = (node, mediaTimestamp) => {
1554
+ getTargetTime = (mediaTimestamp, currentTime) => {
1621
1555
  if (!this.sharedAudioContext) {
1622
1556
  throw new Error("Shared audio context not found");
1623
1557
  }
1624
- const { audioContext } = this.sharedAudioContext;
1625
- const { currentTime } = audioContext;
1626
1558
  const globalTime = (currentTime - this.sharedAudioContext.audioSyncAnchor.value) * this.globalPlaybackRate;
1627
1559
  const timeInSeconds = globalTime - this.sequenceOffset;
1628
1560
  const localTime = this.getTrimmedTime(timeInSeconds);
1629
1561
  if (localTime === null) {
1630
- return { type: "not-started" };
1562
+ return null;
1631
1563
  }
1632
1564
  const targetTime = (mediaTimestamp - localTime) / (this.playbackRate * this.globalPlaybackRate);
1565
+ return targetTime;
1566
+ };
1567
+ scheduleAudioNode = (node, mediaTimestamp, originalUnloopedMediaTimestamp, currentTime) => {
1568
+ if (!this.sharedAudioContext) {
1569
+ throw new Error("Shared audio context not found");
1570
+ }
1571
+ const targetTime = this.getTargetTime(mediaTimestamp, currentTime);
1572
+ if (targetTime === null) {
1573
+ return {
1574
+ type: "not-started",
1575
+ reason: "no target for" + mediaTimestamp.toFixed(3) + "," + currentTime.toFixed(3)
1576
+ };
1577
+ }
1578
+ const sequenceStartTime = this.getStartTime();
1579
+ const loopSegmentMediaEndTimestamp = this.getLoopSegmentMediaEndTimestamp();
1580
+ const offset = getOffset({
1581
+ mediaTimestamp,
1582
+ targetTime,
1583
+ sequenceStartTime
1584
+ });
1585
+ const duration = getDurationOfNode({
1586
+ bufferDuration: node.buffer?.duration ?? 0,
1587
+ loopSegmentMediaEndTimestamp,
1588
+ offset,
1589
+ originalUnloopedMediaTimestamp
1590
+ });
1591
+ const scheduledTime = getScheduledTime({
1592
+ mediaTimestamp,
1593
+ targetTime,
1594
+ currentTime,
1595
+ sequenceStartTime
1596
+ });
1633
1597
  return this.sharedAudioContext.scheduleAudioNode({
1634
1598
  node,
1635
1599
  mediaTimestamp,
1636
- targetTime,
1637
1600
  currentTime,
1638
- sequenceEndTime: this.getEndTime(),
1639
- sequenceStartTime: this.getStartTime(),
1640
- debugAudioScheduling: this.debugAudioScheduling
1601
+ scheduledTime,
1602
+ duration,
1603
+ offset,
1604
+ originalUnloopedMediaTimestamp
1641
1605
  });
1642
1606
  };
1643
- getAudioPlaybackTime(currentTime) {
1644
- if (!this.sharedAudioContext) {
1645
- throw new Error("Shared audio context not found");
1646
- }
1647
- const globalTime = (currentTime - this.sharedAudioContext.audioSyncAnchor.value) * this.globalPlaybackRate;
1648
- const localTime = globalTime - this.sequenceOffset;
1649
- const trimmedTime = this.getTrimmedTime(localTime);
1650
- if (trimmedTime !== null) {
1651
- return trimmedTime;
1652
- }
1653
- return localTime * this.playbackRate + (this.trimBefore ?? 0) / this.fps;
1654
- }
1655
1607
  setVideoFrameCallback(callback) {
1656
1608
  this.onVideoFrameCallback = callback;
1657
1609
  }
@@ -1671,6 +1623,12 @@ class MediaPlayer {
1671
1623
  });
1672
1624
  }
1673
1625
  };
1626
+ audioSyncAnchorChanged = () => {
1627
+ if (!this.audioIteratorManager) {
1628
+ return;
1629
+ }
1630
+ this.audioIteratorManager.destroyIterator();
1631
+ };
1674
1632
  }
1675
1633
 
1676
1634
  // src/on-error.ts
@@ -1696,7 +1654,7 @@ var callOnErrorAndResolve = ({
 
  // src/show-in-timeline.ts
  import { useMemo } from "react";
- import { Internals as Internals7, useVideoConfig } from "remotion";
+ import { Internals as Internals5, useVideoConfig } from "remotion";
  var useLoopDisplay = ({
  loop,
  mediaDurationInSeconds,
@@ -1709,7 +1667,7 @@ var useLoopDisplay = ({
  if (!loop || !mediaDurationInSeconds) {
  return;
  }
- const durationInFrames = Internals7.calculateMediaDuration({
+ const durationInFrames = Internals5.calculateMediaDuration({
  mediaDurationInFrames: mediaDurationInSeconds * fps,
  playbackRate,
  trimAfter,
@@ -1735,7 +1693,7 @@ var useLoopDisplay = ({
 
  // src/use-common-effects.ts
  import { useContext, useLayoutEffect } from "react";
- import { Internals as Internals8 } from "remotion";
+ import { Internals as Internals6 } from "remotion";
  var useCommonEffects = ({
  mediaPlayerRef,
  mediaPlayerReady,
@@ -1752,39 +1710,14 @@ var useCommonEffects = ({
  fps,
  sequenceOffset,
  loop,
- debugAudioScheduling,
  durationInFrames,
  isPremounting,
  isPostmounting,
  currentTime,
  logLevel,
- sharedAudioContext,
  label
  }) => {
- const absoluteTime = Internals8.useAbsoluteTimelinePosition();
- const { playing: playingWhilePremounting } = useContext(Internals8.PremountContext);
- useLayoutEffect(() => {
- if (sharedAudioContext?.audioContext && sharedAudioContext.audioSyncAnchor) {
- setGlobalTimeAnchor({
- audioContext: sharedAudioContext.audioContext,
- audioSyncAnchor: sharedAudioContext.audioSyncAnchor,
- absoluteTimeInSeconds: absoluteTime / fps,
- globalPlaybackRate,
- debugAudioScheduling,
- logLevel
- });
- }
- }, [
- absoluteTime,
- globalPlaybackRate,
- sharedAudioContext,
- fps,
- debugAudioScheduling,
- logLevel
- ]);
- if (playingWhilePremounting) {
- mediaPlayerRef.current?.playAudio();
- }
+ const sharedAudioContext = useContext(Internals6.SharedAudioContext);
  useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer)
@@ -1802,6 +1735,18 @@ var useCommonEffects = ({
  frame,
  mediaPlayerRef
  ]);
+ useLayoutEffect(() => {
+ if (!sharedAudioContext)
+ return;
+ const { remove } = sharedAudioContext.audioSyncAnchorEmitter.subscribe((event) => {
+ if (event === "changed") {
+ mediaPlayerRef.current?.audioSyncAnchorChanged();
+ }
+ });
+ return () => {
+ remove();
+ };
+ }, [sharedAudioContext, mediaPlayerRef]);
  useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
@@ -1841,22 +1786,22 @@ var useCommonEffects = ({
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
- mediaPlayer.setGlobalPlaybackRate(globalPlaybackRate);
- }, [globalPlaybackRate, mediaPlayerReady, mediaPlayerRef]);
+ mediaPlayer.setGlobalPlaybackRate(globalPlaybackRate, currentTimeRef.current);
+ }, [globalPlaybackRate, mediaPlayerReady, mediaPlayerRef, currentTimeRef]);
  useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
- mediaPlayer.setLoop(loop);
- }, [loop, mediaPlayerReady, mediaPlayerRef]);
+ mediaPlayer.setLoop(loop, currentTimeRef.current);
+ }, [loop, mediaPlayerReady, mediaPlayerRef, currentTimeRef]);
  useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
- mediaPlayer.setDurationInFrames(durationInFrames);
- }, [durationInFrames, mediaPlayerReady, mediaPlayerRef]);
+ mediaPlayer.setSequenceDurationInFrames(durationInFrames, currentTimeRef.current);
+ }, [durationInFrames, mediaPlayerReady, mediaPlayerRef, currentTimeRef]);
  useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
@@ -1876,34 +1821,27 @@ var useCommonEffects = ({
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
- mediaPlayer.setFps(fps);
- }, [fps, mediaPlayerReady, mediaPlayerRef]);
- useLayoutEffect(() => {
- const mediaPlayer = mediaPlayerRef.current;
- if (!mediaPlayer || !mediaPlayerReady) {
- return;
- }
- mediaPlayer.setSequenceOffset(sequenceOffset);
- }, [sequenceOffset, mediaPlayerReady, mediaPlayerRef]);
+ mediaPlayer.setFps(fps, currentTimeRef.current);
+ }, [fps, mediaPlayerReady, mediaPlayerRef, currentTimeRef]);
  useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
- mediaPlayer.setDebugAudioScheduling(debugAudioScheduling);
- }, [debugAudioScheduling, mediaPlayerReady, mediaPlayerRef]);
+ mediaPlayer.setSequenceOffset(sequenceOffset, currentTimeRef.current);
+ }, [sequenceOffset, mediaPlayerReady, mediaPlayerRef, currentTimeRef]);
  useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady)
  return;
  mediaPlayer.seekTo(currentTime).catch(() => {});
- Internals8.Log.trace({ logLevel, tag: "@remotion/media" }, `[${label}] Updating target time to ${currentTime.toFixed(3)}s`);
+ Internals6.Log.trace({ logLevel, tag: "@remotion/media" }, `[${label}] Updating target time to ${currentTime.toFixed(3)}s`);
  }, [currentTime, logLevel, mediaPlayerReady, label, mediaPlayerRef]);
  };
 
  // src/use-media-in-timeline.ts
- import { useContext as useContext2, useState, useEffect } from "react";
- import { Internals as Internals9, useCurrentFrame } from "remotion";
+ import { useContext as useContext2, useEffect, useState } from "react";
+ import { Internals as Internals7 } from "remotion";
  var useMediaInTimeline = ({
  volume,
  mediaVolume,
@@ -1920,12 +1858,10 @@ var useMediaInTimeline = ({
  trimAfter,
  controls
  }) => {
- const parentSequence = useContext2(Internals9.SequenceContext);
- const startsAt = Internals9.useMediaStartsAt();
- const { registerSequence, unregisterSequence } = useContext2(Internals9.SequenceManager);
- const [sequenceId] = useState(() => String(Math.random()));
+ const parentSequence = useContext2(Internals7.SequenceContext);
+ const startsAt = Internals7.useMediaStartsAt();
+ const { registerSequence, unregisterSequence } = useContext2(Internals7.SequenceManager);
  const [mediaId] = useState(() => String(Math.random()));
- const frame = useCurrentFrame();
  const {
  volumes,
  duration,
@@ -1934,7 +1870,7 @@ var useMediaInTimeline = ({
  rootId,
  isStudio,
  finalDisplayName
- } = Internals9.useBasicMediaInTimeline({
+ } = Internals7.useBasicMediaInTimeline({
  volume,
  mediaVolume,
  mediaType,
@@ -1954,32 +1890,13 @@ var useMediaInTimeline = ({
  if (!showInTimeline) {
  return;
  }
- const loopIteration = loopDisplay ? Math.floor(frame / loopDisplay.durationInFrames) : 0;
- if (loopDisplay) {
- registerSequence({
- type: "sequence",
- premountDisplay,
- postmountDisplay,
- parent: parentSequence?.id ?? null,
- displayName: finalDisplayName,
- rootId,
- showInTimeline: true,
- nonce: nonce.get(),
- loopDisplay,
- stack,
- from: 0,
- duration,
- id: sequenceId,
- controls: null
- });
- }
  registerSequence({
  type: mediaType,
  src,
  id: mediaId,
- duration: loopDisplay?.durationInFrames ?? duration,
- from: loopDisplay ? loopIteration * loopDisplay.durationInFrames : 0,
- parent: loopDisplay ? sequenceId : parentSequence?.id ?? null,
+ duration,
+ from: 0,
+ parent: parentSequence?.id ?? null,
  displayName: finalDisplayName,
  rootId,
  volume: volumes,
@@ -1987,17 +1904,14 @@ var useMediaInTimeline = ({
  nonce: nonce.get(),
  startMediaFrom: 0 - startsAt + (trimBefore ?? 0),
  doesVolumeChange,
- loopDisplay: undefined,
+ loopDisplay,
  playbackRate,
  stack,
- premountDisplay: null,
- postmountDisplay: null,
+ premountDisplay,
+ postmountDisplay,
  controls: controls ?? null
  });
  return () => {
- if (loopDisplay) {
- unregisterSequence(sequenceId);
- }
  unregisterSequence(mediaId);
  };
  }, [
@@ -2016,14 +1930,12 @@ var useMediaInTimeline = ({
  premountDisplay,
  registerSequence,
  rootId,
- sequenceId,
  showInTimeline,
  src,
  stack,
  startsAt,
  unregisterSequence,
  volumes,
- frame,
  trimBefore
  ]);
  return {
@@ -2044,7 +1956,7 @@ var {
  warnAboutTooHighVolume,
  usePreload,
  SequenceContext
- } = Internals10;
+ } = Internals8;
  var AudioForPreviewAssertedShowing = ({
  src,
  playbackRate,
@@ -2062,20 +1974,19 @@ var AudioForPreviewAssertedShowing = ({
  toneFrequency,
  audioStreamIndex,
  fallbackHtml5AudioProps,
- debugAudioScheduling,
  onError,
  credentials,
  controls
  }) => {
  const videoConfig = useUnsafeVideoConfig();
- const frame = useCurrentFrame2();
+ const frame = useCurrentFrame();
  const mediaPlayerRef = useRef(null);
  const initialTrimBeforeRef = useRef(trimBefore);
  const initialTrimAfterRef = useRef(trimAfter);
  const [mediaPlayerReady, setMediaPlayerReady] = useState2(false);
  const [shouldFallbackToNativeAudio, setShouldFallbackToNativeAudio] = useState2(false);
  const [playing] = Timeline.usePlayingState();
- const timelineContext = Internals10.useTimelineContext();
+ const timelineContext = Internals8.useTimelineContext();
  const globalPlaybackRate = timelineContext.playbackRate;
  const sharedAudioContext = useContext3(SharedAudioContext);
  const buffer = useBufferState();
@@ -2126,12 +2037,12 @@ var AudioForPreviewAssertedShowing = ({
  trimBefore,
  controls
  });
- const bufferingContext = useContext3(Internals10.BufferingContextReact);
+ const bufferingContext = useContext3(Internals8.BufferingContextReact);
  if (!bufferingContext) {
  throw new Error("useMediaPlayback must be used inside a <BufferingContext>");
  }
  const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
- const isPlayerBuffering = Internals10.useIsPlayerBuffering(bufferingContext);
+ const isPlayerBuffering = Internals8.useIsPlayerBuffering(bufferingContext);
  const initialPlaying = useRef(playing && !isPlayerBuffering);
  const initialIsPremounting = useRef(isPremounting);
  const initialIsPostmounting = useRef(isPostmounting);
@@ -2156,13 +2067,11 @@ var AudioForPreviewAssertedShowing = ({
  fps: videoConfig.fps,
  sequenceOffset,
  loop,
- debugAudioScheduling,
  durationInFrames: videoConfig.durationInFrames,
  isPremounting,
  isPostmounting,
  currentTime,
  logLevel,
- sharedAudioContext,
  label: "AudioForPreview"
  });
  useEffect2(() => {
@@ -2170,12 +2079,27 @@ var AudioForPreviewAssertedShowing = ({
  return;
  if (!sharedAudioContext.audioContext)
  return;
- const { audioContext, audioSyncAnchor, scheduleAudioNode } = sharedAudioContext;
+ const {
+ audioContext,
+ gainNode,
+ audioSyncAnchor,
+ scheduleAudioNode,
+ unscheduleAudioNode
+ } = sharedAudioContext;
+ if (!gainNode) {
+ return;
+ }
  try {
  const player = new MediaPlayer({
  src: preloadedSrc,
  logLevel,
- sharedAudioContext: { audioContext, audioSyncAnchor, scheduleAudioNode },
+ sharedAudioContext: {
+ audioContext,
+ gainNode,
+ audioSyncAnchor,
+ scheduleAudioNode,
+ unscheduleAudioNode
+ },
  loop,
  trimAfter: initialTrimAfterRef.current,
  trimBefore: initialTrimBeforeRef.current,
@@ -2184,7 +2108,6 @@ var AudioForPreviewAssertedShowing = ({
  playbackRate: initialPlaybackRate.current,
  audioStreamIndex: audioStreamIndex ?? 0,
  debugOverlay: false,
- debugAudioScheduling,
  bufferState: buffer,
  isPostmounting: initialIsPostmounting.current,
  isPremounting: initialIsPremounting.current,
@@ -2193,7 +2116,8 @@ var AudioForPreviewAssertedShowing = ({
  onVideoFrameCallback: null,
  playing: initialPlaying.current,
  sequenceOffset: initialSequenceOffset.current,
- credentials
+ credentials,
+ tagType: "audio"
  });
  mediaPlayerRef.current = player;
  player.initialize(currentTimeRef.current, initialMuted.current).then((result) => {
@@ -2211,7 +2135,7 @@ var AudioForPreviewAssertedShowing = ({
  if (action === "fail") {
  throw errorToUse;
  } else {
- Internals10.Log.warn({ logLevel, tag: "@remotion/media" }, fallbackMessage);
+ Internals8.Log.warn({ logLevel, tag: "@remotion/media" }, fallbackMessage);
  setShouldFallbackToNativeAudio(true);
  }
  };
@@ -2234,7 +2158,7 @@ var AudioForPreviewAssertedShowing = ({
  if (result.type === "success") {
  setMediaPlayerReady(true);
  setMediaDurationInSeconds(result.durationInSeconds);
- Internals10.Log.trace({ logLevel, tag: "@remotion/media" }, `[AudioForPreview] MediaPlayer initialized successfully`);
+ Internals8.Log.trace({ logLevel, tag: "@remotion/media" }, `[AudioForPreview] MediaPlayer initialized successfully`);
  }
  }).catch((error) => {
  const [action, errorToUse] = callOnErrorAndResolve({
@@ -2247,7 +2171,7 @@ var AudioForPreviewAssertedShowing = ({
  if (action === "fail") {
  throw errorToUse;
  } else {
- Internals10.Log.error({ logLevel, tag: "@remotion/media" }, "[AudioForPreview] Failed to initialize MediaPlayer", error);
+ Internals8.Log.error({ logLevel, tag: "@remotion/media" }, "[AudioForPreview] Failed to initialize MediaPlayer", error);
  setShouldFallbackToNativeAudio(true);
  }
  });
@@ -2262,12 +2186,12 @@ var AudioForPreviewAssertedShowing = ({
  if (action === "fail") {
  throw errorToUse;
  }
- Internals10.Log.error({ logLevel, tag: "@remotion/media" }, "[AudioForPreview] MediaPlayer initialization failed", errorToUse);
+ Internals8.Log.error({ logLevel, tag: "@remotion/media" }, "[AudioForPreview] MediaPlayer initialization failed", errorToUse);
  setShouldFallbackToNativeAudio(true);
  }
  return () => {
  if (mediaPlayerRef.current) {
- Internals10.Log.trace({ logLevel, tag: "@remotion/media" }, `[AudioForPreview] Disposing MediaPlayer`);
+ Internals8.Log.trace({ logLevel, tag: "@remotion/media" }, `[AudioForPreview] Disposing MediaPlayer`);
  mediaPlayerRef.current.dispose();
  mediaPlayerRef.current = null;
  }
@@ -2282,7 +2206,6 @@ var AudioForPreviewAssertedShowing = ({
  videoConfig.fps,
  audioStreamIndex,
  disallowFallbackToHtml5Audio,
- debugAudioScheduling,
  buffer,
  onError,
  credentials
@@ -2326,14 +2249,13 @@ var AudioForPreview = ({
  toneFrequency,
  audioStreamIndex,
  fallbackHtml5AudioProps,
- debugAudioScheduling,
  onError,
  credentials,
  controls
  }) => {
  const preloadedSrc = usePreload(src);
- const defaultLogLevel = Internals10.useLogLevel();
- const frame = useCurrentFrame2();
+ const defaultLogLevel = Internals8.useLogLevel();
+ const frame = useCurrentFrame();
  const videoConfig = useVideoConfig2();
  const currentTime = frame / videoConfig.fps;
  const showShow = useMemo2(() => {
@@ -2376,7 +2298,6 @@ var AudioForPreview = ({
  stack,
  disallowFallbackToHtml5Audio: disallowFallbackToHtml5Audio ?? false,
  toneFrequency,
- debugAudioScheduling: debugAudioScheduling ?? false,
  onError,
  credentials,
  fallbackHtml5AudioProps,
@@ -2389,22 +2310,22 @@ import { useContext as useContext4, useLayoutEffect as useLayoutEffect2, useMemo
  import {
  cancelRender as cancelRender2,
  Html5Audio,
- Internals as Internals18,
+ Internals as Internals16,
  random,
- useCurrentFrame as useCurrentFrame3,
+ useCurrentFrame as useCurrentFrame2,
  useDelayRender,
  useRemotionEnvironment
  } from "remotion";
 
  // src/caches.ts
  import React2 from "react";
- import { cancelRender, Internals as Internals15 } from "remotion";
+ import { cancelRender, Internals as Internals13 } from "remotion";
 
  // src/audio-extraction/audio-manager.ts
- import { Internals as Internals12 } from "remotion";
+ import { Internals as Internals10 } from "remotion";
 
  // src/audio-extraction/audio-iterator.ts
- import { Internals as Internals11 } from "remotion";
+ import { Internals as Internals9 } from "remotion";
 
  // src/audio-extraction/audio-cache.ts
  var makeAudioCache = () => {
@@ -2483,7 +2404,7 @@ var warnAboutMatroskaOnce = (src, logLevel) => {
  return;
  }
  warned[src] = true;
- Internals11.Log.warn({ logLevel, tag: "@remotion/media" }, `Audio from ${src} will need to be read from the beginning. https://www.remotion.dev/docs/media/support#matroska-limitation`);
+ Internals9.Log.warn({ logLevel, tag: "@remotion/media" }, `Audio from ${src} will need to be read from the beginning. https://www.remotion.dev/docs/media/support#matroska-limitation`);
  };
  var makeAudioIterator2 = ({
  audioSampleSink,
@@ -2551,7 +2472,7 @@ var makeAudioIterator2 = ({
  if (openTimestamps.length > 0) {
  const first = openTimestamps[0];
  const last = openTimestamps[openTimestamps.length - 1];
- Internals11.Log.verbose({ logLevel, tag: "@remotion/media" }, "Open audio samples for src", src, `${first.toFixed(3)}...${last.toFixed(3)}`);
+ Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, "Open audio samples for src", src, `${first.toFixed(3)}...${last.toFixed(3)}`);
  }
  };
  const getCacheStats = () => {
@@ -2648,7 +2569,7 @@ var makeAudioManager = () => {
  if (seenKeys.has(key)) {
  iterator.prepareForDeletion();
  iterators.splice(iterators.indexOf(iterator), 1);
- Internals12.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted duplicate iterator for ${iterator.src}`);
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted duplicate iterator for ${iterator.src}`);
  }
  seenKeys.add(key);
  }
@@ -2669,7 +2590,7 @@ var makeAudioManager = () => {
  attempts++;
  }
  if ((await getTotalCacheStats()).totalSize > maxCacheSize && attempts >= maxAttempts) {
- Internals12.Log.warn({ logLevel, tag: "@remotion/media" }, `Audio cache: Exceeded max cache size after ${maxAttempts} attempts. Still ${(await getTotalCacheStats()).totalSize} bytes used, target was ${maxCacheSize} bytes.`);
+ Internals10.Log.warn({ logLevel, tag: "@remotion/media" }, `Audio cache: Exceeded max cache size after ${maxAttempts} attempts. Still ${(await getTotalCacheStats()).totalSize} bytes used, target was ${maxCacheSize} bytes.`);
  }
  for (const iterator of iterators) {
  if (iterator.src === src && await iterator.waitForCompletion() && iterator.canSatisfyRequestedTime(timeInSeconds)) {
@@ -2738,7 +2659,7 @@ var makeAudioManager = () => {
  };
 
  // src/video-extraction/keyframe-manager.ts
- import { Internals as Internals14 } from "remotion";
+ import { Internals as Internals12 } from "remotion";
 
  // src/render-timestamp-range.ts
  var renderTimestampRange = (timestamps) => {
@@ -2752,7 +2673,7 @@ var renderTimestampRange = (timestamps) => {
  };
 
  // src/video-extraction/keyframe-bank.ts
- import { Internals as Internals13 } from "remotion";
+ import { Internals as Internals11 } from "remotion";
 
  // src/video-extraction/get-allocation-size.ts
  var getAllocationSize = (sample) => {
@@ -2815,7 +2736,7 @@ var makeKeyframeBank = async ({
  }
  }
  if (deletedTimestamps.length > 0) {
- Internals13.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${deletedTimestamps.length} frame${deletedTimestamps.length === 1 ? "" : "s"} ${renderTimestampRange(deletedTimestamps)} for src ${src} because it is lower than ${timestampInSeconds}. Remaining: ${renderTimestampRange(frameTimestamps)}`);
+ Internals11.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${deletedTimestamps.length} frame${deletedTimestamps.length === 1 ? "" : "s"} ${renderTimestampRange(deletedTimestamps)} for src ${src} because it is lower than ${timestampInSeconds}. Remaining: ${renderTimestampRange(frameTimestamps)}`);
  }
  };
  const hasDecodedEnoughForTimestamp = (timestamp) => {
@@ -2838,7 +2759,7 @@ var makeKeyframeBank = async ({
  frameTimestamps.push(frame.timestamp);
  allocationSize += getAllocationSize(frame);
  lastUsed = Date.now();
- Internals13.Log.trace({ logLevel, tag: "@remotion/media" }, `Added frame at ${frame.timestamp}sec to bank`);
+ Internals11.Log.trace({ logLevel, tag: "@remotion/media" }, `Added frame at ${frame.timestamp}sec to bank`);
  };
  const ensureEnoughFramesForTimestamp = async (timestampInSeconds, logLevel, fps) => {
  while (!hasDecodedEnoughForTimestamp(timestampInSeconds)) {
@@ -2893,7 +2814,7 @@ var makeKeyframeBank = async ({
  throw new Error("No first frame found");
  }
  const startTimestampInSeconds = firstFrame.value.timestamp;
- Internals13.Log.verbose({ logLevel: parentLogLevel, tag: "@remotion/media" }, `Creating keyframe bank from ${startTimestampInSeconds}sec`);
+ Internals11.Log.verbose({ logLevel: parentLogLevel, tag: "@remotion/media" }, `Creating keyframe bank from ${startTimestampInSeconds}sec`);
  addFrame(firstFrame.value, parentLogLevel);
  const getRangeOfTimestamps = () => {
  if (frameTimestamps.length === 0) {
@@ -2911,7 +2832,7 @@ var makeKeyframeBank = async ({
  const prepareForDeletion = (logLevel, reason) => {
  const range = getRangeOfTimestamps();
  if (range) {
- Internals13.Log.verbose({ logLevel, tag: "@remotion/media" }, `Preparing for deletion (${reason}) of keyframe bank from ${range?.firstTimestamp}sec to ${range?.lastTimestamp}sec`);
+ Internals11.Log.verbose({ logLevel, tag: "@remotion/media" }, `Preparing for deletion (${reason}) of keyframe bank from ${range?.firstTimestamp}sec to ${range?.lastTimestamp}sec`);
  }
  let framesDeleted = 0;
  for (const frameTimestamp of frameTimestamps.slice()) {
@@ -2984,10 +2905,10 @@ var makeKeyframeManager = () => {
  if (size === 0) {
  continue;
  }
- Internals14.Log.verbose({ logLevel, tag: "@remotion/media" }, `Open frames for src ${src}: ${renderTimestampRange(timestamps)}`);
+ Internals12.Log.verbose({ logLevel, tag: "@remotion/media" }, `Open frames for src ${src}: ${renderTimestampRange(timestamps)}`);
  }
  }
- Internals14.Log.verbose({ logLevel, tag: "@remotion/media" }, `Video cache stats: ${count} open frames, ${totalSize} bytes`);
+ Internals12.Log.verbose({ logLevel, tag: "@remotion/media" }, `Video cache stats: ${count} open frames, ${totalSize} bytes`);
  };
  const getCacheStats = () => {
  let count = 0;
@@ -3041,7 +2962,7 @@ var makeKeyframeManager = () => {
  const { framesDeleted } = mostInThePastBank.prepareForDeletion(logLevel, "deleted oldest keyframe bank to stay under max cache size");
  sources[mostInThePastSrc].splice(mostInThePastIndex, 1);
  if (range) {
- Internals14.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${framesDeleted} frames for src ${mostInThePastSrc} from ${range?.firstTimestamp}sec to ${range?.lastTimestamp}sec to free up memory.`);
+ Internals12.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${framesDeleted} frames for src ${mostInThePastSrc} from ${range?.firstTimestamp}sec to ${range?.lastTimestamp}sec to free up memory.`);
  }
  }
  return { finish: false };
@@ -3055,12 +2976,12 @@ var makeKeyframeManager = () => {
  if (finish) {
  break;
  }
- Internals14.Log.verbose({ logLevel, tag: "@remotion/media" }, "Deleted oldest keyframe bank to stay under max cache size", (cacheStats.totalSize / 1024 / 1024).toFixed(1), "out of", (maxCacheSize / 1024 / 1024).toFixed(1));
+ Internals12.Log.verbose({ logLevel, tag: "@remotion/media" }, "Deleted oldest keyframe bank to stay under max cache size", (cacheStats.totalSize / 1024 / 1024).toFixed(1), "out of", (maxCacheSize / 1024 / 1024).toFixed(1));
  cacheStats = getTotalCacheStats();
  attempts++;
  }
  if (cacheStats.totalSize > maxCacheSize && attempts >= maxAttempts) {
- Internals14.Log.warn({ logLevel, tag: "@remotion/media" }, `Exceeded max cache size after ${maxAttempts} attempts. Remaining cache size: ${(cacheStats.totalSize / 1024 / 1024).toFixed(1)} MB, target was ${(maxCacheSize / 1024 / 1024).toFixed(1)} MB.`);
+ Internals12.Log.warn({ logLevel, tag: "@remotion/media" }, `Exceeded max cache size after ${maxAttempts} attempts. Remaining cache size: ${(cacheStats.totalSize / 1024 / 1024).toFixed(1)} MB, target was ${(maxCacheSize / 1024 / 1024).toFixed(1)} MB.`);
  }
  };
  const clearKeyframeBanksBeforeTime = ({
@@ -3081,7 +3002,7 @@ var makeKeyframeManager = () => {
  }
  if (range.lastTimestamp < threshold) {
  bank.prepareForDeletion(logLevel, "cleared before threshold " + threshold);
- Internals14.Log.verbose({ logLevel, tag: "@remotion/media" }, `[Video] Cleared frames for src ${src} from ${range.firstTimestamp}sec to ${range.lastTimestamp}sec`);
+ Internals12.Log.verbose({ logLevel, tag: "@remotion/media" }, `[Video] Cleared frames for src ${src} from ${range.firstTimestamp}sec to ${range.lastTimestamp}sec`);
  const bankIndex = banks.indexOf(bank);
  delete sources[src][bankIndex];
  } else {
@@ -3103,7 +3024,7 @@ var makeKeyframeManager = () => {
  const existingBanks = sources[src] ?? [];
  const existingBank = existingBanks?.find((bank) => bank.canSatisfyTimestamp(timestamp));
  if (!existingBank) {
- Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `Creating new keyframe bank for src ${src} at timestamp ${timestamp}`);
+ Internals12.Log.trace({ logLevel, tag: "@remotion/media" }, `Creating new keyframe bank for src ${src} at timestamp ${timestamp}`);
  const newKeyframeBank = await makeKeyframeBank({
  videoSampleSink,
  logLevel,
@@ -3114,10 +3035,10 @@ var makeKeyframeManager = () => {
  return newKeyframeBank;
  }
  if (existingBank.canSatisfyTimestamp(timestamp)) {
- Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `Keyframe bank exists and satisfies timestamp ${timestamp}`);
+ Internals12.Log.trace({ logLevel, tag: "@remotion/media" }, `Keyframe bank exists and satisfies timestamp ${timestamp}`);
  return existingBank;
  }
- Internals14.Log.verbose({ logLevel, tag: "@remotion/media" }, `Keyframe bank exists but frame at time ${timestamp} does not exist anymore.`);
+ Internals12.Log.verbose({ logLevel, tag: "@remotion/media" }, `Keyframe bank exists but frame at time ${timestamp} does not exist anymore.`);
  existingBank.prepareForDeletion(logLevel, "already existed but evicted");
  sources[src] = sources[src].filter((bank) => bank !== existingBank);
  const replacementKeybank = await makeKeyframeBank({
@@ -3208,20 +3129,20 @@ var getUncachedMaxCacheSize = (logLevel) => {
  if (window.remotion_mediaCacheSizeInBytes > 20000 * 1024 * 1024) {
  cancelRender(new Error(`The maximum value for the "mediaCacheSizeInBytes" prop is 20GB (${20000 * 1024 * 1024}), got: ${window.remotion_mediaCacheSizeInBytes}`));
  }
- Internals15.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set using "mediaCacheSizeInBytes": ${(window.remotion_mediaCacheSizeInBytes / 1024 / 1024).toFixed(1)} MB`);
+ Internals13.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set using "mediaCacheSizeInBytes": ${(window.remotion_mediaCacheSizeInBytes / 1024 / 1024).toFixed(1)} MB`);
  return window.remotion_mediaCacheSizeInBytes;
  }
  if (typeof window !== "undefined" && window.remotion_initialMemoryAvailable !== undefined && window.remotion_initialMemoryAvailable !== null) {
  const value = window.remotion_initialMemoryAvailable / 2;
  if (value < 500 * 1024 * 1024) {
- Internals15.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on minimum value of 500MB (which is more than half of the available system memory!)`);
+ Internals13.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on minimum value of 500MB (which is more than half of the available system memory!)`);
  return 500 * 1024 * 1024;
  }
  if (value > 20000 * 1024 * 1024) {
- Internals15.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);
+ Internals13.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);
  return 20000 * 1024 * 1024;
  }
- Internals15.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on available memory (50% of available memory): ${(value / 1024 / 1024).toFixed(1)} MB`);
+ Internals13.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on available memory (50% of available memory): ${(value / 1024 / 1024).toFixed(1)} MB`);
  return value;
  }
  return 1000 * 1000 * 1000;
@@ -3235,7 +3156,7 @@ var getMaxVideoCacheSize = (logLevel) => {
  return cachedMaxCacheSize;
  };
  var useMaxMediaCacheSize = (logLevel) => {
- const context = React2.useContext(Internals15.MaxMediaCacheSizeContext);
+ const context = React2.useContext(Internals13.MaxMediaCacheSizeContext);
  if (context === null) {
  return getMaxVideoCacheSize(logLevel);
  }
@@ -3489,7 +3410,7 @@ var combineAudioDataAndClosePrevious = (audioDataArray) => {
  };
 
  // src/get-sink.ts
- import { Internals as Internals16 } from "remotion";
+ import { Internals as Internals14 } from "remotion";
 
  // src/video-extraction/get-frames-since-keyframe.ts
  import {
@@ -3644,7 +3565,7 @@ var getSink = (src, logLevel, credentials) => {
  const cacheKey = credentials ? `${src}::${credentials}` : src;
  let promise = sinkPromises[cacheKey];
  if (!promise) {
- Internals16.Log.verbose({
+ Internals14.Log.verbose({
  logLevel,
  tag: "@remotion/media"
  }, `Sink for ${src} was not found, creating new sink`);
@@ -3785,7 +3706,7 @@ var extractAudio = (params) => {
  };
 
  // src/video-extraction/extract-frame.ts
- import { Internals as Internals17 } from "remotion";
+ import { Internals as Internals15 } from "remotion";
  var extractFrameInternal = async ({
  src,
  timeInSeconds: unloopedTimeInSeconds,
@@ -3867,7 +3788,7 @@ var extractFrameInternal = async ({
  durationInSeconds: await sink.getDuration()
  };
  } catch (err) {
- Internals17.Log.info({ logLevel, tag: "@remotion/media" }, `Error decoding ${src} at time ${timeInSeconds}: ${err}`, err);
+ Internals15.Log.info({ logLevel, tag: "@remotion/media" }, `Error decoding ${src} at time ${timeInSeconds}: ${err}`, err);
  return { type: "cannot-decode", durationInSeconds: mediaDurationInSeconds };
  }
  };
@@ -4271,13 +4192,13 @@ var AudioForRendering = ({
  onError,
  credentials
  }) => {
- const defaultLogLevel = Internals18.useLogLevel();
+ const defaultLogLevel = Internals16.useLogLevel();
  const logLevel = overriddenLogLevel ?? defaultLogLevel;
- const frame = useCurrentFrame3();
- const absoluteFrame = Internals18.useTimelinePosition();
- const videoConfig = Internals18.useUnsafeVideoConfig();
- const { registerRenderAsset, unregisterRenderAsset } = useContext4(Internals18.RenderAssetManager);
- const startsAt = Internals18.useMediaStartsAt();
+ const frame = useCurrentFrame2();
+ const absoluteFrame = Internals16.useTimelinePosition();
+ const videoConfig = Internals16.useUnsafeVideoConfig();
+ const { registerRenderAsset, unregisterRenderAsset } = useContext4(Internals16.RenderAssetManager);
+ const startsAt = Internals16.useMediaStartsAt();
  const environment = useRemotionEnvironment();
  if (!videoConfig) {
  throw new Error("No video config found");
@@ -4288,7 +4209,7 @@ var AudioForRendering = ({
  const { fps } = videoConfig;
  const { delayRender, continueRender } = useDelayRender();
  const [replaceWithHtml5Audio, setReplaceWithHtml5Audio] = useState3(false);
- const sequenceContext = useContext4(Internals18.SequenceContext);
+ const sequenceContext = useContext4(Internals16.SequenceContext);
  const id = useMemo3(() => `media-audio-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
  src,
  sequenceContext?.cumulatedFrom,
@@ -4296,7 +4217,7 @@ var AudioForRendering = ({
  sequenceContext?.durationInFrames
  ]);
  const maxCacheSize = useMaxMediaCacheSize(logLevel);
- const audioEnabled = Internals18.useAudioEnabled();
+ const audioEnabled = Internals16.useAudioEnabled();
  useLayoutEffect2(() => {
  const timestamp = frame / fps;
  const durationInSeconds = 1 / fps;
@@ -4347,7 +4268,7 @@ var AudioForRendering = ({
  if (action === "fail") {
  cancelRender2(errorToUse);
  }
- Internals18.Log.warn({ logLevel, tag: "@remotion/media" }, fallbackMessage);
+ Internals16.Log.warn({ logLevel, tag: "@remotion/media" }, fallbackMessage);
  setReplaceWithHtml5Audio(true);
  };
  if (result.type === "unknown-container-format") {
@@ -4374,12 +4295,12 @@ var AudioForRendering = ({
  frame,
  startsAt
  });
- const volume = Internals18.evaluateVolume({
+ const volume = Internals16.evaluateVolume({
  volume: volumeProp,
  frame: volumePropsFrame,
  mediaVolume: 1
  });
- Internals18.warnAboutTooHighVolume(volume);
+ Internals16.warnAboutTooHighVolume(volume);
  if (audio && volume > 0) {
  applyVolume(audio.data, volume);
  registerRenderAsset({
@@ -4456,7 +4377,7 @@ var AudioForRendering = ({
 
  // src/audio/audio.tsx
  import { jsx as jsx3 } from "react/jsx-runtime";
- var { validateMediaProps } = Internals19;
+ var { validateMediaProps } = Internals17;
  var audioSchema = {
  volume: {
  type: "number",
@@ -4505,11 +4426,11 @@ var AudioInner = (props) => {
  })
  });
  };
- var Audio = Internals19.wrapInSchema(AudioInner, audioSchema);
- Internals19.addSequenceStackTraces(Audio);
+ var Audio = Internals17.wrapInSchema(AudioInner, audioSchema);
+ Internals17.addSequenceStackTraces(Audio);
 
  // src/video/video.tsx
- import { Internals as Internals23, Sequence as Sequence2, useRemotionEnvironment as useRemotionEnvironment4 } from "remotion";
+ import { Internals as Internals21, Sequence as Sequence2, useRemotionEnvironment as useRemotionEnvironment4 } from "remotion";
 
  // src/video/video-for-preview.tsx
  import {
@@ -4522,9 +4443,9 @@ import {
  } from "react";
  import {
  Html5Video,
- Internals as Internals21,
+ Internals as Internals19,
  useBufferState as useBufferState2,
- useCurrentFrame as useCurrentFrame4,
+ useCurrentFrame as useCurrentFrame3,
  useVideoConfig as useVideoConfig3
  } from "remotion";
 
@@ -4552,7 +4473,7 @@ var getCachedVideoFrame = (src) => {
  };
 
  // src/video/warn-object-fit-css.ts
- import { Internals as Internals20 } from "remotion";
+ import { Internals as Internals18 } from "remotion";
  var OBJECT_FIT_CLASS_PATTERN = /\bobject-(contain|cover|fill|none|scale-down)\b/;
  var warnedStyle = false;
  var warnedClassName = false;
@@ -4563,11 +4484,11 @@ var warnAboutObjectFitInStyleOrClassName = ({
  }) => {
  if (!warnedStyle && style?.objectFit) {
  warnedStyle = true;
- Internals20.Log.warn({ logLevel, tag: "@remotion/media" }, "Use the `objectFit` prop instead of the `style` prop.");
+ Internals18.Log.warn({ logLevel, tag: "@remotion/media" }, "Use the `objectFit` prop instead of the `style` prop.");
  }
  if (!warnedClassName && className && OBJECT_FIT_CLASS_PATTERN.test(className)) {
  warnedClassName = true;
- Internals20.Log.warn({ logLevel, tag: "@remotion/media" }, "Use the `objectFit` prop instead of `object-*` CSS class names.");
+ Internals18.Log.warn({ logLevel, tag: "@remotion/media" }, "Use the `objectFit` prop instead of `object-*` CSS class names.");
  }
  };
 
@@ -4585,7 +4506,7 @@ var {
  usePreload: usePreload2,
  SequenceContext: SequenceContext2,
  SequenceVisibilityToggleContext
- } = Internals21;
+ } = Internals19;
  var VideoForPreviewAssertedShowing = ({
  src: unpreloadedSrc,
  style,
@@ -4606,7 +4527,6 @@ var VideoForPreviewAssertedShowing = ({
  fallbackOffthreadVideoProps,
  audioStreamIndex,
  debugOverlay,
- debugAudioScheduling,
  headless,
  onError,
  credentials,
@@ -4617,7 +4537,7 @@ var VideoForPreviewAssertedShowing = ({
  const src = usePreload2(unpreloadedSrc);
  const canvasRef = useRef2(null);
  const videoConfig = useUnsafeVideoConfig2();
- const frame = useCurrentFrame4();
+ const frame = useCurrentFrame3();
  const mediaPlayerRef = useRef2(null);
  const initialTrimBeforeRef = useRef2(trimBefore);
  const initialTrimAfterRef = useRef2(trimAfter);
@@ -4625,7 +4545,7 @@ var VideoForPreviewAssertedShowing = ({
  const [mediaPlayerReady, setMediaPlayerReady] = useState4(false);
  const [shouldFallbackToNativeVideo, setShouldFallbackToNativeVideo] = useState4(false);
  const [playing] = Timeline2.usePlayingState();
- const timelineContext = Internals21.useTimelineContext();
+ const timelineContext = Internals19.useTimelineContext();
  const globalPlaybackRate = timelineContext.playbackRate;
  const sharedAudioContext = useContext5(SharedAudioContext2);
  const buffer = useBufferState2();
@@ -4675,19 +4595,19 @@ var VideoForPreviewAssertedShowing = ({
  const currentTimeRef = useRef2(currentTime);
  currentTimeRef.current = currentTime;
  const preloadedSrc = usePreload2(src);
- const buffering = useContext5(Internals21.BufferingContextReact);
+ const buffering = useContext5(Internals19.BufferingContextReact);
  if (!buffering) {
  throw new Error("useMediaPlayback must be used inside a <BufferingContext>");
  }
  const effectiveMuted = isSequenceHidden || muted || mediaMuted || userPreferredVolume <= 0;
- const isPlayerBuffering = Internals21.useIsPlayerBuffering(buffering);
+ const isPlayerBuffering = Internals19.useIsPlayerBuffering(buffering);
  const initialPlaying = useRef2(playing && !isPlayerBuffering);
  const initialIsPremounting = useRef2(isPremounting);
  const initialIsPostmounting = useRef2(isPostmounting);
  const initialGlobalPlaybackRate = useRef2(globalPlaybackRate);
  const initialPlaybackRate = useRef2(playbackRate);
  const initialMuted = useRef2(effectiveMuted);
- const initialDurationInFrames = useRef2(videoConfig.durationInFrames);
+ const initialSequenceDuration = useRef2(videoConfig.durationInFrames);
  const initialSequenceOffset = useRef2(sequenceOffset);
  const hasDrawnRealFrameRef = useRef2(false);
  const isPremountingRef = useRef2(isPremounting);
@@ -4732,13 +4652,28 @@ var VideoForPreviewAssertedShowing = ({
  return;
  if (!sharedAudioContext.audioContext)
  return;
- const { audioContext, audioSyncAnchor, scheduleAudioNode } = sharedAudioContext;
+ const {
+ audioContext,
+ gainNode,
+ audioSyncAnchor,
+ scheduleAudioNode,
+ unscheduleAudioNode
+ } = sharedAudioContext;
+ if (!gainNode) {
+ return;
+ }
  try {
  const player = new MediaPlayer({
  canvas: canvasRef.current,
  src: preloadedSrc,
  logLevel,
- sharedAudioContext: { audioContext, audioSyncAnchor, scheduleAudioNode },
+ sharedAudioContext: {
+ audioContext,
+ gainNode,
+ audioSyncAnchor,
+ scheduleAudioNode,
+ unscheduleAudioNode
+ },
  loop,
  trimAfter: initialTrimAfterRef.current,
  trimBefore: initialTrimBeforeRef.current,
@@ -4746,16 +4681,16 @@ var VideoForPreviewAssertedShowing = ({
  playbackRate: initialPlaybackRate.current,
  audioStreamIndex,
  debugOverlay,
- debugAudioScheduling,
  bufferState: buffer,
  isPremounting: initialIsPremounting.current,
  isPostmounting: initialIsPostmounting.current,
  globalPlaybackRate: initialGlobalPlaybackRate.current,
- durationInFrames: initialDurationInFrames.current,
+ durationInFrames: initialSequenceDuration.current,
  onVideoFrameCallback: initialOnVideoFrameRef.current ?? null,
  playing: initialPlaying.current,
  sequenceOffset: initialSequenceOffset.current,
- credentials
+ credentials,
+ tagType: "video"
  });
  mediaPlayerRef.current = player;
  player.initialize(currentTimeRef.current, initialMuted.current).then((result) => {
@@ -4773,7 +4708,7 @@ var VideoForPreviewAssertedShowing = ({
  if (action === "fail") {
  throw errorToUse;
  }
- Internals21.Log.warn({ logLevel, tag: "@remotion/media" }, fallbackMessage);
+ Internals19.Log.warn({ logLevel, tag: "@remotion/media" }, fallbackMessage);
  setShouldFallbackToNativeVideo(true);
  };
  if (result.type === "unknown-container-format") {
@@ -4808,7 +4743,7 @@ var VideoForPreviewAssertedShowing = ({
  if (action === "fail") {
  throw errorToUse;
  }
- Internals21.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] Failed to initialize MediaPlayer", errorToUse);
+ Internals19.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] Failed to initialize MediaPlayer", errorToUse);
  setShouldFallbackToNativeVideo(true);
  });
  } catch (error) {
@@ -4822,12 +4757,12 @@ var VideoForPreviewAssertedShowing = ({
  if (action === "fail") {
  throw errorToUse;
  }
- Internals21.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] MediaPlayer initialization failed", errorToUse);
+ Internals19.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] MediaPlayer initialization failed", errorToUse);
  setShouldFallbackToNativeVideo(true);
  }
  return () => {
  if (mediaPlayerRef.current) {
- Internals21.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Disposing MediaPlayer`);
+ Internals19.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Disposing MediaPlayer`);
  mediaPlayerRef.current.dispose();
  mediaPlayerRef.current = null;
  }
@@ -4839,7 +4774,6 @@ var VideoForPreviewAssertedShowing = ({
  audioStreamIndex,
  buffer,
  debugOverlay,
- debugAudioScheduling,
  disallowFallbackToOffthreadVideo,
  logLevel,
  loop,
@@ -4851,7 +4785,7 @@ var VideoForPreviewAssertedShowing = ({
  ]);
  warnAboutObjectFitInStyleOrClassName({ style, className, logLevel });
  const classNameValue = useMemo4(() => {
- return [Internals21.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals21.truthy).join(" ");
+ return [Internals19.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals19.truthy).join(" ");
  }, [className]);
  useCommonEffects({
  mediaPlayerRef,
@@ -4869,13 +4803,11 @@ var VideoForPreviewAssertedShowing = ({
  fps: videoConfig.fps,
  sequenceOffset,
  loop,
- debugAudioScheduling,
  durationInFrames: videoConfig.durationInFrames,
  isPremounting,
  isPostmounting,
  currentTime,
  logLevel,
- sharedAudioContext,
  label: "VideoForPreview"
  });
  useLayoutEffect3(() => {
@@ -4927,7 +4859,7 @@ var VideoForPreviewAssertedShowing = ({
  });
  };
  var VideoForPreview = (props) => {
- const frame = useCurrentFrame4();
+ const frame = useCurrentFrame3();
  const videoConfig = useVideoConfig3();
  const currentTime = frame / videoConfig.fps;
  const showShow = useMemo4(() => {
@@ -4969,10 +4901,10 @@ import {
  useState as useState5
  } from "react";
  import {
- Internals as Internals22,
+ Internals as Internals20,
  Loop,
- useCurrentFrame as useCurrentFrame5,
+ useCurrentFrame as useCurrentFrame4,
  useDelayRender as useDelayRender2,
  useRemotionEnvironment as useRemotionEnvironment3,
  useVideoConfig as useVideoConfig4
@@ -5007,12 +4939,12 @@ var VideoForRendering = ({
  if (!src) {
  throw new TypeError("No `src` was passed to <Video>.");
  }
- const frame = useCurrentFrame5();
- const absoluteFrame = Internals22.useTimelinePosition();
+ const frame = useCurrentFrame4();
+ const absoluteFrame = Internals20.useTimelinePosition();
  const { fps } = useVideoConfig4();
- const { registerRenderAsset, unregisterRenderAsset } = useContext6(Internals22.RenderAssetManager);
- const startsAt = Internals22.useMediaStartsAt();
- const sequenceContext = useContext6(Internals22.SequenceContext);
+ const { registerRenderAsset, unregisterRenderAsset } = useContext6(Internals20.RenderAssetManager);
+ const startsAt = Internals20.useMediaStartsAt();
+ const sequenceContext = useContext6(Internals20.SequenceContext);
  const id = useMemo5(() => `media-video-${random2(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
  src,
  sequenceContext?.cumulatedFrom,
@@ -5023,8 +4955,8 @@ var VideoForRendering = ({
  const { delayRender, continueRender, cancelRender: cancelRender3 } = useDelayRender2();
  const canvasRef = useRef3(null);
  const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState5(false);
- const audioEnabled = Internals22.useAudioEnabled();
- const videoEnabled = Internals22.useVideoEnabled();
+ const audioEnabled = Internals20.useAudioEnabled();
+ const videoEnabled = Internals20.useVideoEnabled();
  const maxCacheSize = useMaxMediaCacheSize(logLevel);
  const [error, setError] = useState5(null);
  if (error) {
@@ -5089,7 +5021,7 @@ var VideoForRendering = ({
  return;
  }
  if (window.remotion_isMainTab) {
- Internals22.Log.warn({ logLevel, tag: "@remotion/media" }, fallbackMessage);
+ Internals20.Log.warn({ logLevel, tag: "@remotion/media" }, fallbackMessage);
  }
  setReplaceWithOffthreadVideo({
  durationInSeconds: mediaDurationInSeconds
@@ -5144,12 +5076,12 @@ var VideoForRendering = ({
  frame,
  startsAt
  });
- const volume = Internals22.evaluateVolume({
+ const volume = Internals20.evaluateVolume({
  volume: volumeProp,
  frame: volumePropsFrame,
  mediaVolume: 1
  });
- Internals22.warnAboutTooHighVolume(volume);
+ Internals20.warnAboutTooHighVolume(volume);
  if (audio && volume > 0) {
  applyVolume(audio.data, volume);
  registerRenderAsset({
@@ -5207,7 +5139,7 @@ var VideoForRendering = ({
  ]);
  warnAboutObjectFitInStyleOrClassName({ style, className, logLevel });
5209
5141
  const classNameValue = useMemo5(() => {
5210
- return [Internals22.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals22.truthy).join(" ");
5142
+ return [Internals20.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals20.truthy).join(" ");
5211
5143
  }, [className]);
5212
5144
  const styleWithObjectFit = useMemo5(() => {
5213
5145
  return {
@@ -5216,7 +5148,7 @@ var VideoForRendering = ({
5216
5148
  };
5217
5149
  }, [objectFitProp, style]);
5218
5150
  if (replaceWithOffthreadVideo) {
5219
- const fallback = /* @__PURE__ */ jsx5(Internals22.InnerOffthreadVideo, {
5151
+ const fallback = /* @__PURE__ */ jsx5(Internals20.InnerOffthreadVideo, {
5220
5152
  src,
5221
5153
  playbackRate: playbackRate ?? 1,
5222
5154
  muted: muted ?? false,
@@ -5256,7 +5188,7 @@ var VideoForRendering = ({
5256
5188
  }
5257
5189
  return /* @__PURE__ */ jsx5(Loop, {
5258
5190
  layout: "none",
5259
- durationInFrames: Internals22.calculateMediaDuration({
5191
+ durationInFrames: Internals20.calculateMediaDuration({
5260
5192
  trimAfter: trimAfterValue,
5261
5193
  mediaDurationInFrames: replaceWithOffthreadVideo.durationInSeconds * fps,
5262
5194
  playbackRate,
@@ -5279,7 +5211,7 @@ var VideoForRendering = ({
5279
5211
 
5280
5212
  // src/video/video.tsx
5281
5213
  import { jsx as jsx6 } from "react/jsx-runtime";
5282
- var { validateMediaTrimProps, resolveTrimProps, validateMediaProps: validateMediaProps2 } = Internals23;
5214
+ var { validateMediaTrimProps, resolveTrimProps, validateMediaProps: validateMediaProps2 } = Internals21;
5283
5215
  var videoSchema = {
5284
5216
  volume: {
5285
5217
  type: "number",
@@ -5349,7 +5281,6 @@ var InnerVideo = ({
5349
5281
  toneFrequency,
5350
5282
  showInTimeline,
5351
5283
  debugOverlay,
5352
- debugAudioScheduling,
5353
5284
  headless,
5354
5285
  onError,
5355
5286
  credentials,
@@ -5422,7 +5353,6 @@ var InnerVideo = ({
5422
5353
  disallowFallbackToOffthreadVideo,
5423
5354
  fallbackOffthreadVideoProps,
5424
5355
  debugOverlay: debugOverlay ?? false,
5425
- debugAudioScheduling: debugAudioScheduling ?? false,
5426
5356
  headless: headless ?? false,
5427
5357
  onError,
5428
5358
  credentials,
@@ -5454,7 +5384,6 @@ var VideoInner = ({
5454
5384
  stack,
5455
5385
  toneFrequency,
5456
5386
  debugOverlay,
5457
- debugAudioScheduling,
5458
5387
  headless,
5459
5388
  onError,
5460
5389
  credentials,
@@ -5464,7 +5393,7 @@ var VideoInner = ({
5464
5393
  from,
5465
5394
  durationInFrames
5466
5395
  }) => {
5467
- const fallbackLogLevel = Internals23.useLogLevel();
5396
+ const fallbackLogLevel = Internals21.useLogLevel();
5468
5397
  return /* @__PURE__ */ jsx6(Sequence2, {
5469
5398
  layout: "none",
5470
5399
  from: from ?? 0,
@@ -5493,7 +5422,6 @@ var VideoInner = ({
5493
5422
  toneFrequency: toneFrequency ?? 1,
5494
5423
  stack,
5495
5424
  debugOverlay: debugOverlay ?? false,
5496
- debugAudioScheduling: debugAudioScheduling ?? false,
5497
5425
  headless: headless ?? false,
5498
5426
  onError,
5499
5427
  credentials,
@@ -5503,8 +5431,8 @@ var VideoInner = ({
5503
5431
  })
5504
5432
  });
5505
5433
  };
5506
- var Video = Internals23.wrapInSchema(VideoInner, videoSchema);
5507
- Internals23.addSequenceStackTraces(Video);
5434
+ var Video = Internals21.wrapInSchema(VideoInner, videoSchema);
5435
+ Internals21.addSequenceStackTraces(Video);
5508
5436
 
5509
5437
  // src/index.ts
5510
5438
  var experimental_Audio = Audio;