@remotion/webcodecs 4.0.226 → 4.0.228

This diff reflects the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
@@ -1,24 +1,175 @@
+ // src/log.ts
+ import { MediaParserInternals } from "@remotion/media-parser";
+ var { Log } = MediaParserInternals;
+
+ // src/with-resolvers.ts
+ var withResolvers = function() {
+ let resolve;
+ let reject;
+ const promise = new Promise((res, rej) => {
+ resolve = res;
+ reject = rej;
+ });
+ return { promise, resolve, reject };
+ };
+ var withResolversAndWaitForReturn = () => {
+ const { promise, reject, resolve } = withResolvers();
+ const { promise: returnPromise, resolve: resolveReturn } = withResolvers();
+ return {
+ getPromiseToImmediatelyReturn: () => {
+ resolveReturn(undefined);
+ return promise;
+ },
+ reject: (reason) => {
+ returnPromise.then(() => reject(reason));
+ },
+ resolve
+ };
+ };
+
+ // src/io-manager/event-emitter.ts
+ class IoEventEmitter {
+ listeners = {
+ input: [],
+ output: [],
+ processed: []
+ };
+ addEventListener(name, callback) {
+ this.listeners[name].push(callback);
+ }
+ removeEventListener(name, callback) {
+ this.listeners[name] = this.listeners[name].filter((l) => l !== callback);
+ }
+ dispatchEvent(dispatchName, context) {
+ this.listeners[dispatchName].forEach((callback) => {
+ callback({ detail: context });
+ });
+ }
+ }
+
+ // src/io-manager/io-synchronizer.ts
+ var makeIoSynchronizer = (logLevel, label) => {
+ const eventEmitter = new IoEventEmitter;
+ let lastInput = 0;
+ let lastInputKeyframe = 0;
+ let lastOutput = 0;
+ let inputsSinceLastOutput = 0;
+ let inputs = [];
+ let keyframes = [];
+ let unprocessed = 0;
+ const getUnprocessed = () => unprocessed;
+ const getUnemittedItems = () => {
+ inputs = inputs.filter((input) => input > lastOutput);
+ return inputs.length;
+ };
+ const getUnemittedKeyframes = () => {
+ keyframes = keyframes.filter((keyframe) => keyframe > lastOutput);
+ return keyframes.length;
+ };
+ const printState = (prefix) => {
+ Log.trace(logLevel, `[${label}] ${prefix}, state: Last input = ${lastInput} Last input keyframe = ${lastInputKeyframe} Last output = ${lastOutput} Inputs since last output = ${inputsSinceLastOutput}, Queue = ${getUnemittedItems()} (${getUnemittedKeyframes()} keyframes), Unprocessed = ${getUnprocessed()}`);
+ };
+ const inputItem = (timestamp, keyFrame) => {
+ lastInput = timestamp;
+ if (keyFrame) {
+ lastInputKeyframe = timestamp;
+ keyframes.push(timestamp);
+ }
+ inputsSinceLastOutput++;
+ inputs.push(timestamp);
+ eventEmitter.dispatchEvent("input", {
+ timestamp,
+ keyFrame
+ });
+ printState("Input item");
+ };
+ const onOutput = (timestamp) => {
+ lastOutput = timestamp;
+ inputsSinceLastOutput = 0;
+ eventEmitter.dispatchEvent("output", {
+ timestamp
+ });
+ unprocessed++;
+ printState("Got output");
+ };
+ const waitForOutput = () => {
+ const { promise, resolve } = withResolvers();
+ const on = () => {
+ eventEmitter.removeEventListener("output", on);
+ resolve();
+ };
+ eventEmitter.addEventListener("output", on);
+ return promise;
+ };
+ const waitForProcessed = () => {
+ const { promise, resolve } = withResolvers();
+ const on = () => {
+ eventEmitter.removeEventListener("processed", on);
+ resolve();
+ };
+ eventEmitter.addEventListener("processed", on);
+ return promise;
+ };
+ const waitFor = async ({
+ _unprocessed,
+ unemitted
+ }) => {
+ while (getUnemittedItems() > unemitted) {
+ await waitForOutput();
+ }
+ while (getUnprocessed() > _unprocessed) {
+ await waitForProcessed();
+ }
+ };
+ const waitForFinish = async () => {
+ await waitFor({ _unprocessed: 0, unemitted: 0 });
+ };
+ const onProcessed = () => {
+ eventEmitter.dispatchEvent("processed", {});
+ unprocessed--;
+ };
+ return {
+ inputItem,
+ onOutput,
+ waitFor,
+ waitForFinish,
+ onProcessed,
+ getUnprocessed
+ };
+ };
+
  // src/audio-decoder.ts
  var createAudioDecoder = ({
  onFrame,
  onError,
  signal,
- config
+ config,
+ logLevel
  }) => {
  if (signal.aborted) {
  throw new Error("Not creating audio decoder, already aborted");
  }
+ const ioSynchronizer = makeIoSynchronizer(logLevel, "Audio decoder");
  let outputQueue = Promise.resolve();
- let outputQueueSize = 0;
- let dequeueResolver = () => {
- };
  const audioDecoder = new AudioDecoder({
  output(inputFrame) {
- outputQueueSize++;
- outputQueue = outputQueue.then(() => onFrame(inputFrame)).then(() => {
- dequeueResolver();
- outputQueueSize--;
+ ioSynchronizer.onOutput(inputFrame.timestamp);
+ const abortHandler = () => {
+ inputFrame.close();
+ };
+ signal.addEventListener("abort", abortHandler, { once: true });
+ outputQueue = outputQueue.then(() => {
+ if (signal.aborted) {
+ return;
+ }
+ return onFrame(inputFrame);
+ }).then(() => {
+ ioSynchronizer.onProcessed();
+ signal.removeEventListener("abort", abortHandler);
  return Promise.resolve();
+ }).catch((err) => {
+ inputFrame.close();
+ onError(err);
  });
  },
  error(error) {
@@ -36,32 +187,15 @@ var createAudioDecoder = ({
  close();
  };
  signal.addEventListener("abort", onAbort);
- const getQueueSize = () => {
- return audioDecoder.decodeQueueSize + outputQueueSize;
- };
  audioDecoder.configure(config);
- const waitForDequeue = async () => {
- await new Promise((r) => {
- dequeueResolver = r;
- audioDecoder.addEventListener("dequeue", () => r(), {
- once: true
- });
- });
- };
- const waitForFinish = async () => {
- while (getQueueSize() > 0) {
- await waitForDequeue();
- }
- };
  const processSample = async (audioSample) => {
  if (audioDecoder.state === "closed") {
  return;
  }
- while (getQueueSize() > 10) {
- await waitForDequeue();
- }
+ await ioSynchronizer.waitFor({ unemitted: 100, _unprocessed: 2 });
  const chunk = new EncodedAudioChunk(audioSample);
  audioDecoder.decode(chunk);
+ ioSynchronizer.inputItem(chunk.timestamp, audioSample.type === "key");
  };
  let queue = Promise.resolve();
  return {
@@ -71,11 +205,10 @@ var createAudioDecoder = ({
  },
  waitForFinish: async () => {
  await audioDecoder.flush();
- await waitForFinish();
+ await ioSynchronizer.waitForFinish();
  await outputQueue;
  },
  close,
- getQueueSize,
  flush: async () => {
  await audioDecoder.flush();
  }
@@ -87,22 +220,27 @@ var createAudioEncoder = ({
  onError,
  codec,
  signal,
- config: audioEncoderConfig
+ config: audioEncoderConfig,
+ logLevel
  }) => {
  if (signal.aborted) {
  throw new Error("Not creating audio encoder, already aborted");
  }
+ const ioSynchronizer = makeIoSynchronizer(logLevel, "Audio encoder");
  let prom = Promise.resolve();
- let outputQueue = 0;
- let dequeueResolver = () => {
- };
  const encoder = new AudioEncoder({
  output: (chunk) => {
- outputQueue++;
- prom = prom.then(() => onChunk(chunk)).then(() => {
- outputQueue--;
- dequeueResolver();
+ ioSynchronizer.onOutput(chunk.timestamp);
+ prom = prom.then(() => {
+ if (signal.aborted) {
+ return;
+ }
+ return onChunk(chunk);
+ }).then(() => {
+ ioSynchronizer.onProcessed();
  return Promise.resolve();
+ }).catch((err) => {
+ onError(err);
  });
  },
  error(error) {
@@ -123,34 +261,17 @@ var createAudioEncoder = ({
  if (codec !== "opus") {
  throw new Error('Only `codec: "opus"` is supported currently');
  }
- const getQueueSize = () => {
- return encoder.encodeQueueSize + outputQueue;
- };
  encoder.configure(audioEncoderConfig);
- const waitForDequeue = async () => {
- await new Promise((r) => {
- dequeueResolver = r;
- encoder.addEventListener("dequeue", () => r(), {
- once: true
- });
- });
- };
- const waitForFinish = async () => {
- while (getQueueSize() > 0) {
- await waitForDequeue();
- }
- };
  const encodeFrame = async (audioData) => {
  if (encoder.state === "closed") {
  return;
  }
- while (getQueueSize() > 10) {
- await waitForDequeue();
- }
+ await ioSynchronizer.waitFor({ unemitted: 2, _unprocessed: 2 });
  if (encoder.state === "closed") {
  return;
  }
  encoder.encode(audioData);
+ ioSynchronizer.inputItem(audioData.timestamp, true);
  };
  let queue = Promise.resolve();
  return {
@@ -160,11 +281,10 @@ var createAudioEncoder = ({
  },
  waitForFinish: async () => {
  await encoder.flush();
- await waitForFinish();
+ await ioSynchronizer.waitForFinish();
  await prom;
  },
  close,
- getQueueSize,
  flush: async () => {
  await encoder.flush();
  }
@@ -172,11 +292,37 @@ var createAudioEncoder = ({
  };
  // src/convert-media.ts
  import {
- MediaParserInternals,
+ MediaParserInternals as MediaParserInternals2,
  parseMedia
  } from "@remotion/media-parser";
+
+ // src/auto-select-writer.ts
  import { bufferWriter } from "@remotion/media-parser/buffer";
  import { canUseWebFsWriter, webFsWriter } from "@remotion/media-parser/web-fs";
+ var autoSelectWriter = async (writer, logLevel) => {
+ if (writer) {
+ Log.verbose(logLevel, "Using writer provided by user");
+ return writer;
+ }
+ Log.verbose(logLevel, "Determining best writer");
+ if (await canUseWebFsWriter()) {
+ Log.verbose(logLevel, "Using WebFS writer because it is supported");
+ return webFsWriter;
+ }
+ Log.verbose(logLevel, "Using buffer writer because WebFS writer is not supported");
+ return bufferWriter;
+ };
+
+ // src/calculate-progress.ts
+ var calculateProgress = ({
+ millisecondsWritten,
+ expectedOutputMilliseconds
+ }) => {
+ if (expectedOutputMilliseconds === null) {
+ return null;
+ }
+ return millisecondsWritten / expectedOutputMilliseconds;
+ };

  // src/error-cause.ts
  var error_cause_default = Error;
@@ -247,7 +393,8 @@ var makeAudioTrackHandler = ({
  abortConversion,
  onMediaStateUpdate,
  onAudioTrack,
- bitrate
+ bitrate,
+ logLevel
  }) => async (track) => {
  const audioEncoderConfig = await getAudioEncoderConfig({
  codec: audioCodec,
@@ -313,7 +460,8 @@ var makeAudioTrackHandler = ({
  },
  codec: audioCodec,
  signal: controller.signal,
- config: audioEncoderConfig
+ config: audioEncoderConfig,
+ logLevel
  });
  const audioDecoder = createAudioDecoder({
  onFrame: async (frame) => {
@@ -328,7 +476,8 @@ var makeAudioTrackHandler = ({
  }));
  },
  signal: controller.signal,
- config: audioDecoderConfig
+ config: audioDecoderConfig,
+ logLevel
  });
  state.addWaitForFinishPromise(async () => {
  await audioDecoder.waitForFinish();
@@ -384,19 +533,30 @@ var createVideoDecoder = ({
  onFrame,
  onError,
  signal,
- config
+ config,
+ logLevel
  }) => {
+ const ioSynchronizer = makeIoSynchronizer(logLevel, "Video decoder");
  let outputQueue = Promise.resolve();
- let outputQueueSize = 0;
- let dequeueResolver = () => {
- };
  const videoDecoder = new VideoDecoder({
  output(inputFrame) {
- outputQueueSize++;
- outputQueue = outputQueue.then(() => onFrame(inputFrame)).then(() => {
- outputQueueSize--;
- dequeueResolver();
+ ioSynchronizer.onOutput(inputFrame.timestamp);
+ const abortHandler = () => {
+ inputFrame.close();
+ };
+ signal.addEventListener("abort", abortHandler, { once: true });
+ outputQueue = outputQueue.then(() => {
+ if (signal.aborted) {
+ return;
+ }
+ return onFrame(inputFrame);
+ }).then(() => {
+ ioSynchronizer.onProcessed();
+ signal.removeEventListener("abort", abortHandler);
  return Promise.resolve();
+ }).catch((err) => {
+ inputFrame.close();
+ onError(err);
  });
  },
  error(error) {
@@ -414,37 +574,20 @@ var createVideoDecoder = ({
  close();
  };
  signal.addEventListener("abort", onAbort);
- const getQueueSize = () => {
- return videoDecoder.decodeQueueSize + outputQueueSize;
- };
  videoDecoder.configure(config);
- const waitForDequeue = async () => {
- await new Promise((r) => {
- dequeueResolver = r;
- videoDecoder.addEventListener("dequeue", () => r(), {
- once: true
- });
- });
- };
- const waitForFinish = async () => {
- while (getQueueSize() > 0) {
- await waitForDequeue();
- }
- };
  const processSample = async (sample) => {
  if (videoDecoder.state === "closed") {
  return;
  }
- while (getQueueSize() > 10) {
- await waitForDequeue();
- }
  if (videoDecoder.state === "closed") {
  return;
  }
+ await ioSynchronizer.waitFor({ unemitted: 20, _unprocessed: 2 });
  if (sample.type === "key") {
  await videoDecoder.flush();
  }
  videoDecoder.decode(new EncodedVideoChunk(sample));
+ ioSynchronizer.inputItem(sample.timestamp, sample.type === "key");
  };
  let inputQueue = Promise.resolve();
  return {
@@ -454,12 +597,11 @@ var createVideoDecoder = ({
  },
  waitForFinish: async () => {
  await videoDecoder.flush();
- await waitForFinish();
+ await ioSynchronizer.waitForFinish();
  await outputQueue;
  await inputQueue;
  },
  close,
- getQueueSize,
  flush: async () => {
  await videoDecoder.flush();
  }
@@ -493,25 +635,34 @@ var createVideoEncoder = ({
  onChunk,
  onError,
  signal,
- config
+ config,
+ logLevel
  }) => {
  if (signal.aborted) {
  throw new Error("Not creating video encoder, already aborted");
  }
+ const ioSynchronizer = makeIoSynchronizer(logLevel, "Video encoder");
  let outputQueue = Promise.resolve();
- let outputQueueSize = 0;
- let dequeueResolver = () => {
- };
  const encoder = new VideoEncoder({
  error(error) {
  onError(error);
  },
  output(chunk) {
- outputQueueSize++;
- outputQueue = outputQueue.then(() => onChunk(chunk)).then(() => {
- outputQueueSize--;
- dequeueResolver();
+ if (chunk.duration === null) {
+ throw new Error("Duration is null");
+ }
+ const timestamp = chunk.timestamp + chunk.duration;
+ ioSynchronizer.onOutput(timestamp);
+ outputQueue = outputQueue.then(() => {
+ if (signal.aborted) {
+ return;
+ }
+ return onChunk(chunk);
+ }).then(() => {
+ ioSynchronizer.onProcessed();
  return Promise.resolve();
+ }).catch((err) => {
+ onError(err);
  });
  }
  });
@@ -526,37 +677,24 @@ var createVideoEncoder = ({
  close();
  };
  signal.addEventListener("abort", onAbort);
- const getQueueSize = () => {
- return encoder.encodeQueueSize + outputQueueSize;
- };
  encoder.configure(config);
  let framesProcessed = 0;
- const waitForDequeue = async () => {
- await new Promise((r) => {
- dequeueResolver = r;
- encoder.addEventListener("dequeue", () => r(), {
- once: true
- });
- });
- };
- const waitForFinish = async () => {
- while (getQueueSize() > 0) {
- await waitForDequeue();
- }
- };
  const encodeFrame = async (frame) => {
  if (encoder.state === "closed") {
  return;
  }
- while (getQueueSize() > 10) {
- await waitForDequeue();
- }
+ await ioSynchronizer.waitFor({
+ unemitted: 2,
+ _unprocessed: 2
+ });
  if (encoder.state === "closed") {
  return;
  }
+ const keyFrame = framesProcessed % 40 === 0;
  encoder.encode(frame, {
- keyFrame: framesProcessed % 40 === 0
+ keyFrame
  });
+ ioSynchronizer.inputItem(frame.timestamp, keyFrame);
  framesProcessed++;
  };
  let inputQueue = Promise.resolve();
@@ -568,10 +706,9 @@ var createVideoEncoder = ({
  waitForFinish: async () => {
  await encoder.flush();
  await outputQueue;
- await waitForFinish();
+ await ioSynchronizer.waitForFinish();
  },
  close,
- getQueueSize,
  flush: async () => {
  await encoder.flush();
  }
@@ -609,8 +746,12 @@ var makeVideoTrackHandler = ({
  convertMediaState,
  controller,
  videoCodec,
- onVideoTrack
+ onVideoTrack,
+ logLevel
  }) => async (track) => {
+ if (controller.signal.aborted) {
+ throw new error_cause_default("Aborted");
+ }
  const videoEncoderConfig = await getVideoEncoderConfig({
  codec: videoCodec === "vp9" ? "vp09.00.10.08" : videoCodec,
  height: track.displayAspectHeight,
@@ -636,8 +777,8 @@ var makeVideoTrackHandler = ({
  codec: track.codecWithoutConfig,
  codecPrivate: track.codecPrivate
  });
- return (sample) => {
- state.addSample(new EncodedVideoChunk(sample), videoTrack.trackNumber, true);
+ return async (sample) => {
+ await state.addSample(new EncodedVideoChunk(sample), videoTrack.trackNumber, true);
  convertMediaState.decodedVideoFrames++;
  onMediaStateUpdate?.({ ...convertMediaState });
  };
@@ -670,7 +811,8 @@ var makeVideoTrackHandler = ({
  }));
  },
  signal: controller.signal,
- config: videoEncoderConfig
+ config: videoEncoderConfig,
+ logLevel
  });
  const videoDecoder = createVideoDecoder({
  config: videoDecoderConfig,
@@ -686,7 +828,8 @@ var makeVideoTrackHandler = ({
  cause: err
  }));
  },
- signal: controller.signal
+ signal: controller.signal,
+ logLevel
  });
  state.addWaitForFinishPromise(async () => {
  await videoDecoder.waitForFinish();
@@ -699,29 +842,26 @@ var makeVideoTrackHandler = ({
  };
  };

- // src/with-resolvers.ts
- var withResolvers = function() {
- let resolve;
- let reject;
- const promise = new Promise((res, rej) => {
- resolve = res;
- reject = rej;
- });
- return { promise, resolve, reject };
- };
-
  // src/convert-media.ts
- var convertMedia = async ({
+ var convertMedia = async function({
  src,
  onVideoFrame,
- onMediaStateUpdate,
+ onMediaStateUpdate: onMediaStateDoNoCallDirectly,
  audioCodec,
  to,
  videoCodec,
  signal: userPassedAbortSignal,
  onAudioTrack: userAudioResolver,
- onVideoTrack: userVideoResolver
- }) => {
+ onVideoTrack: userVideoResolver,
+ reader,
+ fields,
+ logLevel = "info",
+ writer,
+ ...more
+ }) {
+ if (userPassedAbortSignal?.aborted) {
+ return Promise.reject(new error_cause_default("Aborted"));
+ }
  if (to !== "webm") {
  return Promise.reject(new TypeError('Only `to: "webm"` is supported currently'));
  }
@@ -731,7 +871,7 @@ var convertMedia = async ({
  if (videoCodec !== "vp8" && videoCodec !== "vp9") {
  return Promise.reject(new TypeError('Only `videoCodec: "vp8"` and `videoCodec: "vp9"` are supported currently'));
  }
- const { promise, resolve, reject } = withResolvers();
+ const { resolve, reject, getPromiseToImmediatelyReturn } = withResolversAndWaitForReturn();
  const controller = new AbortController;
  const abortConversion = (errCause) => {
  reject(errCause);
@@ -747,10 +887,35 @@ var convertMedia = async ({
  decodedAudioFrames: 0,
  decodedVideoFrames: 0,
  encodedVideoFrames: 0,
- encodedAudioFrames: 0
+ encodedAudioFrames: 0,
+ bytesWritten: 0,
+ millisecondsWritten: 0,
+ expectedOutputMilliseconds: null,
+ overallProgress: 0
+ };
+ const onMediaStateUpdate = (newState) => {
+ if (controller.signal.aborted) {
+ return;
+ }
+ onMediaStateDoNoCallDirectly?.(newState);
  };
- const canUseWebFs = await canUseWebFsWriter();
- const state = await MediaParserInternals.createMedia(canUseWebFs ? webFsWriter : bufferWriter);
+ const state = await MediaParserInternals2.createMedia({
+ writer: await autoSelectWriter(writer, logLevel),
+ onBytesProgress: (bytesWritten) => {
+ convertMediaState.bytesWritten = bytesWritten;
+ onMediaStateUpdate?.(convertMediaState);
+ },
+ onMillisecondsProgress: (millisecondsWritten) => {
+ if (millisecondsWritten > convertMediaState.millisecondsWritten) {
+ convertMediaState.millisecondsWritten = millisecondsWritten;
+ convertMediaState.overallProgress = calculateProgress({
+ millisecondsWritten: convertMediaState.millisecondsWritten,
+ expectedOutputMilliseconds: convertMediaState.expectedOutputMilliseconds
+ });
+ onMediaStateUpdate?.(convertMediaState);
+ }
+ }
+ });
  const onVideoTrack = makeVideoTrackHandler({
  state,
  onVideoFrame: onVideoFrame ?? null,
@@ -759,7 +924,8 @@ var convertMedia = async ({
  convertMediaState,
  controller,
  videoCodec,
- onVideoTrack: userVideoResolver ?? defaultResolveVideoAction
+ onVideoTrack: userVideoResolver ?? defaultResolveVideoAction,
+ logLevel
  });
  const onAudioTrack = makeAudioTrackHandler({
  abortConversion,
@@ -769,23 +935,46 @@ var convertMedia = async ({
  onMediaStateUpdate: onMediaStateUpdate ?? null,
  state,
  onAudioTrack: userAudioResolver ?? defaultResolveAudioAction,
- bitrate: 128000
+ bitrate: 128000,
+ logLevel
  });
  parseMedia({
  src,
  onVideoTrack,
  onAudioTrack,
- signal: controller.signal
+ signal: controller.signal,
+ fields: {
+ ...fields,
+ durationInSeconds: true
+ },
+ reader,
+ ...more,
+ onDurationInSeconds: (durationInSeconds) => {
+ if (durationInSeconds === null) {
+ return null;
+ }
+ const casted = more;
+ if (casted.onDurationInSeconds) {
+ casted.onDurationInSeconds(durationInSeconds);
+ }
+ const expectedOutputMilliseconds = durationInSeconds * 1000;
+ convertMediaState.expectedOutputMilliseconds = expectedOutputMilliseconds;
+ convertMediaState.overallProgress = calculateProgress({
+ millisecondsWritten: convertMediaState.millisecondsWritten,
+ expectedOutputMilliseconds
+ });
+ onMediaStateUpdate(convertMediaState);
+ }
  }).then(() => {
  return state.waitForFinish();
  }).then(() => {
  resolve({ save: state.save, remove: state.remove });
  }).catch((err) => {
  reject(err);
- }).finally(() => {
+ });
+ return getPromiseToImmediatelyReturn().finally(() => {
  userPassedAbortSignal?.removeEventListener("abort", onUserAbort);
  });
- return promise;
  };
  export {
  createVideoEncoder,
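
For orientation, here is a minimal usage sketch of the options this release touches. It is inferred from the bundled code above, not from official documentation: the source URL is a placeholder, the accepted `logLevel` values are assumed to match @remotion/media-parser's log levels, and the shape of the object passed to `onMediaStateUpdate` (including the new `overallProgress` field) is assumed to match the `convertMediaState` object shown in the diff.

```ts
import {convertMedia} from "@remotion/webcodecs";

async function run() {
  // Sketch only, based on the bundled code in this diff:
  // - `to: "webm"`, `videoCodec: "vp8" | "vp9"` and `audioCodec: "opus"` mirror
  //   the checks in convert-media.ts and the audio encoder.
  // - `logLevel` (default "info") is new and is forwarded to the io-synchronizer
  //   trace logs of the encoders and decoders.
  const result = await convertMedia({
    src: "https://example.com/input.mp4", // placeholder URL
    to: "webm",
    videoCodec: "vp9",
    audioCodec: "opus",
    logLevel: "verbose",
    onMediaStateUpdate: (state) => {
      // New fields in this version: bytesWritten, millisecondsWritten,
      // expectedOutputMilliseconds and overallProgress
      // (millisecondsWritten / expectedOutputMilliseconds, or null until the
      // input duration has been parsed).
      console.log("progress", state.overallProgress);
    },
  });

  // The resolved object exposes `save` and `remove` (see
  // `resolve({ save: state.save, remove: state.remove })` above);
  // their exact signatures are not visible in this diff.
  return result;
}
```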