mediabunny 1.3.2 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -1
- package/dist/bundles/mediabunny.cjs +545 -276
- package/dist/bundles/mediabunny.min.cjs +4 -4
- package/dist/bundles/mediabunny.min.mjs +4 -4
- package/dist/bundles/mediabunny.mjs +545 -276
- package/dist/mediabunny.d.ts +9 -0
- package/dist/modules/codec-data.js +2 -2
- package/dist/modules/codec.js +2 -2
- package/dist/modules/conversion.js +7 -7
- package/dist/modules/index.js +14 -14
- package/dist/modules/input-format.js +11 -11
- package/dist/modules/input-track.js +5 -5
- package/dist/modules/input.js +4 -4
- package/dist/modules/isobmff/isobmff-boxes.js +5 -5
- package/dist/modules/isobmff/isobmff-demuxer.js +9 -9
- package/dist/modules/isobmff/isobmff-muxer.d.ts.map +1 -1
- package/dist/modules/isobmff/isobmff-muxer.js +22 -15
- package/dist/modules/matroska/matroska-demuxer.js +9 -9
- package/dist/modules/matroska/matroska-muxer.js +8 -8
- package/dist/modules/media-sink.js +7 -7
- package/dist/modules/media-source.d.ts +4 -0
- package/dist/modules/media-source.d.ts.map +1 -1
- package/dist/modules/media-source.js +455 -207
- package/dist/modules/mp3/mp3-demuxer.js +6 -6
- package/dist/modules/mp3/mp3-muxer.js +4 -4
- package/dist/modules/mp3/mp3-reader.js +2 -2
- package/dist/modules/mp3/mp3-writer.js +1 -1
- package/dist/modules/muxer.js +1 -1
- package/dist/modules/ogg/ogg-demuxer.js +9 -9
- package/dist/modules/ogg/ogg-misc.js +2 -2
- package/dist/modules/ogg/ogg-muxer.js +6 -6
- package/dist/modules/ogg/ogg-reader.js +1 -1
- package/dist/modules/output-format.js +6 -6
- package/dist/modules/output.d.ts.map +1 -1
- package/dist/modules/output.js +6 -7
- package/dist/modules/packet.js +1 -1
- package/dist/modules/reader.js +1 -1
- package/dist/modules/sample.d.ts +5 -0
- package/dist/modules/sample.d.ts.map +1 -1
- package/dist/modules/sample.js +38 -1
- package/dist/modules/source.js +1 -1
- package/dist/modules/target.js +1 -1
- package/dist/modules/wave/wave-demuxer.js +6 -6
- package/dist/modules/wave/wave-muxer.js +5 -5
- package/dist/modules/writer.js +1 -1
- package/package.json +2 -2
- package/src/isobmff/isobmff-muxer.ts +13 -6
- package/src/media-source.ts +546 -236
- package/src/output.ts +2 -3
- package/src/sample.ts +52 -0
|
@@ -4433,6 +4433,106 @@ var Mediabunny = (() => {
|
|
|
4433
4433
|
return string;
|
|
4434
4434
|
};
|
|
4435
4435
|
|
|
4436
|
+
// src/isobmff/isobmff-reader.ts
|
|
4437
|
+
var MIN_BOX_HEADER_SIZE = 8;
|
|
4438
|
+
var MAX_BOX_HEADER_SIZE = 16;
|
|
4439
|
+
var IsobmffReader = class {
|
|
4440
|
+
constructor(reader) {
|
|
4441
|
+
this.reader = reader;
|
|
4442
|
+
this.pos = 0;
|
|
4443
|
+
}
|
|
4444
|
+
readBytes(length) {
|
|
4445
|
+
const { view: view2, offset } = this.reader.getViewAndOffset(this.pos, this.pos + length);
|
|
4446
|
+
this.pos += length;
|
|
4447
|
+
return new Uint8Array(view2.buffer, offset, length);
|
|
4448
|
+
}
|
|
4449
|
+
readU8() {
|
|
4450
|
+
const { view: view2, offset } = this.reader.getViewAndOffset(this.pos, this.pos + 1);
|
|
4451
|
+
this.pos++;
|
|
4452
|
+
return view2.getUint8(offset);
|
|
4453
|
+
}
|
|
4454
|
+
readU16() {
|
|
4455
|
+
const { view: view2, offset } = this.reader.getViewAndOffset(this.pos, this.pos + 2);
|
|
4456
|
+
this.pos += 2;
|
|
4457
|
+
return view2.getUint16(offset, false);
|
|
4458
|
+
}
|
|
4459
|
+
readI16() {
|
|
4460
|
+
const { view: view2, offset } = this.reader.getViewAndOffset(this.pos, this.pos + 2);
|
|
4461
|
+
this.pos += 2;
|
|
4462
|
+
return view2.getInt16(offset, false);
|
|
4463
|
+
}
|
|
4464
|
+
readU24() {
|
|
4465
|
+
const { view: view2, offset } = this.reader.getViewAndOffset(this.pos, this.pos + 3);
|
|
4466
|
+
this.pos += 3;
|
|
4467
|
+
const high = view2.getUint16(offset, false);
|
|
4468
|
+
const low = view2.getUint8(offset + 2);
|
|
4469
|
+
return high * 256 + low;
|
|
4470
|
+
}
|
|
4471
|
+
readU32() {
|
|
4472
|
+
const { view: view2, offset } = this.reader.getViewAndOffset(this.pos, this.pos + 4);
|
|
4473
|
+
this.pos += 4;
|
|
4474
|
+
return view2.getUint32(offset, false);
|
|
4475
|
+
}
|
|
4476
|
+
readI32() {
|
|
4477
|
+
const { view: view2, offset } = this.reader.getViewAndOffset(this.pos, this.pos + 4);
|
|
4478
|
+
this.pos += 4;
|
|
4479
|
+
return view2.getInt32(offset, false);
|
|
4480
|
+
}
|
|
4481
|
+
readU64() {
|
|
4482
|
+
const high = this.readU32();
|
|
4483
|
+
const low = this.readU32();
|
|
4484
|
+
return high * 4294967296 + low;
|
|
4485
|
+
}
|
|
4486
|
+
readI64() {
|
|
4487
|
+
const high = this.readI32();
|
|
4488
|
+
const low = this.readU32();
|
|
4489
|
+
return high * 4294967296 + low;
|
|
4490
|
+
}
|
|
4491
|
+
readF64() {
|
|
4492
|
+
const { view: view2, offset } = this.reader.getViewAndOffset(this.pos, this.pos + 8);
|
|
4493
|
+
this.pos += 8;
|
|
4494
|
+
return view2.getFloat64(offset, false);
|
|
4495
|
+
}
|
|
4496
|
+
readFixed_16_16() {
|
|
4497
|
+
return this.readI32() / 65536;
|
|
4498
|
+
}
|
|
4499
|
+
readFixed_2_30() {
|
|
4500
|
+
return this.readI32() / 1073741824;
|
|
4501
|
+
}
|
|
4502
|
+
readAscii(length) {
|
|
4503
|
+
const { view: view2, offset } = this.reader.getViewAndOffset(this.pos, this.pos + length);
|
|
4504
|
+
this.pos += length;
|
|
4505
|
+
let str = "";
|
|
4506
|
+
for (let i = 0; i < length; i++) {
|
|
4507
|
+
str += String.fromCharCode(view2.getUint8(offset + i));
|
|
4508
|
+
}
|
|
4509
|
+
return str;
|
|
4510
|
+
}
|
|
4511
|
+
readIsomVariableInteger() {
|
|
4512
|
+
let result = 0;
|
|
4513
|
+
for (let i = 0; i < 4; i++) {
|
|
4514
|
+
result <<= 7;
|
|
4515
|
+
const nextByte = this.readU8();
|
|
4516
|
+
result |= nextByte & 127;
|
|
4517
|
+
if ((nextByte & 128) === 0) {
|
|
4518
|
+
break;
|
|
4519
|
+
}
|
|
4520
|
+
}
|
|
4521
|
+
return result;
|
|
4522
|
+
}
|
|
4523
|
+
readBoxHeader() {
|
|
4524
|
+
let totalSize = this.readU32();
|
|
4525
|
+
const name = this.readAscii(4);
|
|
4526
|
+
let headerSize = 8;
|
|
4527
|
+
const hasLargeSize = totalSize === 1;
|
|
4528
|
+
if (hasLargeSize) {
|
|
4529
|
+
totalSize = this.readU64();
|
|
4530
|
+
headerSize = 16;
|
|
4531
|
+
}
|
|
4532
|
+
return { name, totalSize, headerSize, contentSize: totalSize - headerSize };
|
|
4533
|
+
}
|
|
4534
|
+
};
|
|
4535
|
+
|
|
4436
4536
|
// src/isobmff/isobmff-muxer.ts
|
|
4437
4537
|
var GLOBAL_TIMESCALE = 1e3;
|
|
4438
4538
|
var TIMESTAMP_OFFSET = 2082844800;
|
|
@@ -5067,8 +5167,7 @@ var Mediabunny = (() => {
|
|
|
5067
5167
|
const moofBox = moof(fragmentNumber, tracksInFragment);
|
|
5068
5168
|
const moofOffset = this.writer.getPos();
|
|
5069
5169
|
const mdatStartPos = moofOffset + this.boxWriter.measureBox(moofBox);
|
|
5070
|
-
|
|
5071
|
-
let currentPos = mdatStartPos + mdatHeaderSize;
|
|
5170
|
+
let currentPos = mdatStartPos + MIN_BOX_HEADER_SIZE;
|
|
5072
5171
|
let fragmentStartTimestamp = Infinity;
|
|
5073
5172
|
for (const trackData of tracksInFragment) {
|
|
5074
5173
|
trackData.currentChunk.offset = currentPos;
|
|
@@ -5079,6 +5178,12 @@ var Mediabunny = (() => {
|
|
|
5079
5178
|
fragmentStartTimestamp = Math.min(fragmentStartTimestamp, trackData.currentChunk.startTimestamp);
|
|
5080
5179
|
}
|
|
5081
5180
|
const mdatSize = currentPos - mdatStartPos;
|
|
5181
|
+
const needsLargeMdatSize = mdatSize >= 2 ** 32;
|
|
5182
|
+
if (needsLargeMdatSize) {
|
|
5183
|
+
for (const trackData of tracksInFragment) {
|
|
5184
|
+
trackData.currentChunk.offset += MAX_BOX_HEADER_SIZE - MIN_BOX_HEADER_SIZE;
|
|
5185
|
+
}
|
|
5186
|
+
}
|
|
5082
5187
|
if (this.format._options.onMoof) {
|
|
5083
5188
|
this.writer.startTrackingWrites();
|
|
5084
5189
|
}
|
|
@@ -5092,10 +5197,10 @@ var Mediabunny = (() => {
|
|
|
5092
5197
|
if (this.format._options.onMdat) {
|
|
5093
5198
|
this.writer.startTrackingWrites();
|
|
5094
5199
|
}
|
|
5095
|
-
const mdatBox = mdat(
|
|
5200
|
+
const mdatBox = mdat(needsLargeMdatSize);
|
|
5096
5201
|
mdatBox.size = mdatSize;
|
|
5097
5202
|
this.boxWriter.writeBox(mdatBox);
|
|
5098
|
-
this.writer.seek(mdatStartPos +
|
|
5203
|
+
this.writer.seek(mdatStartPos + (needsLargeMdatSize ? MAX_BOX_HEADER_SIZE : MIN_BOX_HEADER_SIZE));
|
|
5099
5204
|
for (const trackData of tracksInFragment) {
|
|
5100
5205
|
for (const sample of trackData.currentChunk.samples) {
|
|
5101
5206
|
this.writer.write(sample.data);
|
|
@@ -7981,6 +8086,46 @@ ${cue.notes ?? ""}`;
|
|
|
7981
8086
|
}
|
|
7982
8087
|
this.timestamp = newTimestamp;
|
|
7983
8088
|
}
|
|
8089
|
+
/**
|
|
8090
|
+
* Creates AudioSamples from an AudioBuffer, starting at the given timestamp in seconds. Typically creates exactly
|
|
8091
|
+
* one sample, but may create multiple if the AudioBuffer is exceedingly large.
|
|
8092
|
+
*/
|
|
8093
|
+
static fromAudioBuffer(audioBuffer, timestamp) {
|
|
8094
|
+
if (!(audioBuffer instanceof AudioBuffer)) {
|
|
8095
|
+
throw new TypeError("audioBuffer must be an AudioBuffer.");
|
|
8096
|
+
}
|
|
8097
|
+
const MAX_FLOAT_COUNT = 64 * 1024 * 1024;
|
|
8098
|
+
const numberOfChannels = audioBuffer.numberOfChannels;
|
|
8099
|
+
const sampleRate = audioBuffer.sampleRate;
|
|
8100
|
+
const totalFrames = audioBuffer.length;
|
|
8101
|
+
const maxFramesPerChunk = Math.floor(MAX_FLOAT_COUNT / numberOfChannels);
|
|
8102
|
+
let currentRelativeFrame = 0;
|
|
8103
|
+
let remainingFrames = totalFrames;
|
|
8104
|
+
const result = [];
|
|
8105
|
+
while (remainingFrames > 0) {
|
|
8106
|
+
const framesToCopy = Math.min(maxFramesPerChunk, remainingFrames);
|
|
8107
|
+
const chunkData = new Float32Array(numberOfChannels * framesToCopy);
|
|
8108
|
+
for (let channel = 0; channel < numberOfChannels; channel++) {
|
|
8109
|
+
audioBuffer.copyFromChannel(
|
|
8110
|
+
chunkData.subarray(channel * framesToCopy, channel * framesToCopy + framesToCopy),
|
|
8111
|
+
channel,
|
|
8112
|
+
currentRelativeFrame
|
|
8113
|
+
);
|
|
8114
|
+
}
|
|
8115
|
+
const audioSample = new _AudioSample({
|
|
8116
|
+
format: "f32-planar",
|
|
8117
|
+
sampleRate,
|
|
8118
|
+
numberOfFrames: framesToCopy,
|
|
8119
|
+
numberOfChannels,
|
|
8120
|
+
timestamp: timestamp + currentRelativeFrame / sampleRate,
|
|
8121
|
+
data: chunkData
|
|
8122
|
+
});
|
|
8123
|
+
result.push(audioSample);
|
|
8124
|
+
currentRelativeFrame += framesToCopy;
|
|
8125
|
+
remainingFrames -= framesToCopy;
|
|
8126
|
+
}
|
|
8127
|
+
return result;
|
|
8128
|
+
}
|
|
7984
8129
|
};
|
|
7985
8130
|
var getBytesPerSample = (format) => {
|
|
7986
8131
|
switch (format) {
|
|
@@ -10386,7 +10531,7 @@ ${cue.notes ?? ""}`;
|
|
|
10386
10531
|
}
|
|
10387
10532
|
}
|
|
10388
10533
|
/** @internal */
|
|
10389
|
-
_start() {
|
|
10534
|
+
async _start() {
|
|
10390
10535
|
}
|
|
10391
10536
|
/** @internal */
|
|
10392
10537
|
async _flushAndClose() {
|
|
@@ -10516,60 +10661,66 @@ ${cue.notes ?? ""}`;
|
|
|
10516
10661
|
this.encoderError = null;
|
|
10517
10662
|
}
|
|
10518
10663
|
async add(videoSample, shouldClose, encodeOptions) {
|
|
10519
|
-
|
|
10520
|
-
|
|
10521
|
-
|
|
10522
|
-
if (
|
|
10523
|
-
|
|
10524
|
-
|
|
10525
|
-
|
|
10526
|
-
|
|
10527
|
-
|
|
10528
|
-
|
|
10529
|
-
|
|
10530
|
-
|
|
10531
|
-
if (!this.encoderInitialized) {
|
|
10532
|
-
if (!this.ensureEncoderPromise) {
|
|
10533
|
-
void this.ensureEncoder(videoSample);
|
|
10664
|
+
try {
|
|
10665
|
+
this.checkForEncoderError();
|
|
10666
|
+
this.source._ensureValidAdd();
|
|
10667
|
+
if (this.lastWidth !== null && this.lastHeight !== null) {
|
|
10668
|
+
if (videoSample.codedWidth !== this.lastWidth || videoSample.codedHeight !== this.lastHeight) {
|
|
10669
|
+
throw new Error(
|
|
10670
|
+
`Video sample size must remain constant. Expected ${this.lastWidth}x${this.lastHeight}, got ${videoSample.codedWidth}x${videoSample.codedHeight}.`
|
|
10671
|
+
);
|
|
10672
|
+
}
|
|
10673
|
+
} else {
|
|
10674
|
+
this.lastWidth = videoSample.codedWidth;
|
|
10675
|
+
this.lastHeight = videoSample.codedHeight;
|
|
10534
10676
|
}
|
|
10535
10677
|
if (!this.encoderInitialized) {
|
|
10536
|
-
|
|
10678
|
+
if (!this.ensureEncoderPromise) {
|
|
10679
|
+
void this.ensureEncoder(videoSample);
|
|
10680
|
+
}
|
|
10681
|
+
if (!this.encoderInitialized) {
|
|
10682
|
+
await this.ensureEncoderPromise;
|
|
10683
|
+
}
|
|
10537
10684
|
}
|
|
10538
|
-
|
|
10539
|
-
|
|
10540
|
-
|
|
10541
|
-
|
|
10542
|
-
|
|
10543
|
-
|
|
10544
|
-
|
|
10545
|
-
|
|
10546
|
-
|
|
10547
|
-
|
|
10548
|
-
|
|
10549
|
-
|
|
10550
|
-
|
|
10685
|
+
assert(this.encoderInitialized);
|
|
10686
|
+
const keyFrameInterval = this.encodingConfig.keyFrameInterval ?? 5;
|
|
10687
|
+
const multipleOfKeyFrameInterval = Math.floor(videoSample.timestamp / keyFrameInterval);
|
|
10688
|
+
const finalEncodeOptions = {
|
|
10689
|
+
...encodeOptions,
|
|
10690
|
+
keyFrame: encodeOptions?.keyFrame || keyFrameInterval === 0 || multipleOfKeyFrameInterval !== this.lastMultipleOfKeyFrameInterval
|
|
10691
|
+
};
|
|
10692
|
+
this.lastMultipleOfKeyFrameInterval = multipleOfKeyFrameInterval;
|
|
10693
|
+
if (this.customEncoder) {
|
|
10694
|
+
this.customEncoderQueueSize++;
|
|
10695
|
+
const promise = this.customEncoderCallSerializer.call(() => this.customEncoder.encode(videoSample, finalEncodeOptions)).then(() => {
|
|
10696
|
+
this.customEncoderQueueSize--;
|
|
10697
|
+
if (shouldClose) {
|
|
10698
|
+
videoSample.close();
|
|
10699
|
+
}
|
|
10700
|
+
}).catch((error) => {
|
|
10701
|
+
this.encoderError ??= error;
|
|
10702
|
+
});
|
|
10703
|
+
if (this.customEncoderQueueSize >= 4) {
|
|
10704
|
+
await promise;
|
|
10705
|
+
}
|
|
10706
|
+
} else {
|
|
10707
|
+
assert(this.encoder);
|
|
10708
|
+
const videoFrame = videoSample.toVideoFrame();
|
|
10709
|
+
this.encoder.encode(videoFrame, finalEncodeOptions);
|
|
10710
|
+
videoFrame.close();
|
|
10551
10711
|
if (shouldClose) {
|
|
10552
10712
|
videoSample.close();
|
|
10553
10713
|
}
|
|
10554
|
-
|
|
10555
|
-
|
|
10556
|
-
|
|
10557
|
-
if (this.customEncoderQueueSize >= 4) {
|
|
10558
|
-
await promise;
|
|
10714
|
+
if (this.encoder.encodeQueueSize >= 4) {
|
|
10715
|
+
await new Promise((resolve) => this.encoder.addEventListener("dequeue", resolve, { once: true }));
|
|
10716
|
+
}
|
|
10559
10717
|
}
|
|
10560
|
-
|
|
10561
|
-
|
|
10562
|
-
const videoFrame = videoSample.toVideoFrame();
|
|
10563
|
-
this.encoder.encode(videoFrame, finalEncodeOptions);
|
|
10564
|
-
videoFrame.close();
|
|
10718
|
+
await this.muxer.mutex.currentPromise;
|
|
10719
|
+
} finally {
|
|
10565
10720
|
if (shouldClose) {
|
|
10566
10721
|
videoSample.close();
|
|
10567
10722
|
}
|
|
10568
|
-
if (this.encoder.encodeQueueSize >= 4) {
|
|
10569
|
-
await new Promise((resolve) => this.encoder.addEventListener("dequeue", resolve, { once: true }));
|
|
10570
|
-
}
|
|
10571
10723
|
}
|
|
10572
|
-
await this.muxer.mutex.currentPromise;
|
|
10573
10724
|
}
|
|
10574
10725
|
async ensureEncoder(videoSample) {
|
|
10575
10726
|
if (this.encoder) {
|
|
@@ -10735,36 +10886,94 @@ ${cue.notes ?? ""}`;
|
|
|
10735
10886
|
super(encodingConfig.codec);
|
|
10736
10887
|
/** @internal */
|
|
10737
10888
|
this._abortController = null;
|
|
10889
|
+
/** @internal */
|
|
10890
|
+
this._workerTrackId = null;
|
|
10891
|
+
/** @internal */
|
|
10892
|
+
this._workerListener = null;
|
|
10893
|
+
/** @internal */
|
|
10894
|
+
this._promiseWithResolvers = promiseWithResolvers();
|
|
10895
|
+
/** @internal */
|
|
10896
|
+
this._errorPromiseAccessed = false;
|
|
10738
10897
|
this._encoder = new VideoEncoderWrapper(this, encodingConfig);
|
|
10739
10898
|
this._track = track;
|
|
10740
10899
|
}
|
|
10900
|
+
/** A promise that rejects upon any error within this source. This promise never resolves. */
|
|
10901
|
+
get errorPromise() {
|
|
10902
|
+
this._errorPromiseAccessed = true;
|
|
10903
|
+
return this._promiseWithResolvers.promise;
|
|
10904
|
+
}
|
|
10741
10905
|
/** @internal */
|
|
10742
|
-
_start() {
|
|
10906
|
+
async _start() {
|
|
10907
|
+
if (!this._errorPromiseAccessed) {
|
|
10908
|
+
console.warn(
|
|
10909
|
+
"Make sure not to ignore the `errorPromise` field on MediaStreamVideoTrackSource, so that any internal errors get bubbled up properly."
|
|
10910
|
+
);
|
|
10911
|
+
}
|
|
10743
10912
|
this._abortController = new AbortController();
|
|
10744
|
-
let
|
|
10745
|
-
|
|
10746
|
-
const
|
|
10747
|
-
|
|
10748
|
-
|
|
10749
|
-
|
|
10750
|
-
|
|
10751
|
-
|
|
10752
|
-
|
|
10753
|
-
|
|
10754
|
-
|
|
10913
|
+
let firstVideoFrameTimestamp = null;
|
|
10914
|
+
let errored = false;
|
|
10915
|
+
const onVideoFrame = (videoFrame) => {
|
|
10916
|
+
if (errored) {
|
|
10917
|
+
videoFrame.close();
|
|
10918
|
+
return;
|
|
10919
|
+
}
|
|
10920
|
+
if (firstVideoFrameTimestamp === null) {
|
|
10921
|
+
firstVideoFrameTimestamp = videoFrame.timestamp / 1e6;
|
|
10922
|
+
const muxer = this._connectedTrack.output._muxer;
|
|
10923
|
+
if (muxer.firstMediaStreamTimestamp === null) {
|
|
10924
|
+
muxer.firstMediaStreamTimestamp = performance.now() / 1e3;
|
|
10925
|
+
this._timestampOffset = -firstVideoFrameTimestamp;
|
|
10926
|
+
} else {
|
|
10927
|
+
this._timestampOffset = performance.now() / 1e3 - muxer.firstMediaStreamTimestamp - firstVideoFrameTimestamp;
|
|
10755
10928
|
}
|
|
10756
|
-
void this._encoder.add(new VideoSample(videoFrame), true).catch((error) => {
|
|
10757
|
-
this._abortController?.abort();
|
|
10758
|
-
throw error;
|
|
10759
|
-
});
|
|
10760
10929
|
}
|
|
10761
|
-
|
|
10762
|
-
|
|
10763
|
-
|
|
10764
|
-
|
|
10765
|
-
|
|
10766
|
-
|
|
10767
|
-
|
|
10930
|
+
if (this._encoder.getQueueSize() >= 4) {
|
|
10931
|
+
videoFrame.close();
|
|
10932
|
+
return;
|
|
10933
|
+
}
|
|
10934
|
+
void this._encoder.add(new VideoSample(videoFrame), true).catch((error) => {
|
|
10935
|
+
errored = true;
|
|
10936
|
+
this._abortController?.abort();
|
|
10937
|
+
this._promiseWithResolvers.reject(error);
|
|
10938
|
+
if (this._workerTrackId !== null) {
|
|
10939
|
+
sendMessageToMediaStreamTrackProcessorWorker({
|
|
10940
|
+
type: "stopTrack",
|
|
10941
|
+
trackId: this._workerTrackId
|
|
10942
|
+
});
|
|
10943
|
+
}
|
|
10944
|
+
});
|
|
10945
|
+
};
|
|
10946
|
+
if (typeof MediaStreamTrackProcessor !== "undefined") {
|
|
10947
|
+
const processor = new MediaStreamTrackProcessor({ track: this._track });
|
|
10948
|
+
const consumer = new WritableStream({ write: onVideoFrame });
|
|
10949
|
+
processor.readable.pipeTo(consumer, {
|
|
10950
|
+
signal: this._abortController.signal
|
|
10951
|
+
}).catch((error) => {
|
|
10952
|
+
if (error instanceof DOMException && error.name === "AbortError") return;
|
|
10953
|
+
this._promiseWithResolvers.reject(error);
|
|
10954
|
+
});
|
|
10955
|
+
} else {
|
|
10956
|
+
const supportedInWorker = await mediaStreamTrackProcessorIsSupportedInWorker();
|
|
10957
|
+
if (supportedInWorker) {
|
|
10958
|
+
this._workerTrackId = nextMediaStreamTrackProcessorWorkerId++;
|
|
10959
|
+
sendMessageToMediaStreamTrackProcessorWorker({
|
|
10960
|
+
type: "videoTrack",
|
|
10961
|
+
trackId: this._workerTrackId,
|
|
10962
|
+
track: this._track
|
|
10963
|
+
}, [this._track]);
|
|
10964
|
+
this._workerListener = (event) => {
|
|
10965
|
+
const message = event.data;
|
|
10966
|
+
if (message.type === "videoFrame" && message.trackId === this._workerTrackId) {
|
|
10967
|
+
onVideoFrame(message.videoFrame);
|
|
10968
|
+
} else if (message.type === "error" && message.trackId === this._workerTrackId) {
|
|
10969
|
+
this._promiseWithResolvers.reject(message.error);
|
|
10970
|
+
}
|
|
10971
|
+
};
|
|
10972
|
+
mediaStreamTrackProcessorWorker.addEventListener("message", this._workerListener);
|
|
10973
|
+
} else {
|
|
10974
|
+
throw new Error("MediaStreamTrackProcessor is required but not supported by this browser.");
|
|
10975
|
+
}
|
|
10976
|
+
}
|
|
10768
10977
|
}
|
|
10769
10978
|
/** @internal */
|
|
10770
10979
|
async _flushAndClose() {
|
|
@@ -10772,6 +10981,25 @@ ${cue.notes ?? ""}`;
|
|
|
10772
10981
|
this._abortController.abort();
|
|
10773
10982
|
this._abortController = null;
|
|
10774
10983
|
}
|
|
10984
|
+
if (this._workerTrackId !== null) {
|
|
10985
|
+
assert(this._workerListener);
|
|
10986
|
+
sendMessageToMediaStreamTrackProcessorWorker({
|
|
10987
|
+
type: "stopTrack",
|
|
10988
|
+
trackId: this._workerTrackId
|
|
10989
|
+
});
|
|
10990
|
+
await new Promise((resolve) => {
|
|
10991
|
+
const listener = (event) => {
|
|
10992
|
+
const message = event.data;
|
|
10993
|
+
if (message.type === "trackStopped" && message.trackId === this._workerTrackId) {
|
|
10994
|
+
assert(this._workerListener);
|
|
10995
|
+
mediaStreamTrackProcessorWorker.removeEventListener("message", this._workerListener);
|
|
10996
|
+
mediaStreamTrackProcessorWorker.removeEventListener("message", listener);
|
|
10997
|
+
resolve();
|
|
10998
|
+
}
|
|
10999
|
+
};
|
|
11000
|
+
mediaStreamTrackProcessorWorker.addEventListener("message", listener);
|
|
11001
|
+
});
|
|
11002
|
+
}
|
|
10775
11003
|
await this._encoder.flushAndClose();
|
|
10776
11004
|
}
|
|
10777
11005
|
};
|
|
@@ -10865,55 +11093,61 @@ ${cue.notes ?? ""}`;
|
|
|
10865
11093
|
this.encoderError = null;
|
|
10866
11094
|
}
|
|
10867
11095
|
async add(audioSample, shouldClose) {
|
|
10868
|
-
|
|
10869
|
-
|
|
10870
|
-
|
|
10871
|
-
if (
|
|
10872
|
-
|
|
10873
|
-
|
|
10874
|
-
|
|
10875
|
-
|
|
10876
|
-
|
|
10877
|
-
|
|
10878
|
-
|
|
10879
|
-
|
|
10880
|
-
if (!this.encoderInitialized) {
|
|
10881
|
-
if (!this.ensureEncoderPromise) {
|
|
10882
|
-
void this.ensureEncoder(audioSample);
|
|
11096
|
+
try {
|
|
11097
|
+
this.checkForEncoderError();
|
|
11098
|
+
this.source._ensureValidAdd();
|
|
11099
|
+
if (this.lastNumberOfChannels !== null && this.lastSampleRate !== null) {
|
|
11100
|
+
if (audioSample.numberOfChannels !== this.lastNumberOfChannels || audioSample.sampleRate !== this.lastSampleRate) {
|
|
11101
|
+
throw new Error(
|
|
11102
|
+
`Audio parameters must remain constant. Expected ${this.lastNumberOfChannels} channels at ${this.lastSampleRate} Hz, got ${audioSample.numberOfChannels} channels at ${audioSample.sampleRate} Hz.`
|
|
11103
|
+
);
|
|
11104
|
+
}
|
|
11105
|
+
} else {
|
|
11106
|
+
this.lastNumberOfChannels = audioSample.numberOfChannels;
|
|
11107
|
+
this.lastSampleRate = audioSample.sampleRate;
|
|
10883
11108
|
}
|
|
10884
11109
|
if (!this.encoderInitialized) {
|
|
10885
|
-
|
|
11110
|
+
if (!this.ensureEncoderPromise) {
|
|
11111
|
+
void this.ensureEncoder(audioSample);
|
|
11112
|
+
}
|
|
11113
|
+
if (!this.encoderInitialized) {
|
|
11114
|
+
await this.ensureEncoderPromise;
|
|
11115
|
+
}
|
|
10886
11116
|
}
|
|
10887
|
-
|
|
10888
|
-
|
|
10889
|
-
|
|
10890
|
-
|
|
10891
|
-
|
|
10892
|
-
|
|
11117
|
+
assert(this.encoderInitialized);
|
|
11118
|
+
if (this.customEncoder) {
|
|
11119
|
+
this.customEncoderQueueSize++;
|
|
11120
|
+
const promise = this.customEncoderCallSerializer.call(() => this.customEncoder.encode(audioSample)).then(() => {
|
|
11121
|
+
this.customEncoderQueueSize--;
|
|
11122
|
+
if (shouldClose) {
|
|
11123
|
+
audioSample.close();
|
|
11124
|
+
}
|
|
11125
|
+
}).catch((error) => {
|
|
11126
|
+
this.encoderError ??= error;
|
|
11127
|
+
});
|
|
11128
|
+
if (this.customEncoderQueueSize >= 4) {
|
|
11129
|
+
await promise;
|
|
11130
|
+
}
|
|
11131
|
+
await this.muxer.mutex.currentPromise;
|
|
11132
|
+
} else if (this.isPcmEncoder) {
|
|
11133
|
+
await this.doPcmEncoding(audioSample, shouldClose);
|
|
11134
|
+
} else {
|
|
11135
|
+
assert(this.encoder);
|
|
11136
|
+
const audioData = audioSample.toAudioData();
|
|
11137
|
+
this.encoder.encode(audioData);
|
|
11138
|
+
audioData.close();
|
|
10893
11139
|
if (shouldClose) {
|
|
10894
11140
|
audioSample.close();
|
|
10895
11141
|
}
|
|
10896
|
-
|
|
10897
|
-
|
|
10898
|
-
|
|
10899
|
-
|
|
10900
|
-
await promise;
|
|
11142
|
+
if (this.encoder.encodeQueueSize >= 4) {
|
|
11143
|
+
await new Promise((resolve) => this.encoder.addEventListener("dequeue", resolve, { once: true }));
|
|
11144
|
+
}
|
|
11145
|
+
await this.muxer.mutex.currentPromise;
|
|
10901
11146
|
}
|
|
10902
|
-
|
|
10903
|
-
} else if (this.isPcmEncoder) {
|
|
10904
|
-
await this.doPcmEncoding(audioSample, shouldClose);
|
|
10905
|
-
} else {
|
|
10906
|
-
assert(this.encoder);
|
|
10907
|
-
const audioData = audioSample.toAudioData();
|
|
10908
|
-
this.encoder.encode(audioData);
|
|
10909
|
-
audioData.close();
|
|
11147
|
+
} finally {
|
|
10910
11148
|
if (shouldClose) {
|
|
10911
11149
|
audioSample.close();
|
|
10912
11150
|
}
|
|
10913
|
-
if (this.encoder.encodeQueueSize >= 4) {
|
|
10914
|
-
await new Promise((resolve) => this.encoder.addEventListener("dequeue", resolve, { once: true }));
|
|
10915
|
-
}
|
|
10916
|
-
await this.muxer.mutex.currentPromise;
|
|
10917
11151
|
}
|
|
10918
11152
|
}
|
|
10919
11153
|
async doPcmEncoding(audioSample, shouldClose) {
|
|
@@ -11185,7 +11419,7 @@ ${cue.notes ?? ""}`;
|
|
|
11185
11419
|
validateAudioEncodingConfig(encodingConfig);
|
|
11186
11420
|
super(encodingConfig.codec);
|
|
11187
11421
|
/** @internal */
|
|
11188
|
-
this.
|
|
11422
|
+
this._accumulatedTime = 0;
|
|
11189
11423
|
this._encoder = new AudioEncoderWrapper(this, encodingConfig);
|
|
11190
11424
|
}
|
|
11191
11425
|
/**
|
|
@@ -11200,37 +11434,9 @@ ${cue.notes ?? ""}`;
|
|
|
11200
11434
|
if (!(audioBuffer instanceof AudioBuffer)) {
|
|
11201
11435
|
throw new TypeError("audioBuffer must be an AudioBuffer.");
|
|
11202
11436
|
}
|
|
11203
|
-
const
|
|
11204
|
-
const
|
|
11205
|
-
|
|
11206
|
-
const totalFrames = audioBuffer.length;
|
|
11207
|
-
const maxFramesPerChunk = Math.floor(MAX_FLOAT_COUNT / numberOfChannels);
|
|
11208
|
-
let currentRelativeFrame = 0;
|
|
11209
|
-
let remainingFrames = totalFrames;
|
|
11210
|
-
const promises = [];
|
|
11211
|
-
while (remainingFrames > 0) {
|
|
11212
|
-
const framesToCopy = Math.min(maxFramesPerChunk, remainingFrames);
|
|
11213
|
-
const chunkData = new Float32Array(numberOfChannels * framesToCopy);
|
|
11214
|
-
for (let channel = 0; channel < numberOfChannels; channel++) {
|
|
11215
|
-
audioBuffer.copyFromChannel(
|
|
11216
|
-
chunkData.subarray(channel * framesToCopy, channel * framesToCopy + framesToCopy),
|
|
11217
|
-
channel,
|
|
11218
|
-
currentRelativeFrame
|
|
11219
|
-
);
|
|
11220
|
-
}
|
|
11221
|
-
const audioSample = new AudioSample({
|
|
11222
|
-
format: "f32-planar",
|
|
11223
|
-
sampleRate,
|
|
11224
|
-
numberOfFrames: framesToCopy,
|
|
11225
|
-
numberOfChannels,
|
|
11226
|
-
timestamp: (this._accumulatedFrameCount + currentRelativeFrame) / sampleRate,
|
|
11227
|
-
data: chunkData
|
|
11228
|
-
});
|
|
11229
|
-
promises.push(this._encoder.add(audioSample, true));
|
|
11230
|
-
currentRelativeFrame += framesToCopy;
|
|
11231
|
-
remainingFrames -= framesToCopy;
|
|
11232
|
-
}
|
|
11233
|
-
this._accumulatedFrameCount += totalFrames;
|
|
11437
|
+
const audioSamples = AudioSample.fromAudioBuffer(audioBuffer, this._accumulatedTime);
|
|
11438
|
+
const promises = audioSamples.map((sample) => this._encoder.add(sample, true));
|
|
11439
|
+
this._accumulatedTime += audioBuffer.duration;
|
|
11234
11440
|
return Promise.all(promises);
|
|
11235
11441
|
}
|
|
11236
11442
|
/** @internal */
|
|
@@ -11247,36 +11453,97 @@ ${cue.notes ?? ""}`;
|
|
|
11247
11453
|
super(encodingConfig.codec);
|
|
11248
11454
|
/** @internal */
|
|
11249
11455
|
this._abortController = null;
|
|
11456
|
+
/** @internal */
|
|
11457
|
+
this._audioContext = null;
|
|
11458
|
+
/** @internal */
|
|
11459
|
+
this._scriptProcessorNode = null;
|
|
11460
|
+
// Deprecated but goated
|
|
11461
|
+
/** @internal */
|
|
11462
|
+
this._promiseWithResolvers = promiseWithResolvers();
|
|
11463
|
+
/** @internal */
|
|
11464
|
+
this._errorPromiseAccessed = false;
|
|
11250
11465
|
this._encoder = new AudioEncoderWrapper(this, encodingConfig);
|
|
11251
11466
|
this._track = track;
|
|
11252
11467
|
}
|
|
11468
|
+
/** A promise that rejects upon any error within this source. This promise never resolves. */
|
|
11469
|
+
get errorPromise() {
|
|
11470
|
+
this._errorPromiseAccessed = true;
|
|
11471
|
+
return this._promiseWithResolvers.promise;
|
|
11472
|
+
}
|
|
11253
11473
|
/** @internal */
|
|
11254
|
-
_start() {
|
|
11474
|
+
async _start() {
|
|
11475
|
+
if (!this._errorPromiseAccessed) {
|
|
11476
|
+
console.warn(
|
|
11477
|
+
"Make sure not to ignore the `errorPromise` field on MediaStreamVideoTrackSource, so that any internal errors get bubbled up properly."
|
|
11478
|
+
);
|
|
11479
|
+
}
|
|
11255
11480
|
this._abortController = new AbortController();
|
|
11256
|
-
|
|
11257
|
-
|
|
11258
|
-
|
|
11259
|
-
|
|
11260
|
-
|
|
11261
|
-
|
|
11262
|
-
|
|
11263
|
-
|
|
11264
|
-
|
|
11265
|
-
|
|
11266
|
-
|
|
11481
|
+
if (typeof MediaStreamTrackProcessor !== "undefined") {
|
|
11482
|
+
let firstAudioDataTimestamp = null;
|
|
11483
|
+
const processor = new MediaStreamTrackProcessor({ track: this._track });
|
|
11484
|
+
const consumer = new WritableStream({
|
|
11485
|
+
write: (audioData) => {
|
|
11486
|
+
if (firstAudioDataTimestamp === null) {
|
|
11487
|
+
firstAudioDataTimestamp = audioData.timestamp / 1e6;
|
|
11488
|
+
const muxer = this._connectedTrack.output._muxer;
|
|
11489
|
+
if (muxer.firstMediaStreamTimestamp === null) {
|
|
11490
|
+
muxer.firstMediaStreamTimestamp = performance.now() / 1e3;
|
|
11491
|
+
this._timestampOffset = -firstAudioDataTimestamp;
|
|
11492
|
+
} else {
|
|
11493
|
+
this._timestampOffset = performance.now() / 1e3 - muxer.firstMediaStreamTimestamp - firstAudioDataTimestamp;
|
|
11494
|
+
}
|
|
11495
|
+
}
|
|
11496
|
+
if (this._encoder.getQueueSize() >= 4) {
|
|
11497
|
+
audioData.close();
|
|
11498
|
+
return;
|
|
11499
|
+
}
|
|
11500
|
+
void this._encoder.add(new AudioSample(audioData), true).catch((error) => {
|
|
11501
|
+
this._abortController?.abort();
|
|
11502
|
+
this._promiseWithResolvers.reject(error);
|
|
11503
|
+
});
|
|
11267
11504
|
}
|
|
11268
|
-
|
|
11269
|
-
|
|
11270
|
-
|
|
11271
|
-
|
|
11272
|
-
|
|
11273
|
-
|
|
11274
|
-
|
|
11275
|
-
|
|
11276
|
-
|
|
11277
|
-
|
|
11278
|
-
|
|
11279
|
-
|
|
11505
|
+
});
|
|
11506
|
+
processor.readable.pipeTo(consumer, {
|
|
11507
|
+
signal: this._abortController.signal
|
|
11508
|
+
}).catch((error) => {
|
|
11509
|
+
if (error instanceof DOMException && error.name === "AbortError") return;
|
|
11510
|
+
this._promiseWithResolvers.reject(error);
|
|
11511
|
+
});
|
|
11512
|
+
} else {
|
|
11513
|
+
this._audioContext = new AudioContext({ sampleRate: this._track.getSettings().sampleRate });
|
|
11514
|
+
const sourceNode = this._audioContext.createMediaStreamSource(new MediaStream([this._track]));
|
|
11515
|
+
this._scriptProcessorNode = this._audioContext.createScriptProcessor(4096);
|
|
11516
|
+
if (this._audioContext.state === "suspended") {
|
|
11517
|
+
await this._audioContext.resume();
|
|
11518
|
+
}
|
|
11519
|
+
sourceNode.connect(this._scriptProcessorNode);
|
|
11520
|
+
this._scriptProcessorNode.connect(this._audioContext.destination);
|
|
11521
|
+
let audioReceived = false;
|
|
11522
|
+
let totalDuration = 0;
|
|
11523
|
+
this._scriptProcessorNode.onaudioprocess = (event) => {
|
|
11524
|
+
const audioSamples = AudioSample.fromAudioBuffer(event.inputBuffer, totalDuration);
|
|
11525
|
+
totalDuration += event.inputBuffer.duration;
|
|
11526
|
+
for (const audioSample of audioSamples) {
|
|
11527
|
+
if (!audioReceived) {
|
|
11528
|
+
audioReceived = true;
|
|
11529
|
+
const muxer = this._connectedTrack.output._muxer;
|
|
11530
|
+
if (muxer.firstMediaStreamTimestamp === null) {
|
|
11531
|
+
muxer.firstMediaStreamTimestamp = performance.now() / 1e3;
|
|
11532
|
+
} else {
|
|
11533
|
+
this._timestampOffset = performance.now() / 1e3 - muxer.firstMediaStreamTimestamp;
|
|
11534
|
+
}
|
|
11535
|
+
}
|
|
11536
|
+
if (this._encoder.getQueueSize() >= 4) {
|
|
11537
|
+
audioSample.close();
|
|
11538
|
+
continue;
|
|
11539
|
+
}
|
|
11540
|
+
void this._encoder.add(audioSample, true).catch((error) => {
|
|
11541
|
+
void this._audioContext.suspend();
|
|
11542
|
+
this._promiseWithResolvers.reject(error);
|
|
11543
|
+
});
|
|
11544
|
+
}
|
|
11545
|
+
};
|
|
11546
|
+
}
|
|
11280
11547
|
}
|
|
11281
11548
|
/** @internal */
|
|
11282
11549
|
async _flushAndClose() {
|
|
@@ -11284,17 +11551,120 @@ ${cue.notes ?? ""}`;
|
|
|
11284
11551
|
this._abortController.abort();
|
|
11285
11552
|
this._abortController = null;
|
|
11286
11553
|
}
|
|
11554
|
+
if (this._audioContext) {
|
|
11555
|
+
assert(this._scriptProcessorNode);
|
|
11556
|
+
this._scriptProcessorNode.disconnect();
|
|
11557
|
+
await this._audioContext.suspend();
|
|
11558
|
+
}
|
|
11287
11559
|
await this._encoder.flushAndClose();
|
|
11288
11560
|
}
|
|
11289
11561
|
};
|
|
11290
|
-
var
|
|
11291
|
-
const
|
|
11292
|
-
|
|
11293
|
-
|
|
11294
|
-
|
|
11295
|
-
|
|
11562
|
+
var mediaStreamTrackProcessorWorkerCode = () => {
  // This function is stringified and executed inside a dedicated Worker via a
  // blob URL, so it must only reference worker-global names.
  // NOTE(review): `assertNever` below is not defined inside the worker scope;
  // an unknown message type would surface as a ReferenceError rather than a
  // clean assertion. Unreachable for the known message types — confirm upstream.
  const post = (message, transfer) => {
    if (transfer) {
      self.postMessage(message, transfer);
    } else {
      self.postMessage(message);
    }
  };
  // Announce once, on startup, whether MediaStreamTrackProcessor exists here.
  post({
    type: "support",
    supported: typeof MediaStreamTrackProcessor !== "undefined"
  });
  const abortControllers = /* @__PURE__ */ new Map(); // trackId -> AbortController
  const stoppedTracks = /* @__PURE__ */ new Set(); // trackIds already stopped
  // Pipe frames of a video track back to the main thread until aborted.
  const handleVideoTrack = (message) => {
    const processor = new MediaStreamTrackProcessor({ track: message.track });
    const consumer = new WritableStream({
      write: (videoFrame) => {
        // Frames may still arrive after a stop request; drop them.
        if (stoppedTracks.has(message.trackId)) {
          videoFrame.close();
          return;
        }
        // Transfer the frame back to the main thread (zero-copy).
        post({
          type: "videoFrame",
          trackId: message.trackId,
          videoFrame
        }, [videoFrame]);
      }
    });
    const abortController = new AbortController();
    abortControllers.set(message.trackId, abortController);
    processor.readable.pipeTo(consumer, {
      signal: abortController.signal
    }).catch((error) => {
      // Aborting is the expected way to stop the pipe; not an error.
      if (error instanceof DOMException && error.name === "AbortError") return;
      post({
        type: "error",
        trackId: message.trackId,
        error
      });
    });
  };
  // Abort the pipe for a track and acknowledge the stop to the main thread.
  const handleStopTrack = (message) => {
    const abortController = abortControllers.get(message.trackId);
    if (abortController) {
      abortController.abort();
      abortControllers.delete(message.trackId);
    }
    stoppedTracks.add(message.trackId);
    post({
      type: "trackStopped",
      trackId: message.trackId
    });
  };
  self.addEventListener("message", (event) => {
    const message = event.data;
    if (message.type === "videoTrack") {
      handleVideoTrack(message);
    } else if (message.type === "stopTrack") {
      handleStopTrack(message);
    } else {
      assertNever(message);
    }
  });
};
|
|
11630
|
+
var nextMediaStreamTrackProcessorWorkerId = 0;
var mediaStreamTrackProcessorWorker = null;
// Lazily spins up the worker that hosts MediaStreamTrackProcessor. The worker
// source is the stringified mediaStreamTrackProcessorWorkerCode function,
// self-invoked, served from a blob URL.
var initMediaStreamTrackProcessorWorker = () => {
  const workerSource = `(${mediaStreamTrackProcessorWorkerCode.toString()})()`;
  const blob = new Blob([workerSource], { type: "application/javascript" });
  // NOTE(review): the object URL is never revoked — presumably fine for a
  // one-off singleton worker; confirm this is not re-initialized repeatedly.
  const workerUrl = URL.createObjectURL(blob);
  mediaStreamTrackProcessorWorker = new Worker(workerUrl);
};
|
|
11640
|
+
var mediaStreamTrackProcessorIsSupportedInWorkerCache = null;
// Resolves with whether MediaStreamTrackProcessor is available inside the
// worker. The worker posts a single "support" message on startup; the answer
// is cached so subsequent calls return synchronously-known state.
var mediaStreamTrackProcessorIsSupportedInWorker = async () => {
  if (mediaStreamTrackProcessorIsSupportedInWorkerCache !== null) {
    return mediaStreamTrackProcessorIsSupportedInWorkerCache;
  }
  if (!mediaStreamTrackProcessorWorker) {
    initMediaStreamTrackProcessorWorker();
  }
  return new Promise((resolve) => {
    assert(mediaStreamTrackProcessorWorker);
    const onMessage = (event) => {
      const message = event.data;
      if (message.type !== "support") {
        return; // Ignore unrelated traffic (e.g. video frames).
      }
      mediaStreamTrackProcessorIsSupportedInWorkerCache = message.supported;
      mediaStreamTrackProcessorWorker.removeEventListener("message", onMessage);
      resolve(message.supported);
    };
    mediaStreamTrackProcessorWorker.addEventListener("message", onMessage);
  });
};
|
|
11661
|
+
// Posts a message to the processor worker, optionally transferring objects
// (e.g. VideoFrames) instead of copying them. The worker must already have
// been created via initMediaStreamTrackProcessorWorker.
var sendMessageToMediaStreamTrackProcessorWorker = (message, transfer) => {
  assert(mediaStreamTrackProcessorWorker);
  if (transfer) {
    mediaStreamTrackProcessorWorker.postMessage(message, transfer);
    return;
  }
  mediaStreamTrackProcessorWorker.postMessage(message);
};
|
|
11299
11669
|
var SubtitleSource = class extends MediaSource {
|
|
11300
11670
|
constructor(codec) {
|
|
@@ -11516,9 +11886,8 @@ ${cue.notes ?? ""}`;
|
|
|
11516
11886
|
this._writer.start();
|
|
11517
11887
|
const release = await this._mutex.acquire();
|
|
11518
11888
|
await this._muxer.start();
|
|
11519
|
-
|
|
11520
|
-
|
|
11521
|
-
}
|
|
11889
|
+
const promises = this._tracks.map((track) => track.source._start());
|
|
11890
|
+
await Promise.all(promises);
|
|
11522
11891
|
release();
|
|
11523
11892
|
})();
|
|
11524
11893
|
}
|
|
@@ -11761,106 +12130,6 @@ ${cue.notes ?? ""}`;
|
|
|
11761
12130
|
}
|
|
11762
12131
|
};
|
|
11763
12132
|
|
|
11764
|
-
// src/isobmff/isobmff-reader.ts
var MIN_BOX_HEADER_SIZE = 8;
var MAX_BOX_HEADER_SIZE = 16;
// Big-endian cursor-based reader over an ISOBMFF (MP4/MOV) byte stream.
// All multi-byte integers in ISOBMFF are big-endian.
var IsobmffReader = class {
  /**
   * @param reader Underlying source exposing getViewAndOffset(start, end),
   *   which yields a DataView plus the offset of `start` within it.
   */
  constructor(reader) {
    this.reader = reader;
    this.pos = 0; // Current absolute read position in bytes.
  }
  /** Fetches a view over the next `length` bytes and advances the cursor. */
  advance(length) {
    const result = this.reader.getViewAndOffset(this.pos, this.pos + length);
    this.pos += length;
    return result;
  }
  /** Reads `length` raw bytes as a Uint8Array view (no copy). */
  readBytes(length) {
    const { view, offset } = this.advance(length);
    return new Uint8Array(view.buffer, offset, length);
  }
  readU8() {
    const { view, offset } = this.advance(1);
    return view.getUint8(offset);
  }
  readU16() {
    const { view, offset } = this.advance(2);
    return view.getUint16(offset, false);
  }
  readI16() {
    const { view, offset } = this.advance(2);
    return view.getInt16(offset, false);
  }
  readU24() {
    const { view, offset } = this.advance(3);
    // No native 24-bit accessor: combine a 16-bit high part with the low byte.
    return view.getUint16(offset, false) * 256 + view.getUint8(offset + 2);
  }
  readU32() {
    const { view, offset } = this.advance(4);
    return view.getUint32(offset, false);
  }
  readI32() {
    const { view, offset } = this.advance(4);
    return view.getInt32(offset, false);
  }
  readU64() {
    // Composed from two 32-bit reads; exact only up to Number.MAX_SAFE_INTEGER.
    const high = this.readU32();
    const low = this.readU32();
    return high * 4294967296 + low;
  }
  readI64() {
    // Signed high word carries the sign for the combined 64-bit value.
    const high = this.readI32();
    const low = this.readU32();
    return high * 4294967296 + low;
  }
  readF64() {
    const { view, offset } = this.advance(8);
    return view.getFloat64(offset, false);
  }
  /** Reads a signed 16.16 fixed-point number (used e.g. for matrix values). */
  readFixed_16_16() {
    return this.readI32() / 65536;
  }
  /** Reads a signed 2.30 fixed-point number. */
  readFixed_2_30() {
    return this.readI32() / 1073741824;
  }
  /** Reads `length` bytes and decodes them as one-byte-per-char ASCII. */
  readAscii(length) {
    const { view, offset } = this.advance(length);
    let result = "";
    for (let i = 0; i < length; i++) {
      result += String.fromCharCode(view.getUint8(offset + i));
    }
    return result;
  }
  /**
   * Reads a base-128 variable-length integer: 7 payload bits per byte, the
   * high bit signals continuation, at most 4 bytes.
   */
  readIsomVariableInteger() {
    let value = 0;
    for (let i = 0; i < 4; i++) {
      value <<= 7;
      const nextByte = this.readU8();
      value |= nextByte & 127;
      if ((nextByte & 128) === 0) {
        break;
      }
    }
    return value;
  }
  /**
   * Reads a box header. A 32-bit size of 1 signals that a 64-bit "largesize"
   * field follows the 4-character box name.
   */
  readBoxHeader() {
    let totalSize = this.readU32();
    const name = this.readAscii(4);
    let headerSize = MIN_BOX_HEADER_SIZE;
    if (totalSize === 1) {
      totalSize = this.readU64();
      headerSize = MAX_BOX_HEADER_SIZE;
    }
    return { name, totalSize, headerSize, contentSize: totalSize - headerSize };
  }
};
|
|
11863
|
-
|
|
11864
12133
|
// src/isobmff/isobmff-demuxer.ts
|
|
11865
12134
|
var IsobmffDemuxer = class extends Demuxer {
|
|
11866
12135
|
constructor(input) {
|