@remotion/studio 4.0.332 → 4.0.334
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-make.log +2 -2
- package/dist/esm/chunk-1g0vys7t.js +46593 -0
- package/dist/esm/chunk-kssm9t8x.js +46596 -0
- package/dist/esm/chunk-s0cbvv3s.js +46543 -0
- package/dist/esm/internals.mjs +218 -209
- package/dist/esm/previewEntry.mjs +218 -209
- package/dist/esm/renderEntry.mjs +1 -1
- package/package.json +10 -10
- package/tsconfig.tsbuildinfo +1 -1
package/dist/esm/internals.mjs
CHANGED
@@ -24055,6 +24055,17 @@ var getAvccBox = (trakBox) => {
   }
   return avccBox;
 };
+var getVpccBox = (trakBox) => {
+  const videoConfig = getStsdVideoConfig(trakBox);
+  if (!videoConfig) {
+    return null;
+  }
+  const vpccBox = videoConfig.descriptors.find((c) => c.type === "vpcc-box");
+  if (!vpccBox || vpccBox.type !== "vpcc-box") {
+    return null;
+  }
+  return vpccBox;
+};
 var getAv1CBox = (trakBox) => {
   const videoConfig = getStsdVideoConfig(trakBox);
   if (!videoConfig) {
@@ -24210,21 +24221,25 @@ var getIsoBmColrConfig = (trakBox) => {
 var getVideoCodecString = (trakBox) => {
   const videoSample = getStsdVideoConfig(trakBox);
   const avccBox = getAvccBox(trakBox);
-  const hvccBox = getHvccBox(trakBox);
-  const av1cBox = getAv1CBox(trakBox);
   if (!videoSample) {
     return null;
   }
   if (avccBox) {
     return `${videoSample.format}.${avccBox.configurationString}`;
   }
+  const hvccBox = getHvccBox(trakBox);
   if (hvccBox) {
     return `${videoSample.format}.${hvccBox.configurationString}`;
   }
+  const av1cBox = getAv1CBox(trakBox);
   if (av1cBox) {
     const colrAtom = getColrBox(videoSample);
     return parseAv1PrivateData(av1cBox.privateData, colrAtom);
   }
+  const vpccBox = getVpccBox(trakBox);
+  if (vpccBox) {
+    return `${videoSample.format}.${vpccBox.codecString}`;
+  }
   return videoSample.format;
 };
 var normalizeVideoRotation = (rotation) => {
@@ -24274,6 +24289,9 @@ var getVideoCodecFromIsoTrak = (trakBox) => {
   if (videoSample.format === "av01") {
     return "av1";
   }
+  if (videoSample.format === "vp09") {
+    return "vp9";
+  }
   if (videoSample.format === "ap4h") {
     return "prores";
   }
@@ -25206,6 +25224,51 @@ var parseStts = ({
     sampleDistribution: sampleDistributions
   };
 };
+var getvp09ConfigurationString = ({
+  profile,
+  level,
+  bitDepth: bitDepth2
+}) => {
+  return `${String(profile).padStart(2, "0")}.${String(level).padStart(2, "0")}.${String(bitDepth2).padStart(2, "0")}`;
+};
+var parseVpcc = ({
+  data,
+  size: size4
+}) => {
+  const box2 = data.startBox(size4 - 8);
+  const confVersion = data.getUint8();
+  if (confVersion !== 1) {
+    throw new Error(`Unsupported AVCC version ${confVersion}`);
+  }
+  data.discard(3);
+  const profile = data.getUint8();
+  const level = data.getUint8();
+  data.startReadingBits();
+  const bitDepth2 = data.getBits(4);
+  const chromaSubsampling = data.getBits(3);
+  const videoFullRangeFlag = data.getBits(1);
+  const videoColorPrimaries = data.getBits(8);
+  const videoTransferCharacteristics = data.getBits(8);
+  const videoMatrixCoefficients = data.getBits(8);
+  data.stopReadingBits();
+  const codecInitializationDataSize = data.getUint16();
+  const codecInitializationData = data.getSlice(codecInitializationDataSize);
+  box2.expectNoMoreBytes();
+  return {
+    type: "vpcc-box",
+    profile,
+    level,
+    bitDepth: bitDepth2,
+    chromaSubsampling,
+    videoFullRangeFlag,
+    videoColorPrimaries,
+    videoTransferCharacteristics,
+    videoMatrixCoefficients,
+    codecInitializationDataSize,
+    codecInitializationData,
+    codecString: getvp09ConfigurationString({ profile, level, bitDepth: bitDepth2 })
+  };
+};
 var parseTfdt = ({
   iterator,
   size: size4,
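
Note: the new parseVpcc above derives `codecString` from profile, level and bit depth via getvp09ConfigurationString. A minimal standalone sketch of that formatting (it restates the padding logic shown in the hunk rather than importing anything from the package):

// Restatement of getvp09ConfigurationString, for illustration only.
const vp09ConfigurationString = ({ profile, level, bitDepth }) =>
  [profile, level, bitDepth].map((n) => String(n).padStart(2, "0")).join(".");

// Profile 0, level 10, 8-bit yields "00.10.08"; getVideoCodecString() then prefixes
// the sample format, giving e.g. "vp09.00.10.08".
console.log(vp09ConfigurationString({ profile: 0, level: 10, bitDepth: 8 })); // "00.10.08"
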
@@ -25522,7 +25585,7 @@ var processBox = async ({
   if (boxType === "stsz") {
     return {
       type: "box",
-      box:
+      box: parseStsz({
         iterator,
         offset: fileOffset,
         size: boxSize
@@ -25532,7 +25595,7 @@ var processBox = async ({
   if (boxType === "stco" || boxType === "co64") {
     return {
       type: "box",
-      box:
+      box: parseStco({
         iterator,
         offset: fileOffset,
         size: boxSize,
@@ -25543,7 +25606,7 @@ var processBox = async ({
   if (boxType === "pasp") {
     return {
       type: "box",
-      box:
+      box: parsePasp({
         iterator,
         offset: fileOffset,
         size: boxSize
@@ -25553,7 +25616,7 @@ var processBox = async ({
   if (boxType === "stss") {
     return {
       type: "box",
-      box:
+      box: parseStss({
         iterator,
         offset: fileOffset,
         boxSize
@@ -25563,7 +25626,7 @@ var processBox = async ({
   if (boxType === "ctts") {
     return {
       type: "box",
-      box:
+      box: parseCtts({
         iterator,
         offset: fileOffset,
         size: boxSize
@@ -25573,7 +25636,7 @@ var processBox = async ({
   if (boxType === "stsc") {
     return {
       type: "box",
-      box:
+      box: parseStsc({
         iterator,
         offset: fileOffset,
         size: boxSize
@@ -25692,7 +25755,7 @@ var processBox = async ({
   if (boxType === "stts") {
     return {
       type: "box",
-      box:
+      box: parseStts({
         data: iterator,
         size: boxSize,
         fileOffset
@@ -25702,16 +25765,22 @@ var processBox = async ({
   if (boxType === "avcC") {
     return {
       type: "box",
-      box:
+      box: parseAvcc({
         data: iterator,
         size: boxSize
       })
     };
   }
+  if (boxType === "vpcC") {
+    return {
+      type: "box",
+      box: parseVpcc({ data: iterator, size: boxSize })
+    };
+  }
   if (boxType === "av1C") {
     return {
       type: "box",
-      box:
+      box: parseAv1C({
         data: iterator,
         size: boxSize
       })
@@ -25720,7 +25789,7 @@ var processBox = async ({
   if (boxType === "hvcC") {
     return {
       type: "box",
-      box:
+      box: parseHvcc({
         data: iterator,
         size: boxSize,
         offset: fileOffset
@@ -25730,7 +25799,7 @@ var processBox = async ({
   if (boxType === "tfhd") {
     return {
       type: "box",
-      box:
+      box: getTfhd({
         iterator,
         offset: fileOffset,
         size: boxSize
@@ -25740,7 +25809,7 @@ var processBox = async ({
   if (boxType === "mdhd") {
     return {
       type: "box",
-      box:
+      box: parseMdhd({
         data: iterator,
         size: boxSize,
         fileOffset
@@ -25750,7 +25819,7 @@ var processBox = async ({
   if (boxType === "esds") {
     return {
       type: "box",
-      box:
+      box: parseEsds({
         data: iterator,
         size: boxSize,
         fileOffset
@@ -25760,7 +25829,7 @@ var processBox = async ({
   if (boxType === "trex") {
     return {
       type: "box",
-      box:
+      box: parseTrex({ iterator, offset: fileOffset, size: boxSize })
     };
   }
   if (boxType === "moof") {
@@ -25855,7 +25924,9 @@ var videoTags = [
   "hvc1",
   "hev1",
   "ap4h",
-  "av01"
+  "av01",
+  "vp08",
+  "vp09"
 ];
 var audioTags = [
   0,
@@ -29811,7 +29882,8 @@ var performSeek = async ({
   src,
   discardReadBytes,
   fields,
-  prefetchCache
+  prefetchCache,
+  isoState
 }) => {
   const byteInMediaSection = isByteInMediaSection({
     position: seekTo,
@@ -29875,6 +29947,9 @@ var performSeek = async ({
       prefetchCache
     });
   }
+  if (userInitiated) {
+    isoState.flatSamples.updateAfterSeek(seekTo);
+  }
   await controller._internals.checkForAbortAndPause();
 };
 var turnSeekIntoByte = async ({
@@ -30051,7 +30126,8 @@ var workOnSeekRequest = async (options) => {
       src,
       discardReadBytes,
       fields,
-      prefetchCache
+      prefetchCache,
+      isoState
     });
     return;
   }
@@ -30071,7 +30147,8 @@ var workOnSeekRequest = async (options) => {
     src,
     discardReadBytes,
     fields,
-    prefetchCache
+    prefetchCache,
+    isoState
   });
   const { hasChanged } = controller._internals.seekSignal.clearSeekIfStillSame(seek2);
   if (hasChanged) {
@@ -31135,9 +31212,10 @@ var parseFlac = ({
     state
   });
 };
-var
+var calculateSamplePositions = ({
   state,
-  mediaSectionStart
+  mediaSectionStart,
+  trackIds
 }) => {
   const tracks2 = getTracks(state, true);
   const moofBoxes = getMoofBoxes(state.structure.getIsoStructure().boxes);
@@ -31159,11 +31237,13 @@ var calculateFlatSamples = ({
   if (!moov) {
     throw new Error("No moov box found");
   }
-  const
-  const trackIds = [];
-  const map = new Map;
+  const trackIdAndSamplePositions = [];
   for (const track of tracks2) {
     const trakBox = getTrakBoxByTrackId(moov, track.trackId);
+    if (!trackIds.includes(track.trackId)) {
+      Log.verbose(state.logLevel, "Skipping calculating sample positions for track", track.trackId);
+      continue;
+    }
     if (!trakBox) {
       throw new Error("No trak box found");
     }
@@ -31173,36 +31253,88 @@ var calculateFlatSamples = ({
       moofComplete,
       trexBoxes: getTrexBoxes(moov)
     });
-
-
-
-
-
-
-
+    trackIdAndSamplePositions.push({
+      trackId: track.trackId,
+      samplePositions
+    });
+  }
+  return trackIdAndSamplePositions;
+};
+var updateSampleIndicesAfterSeek = ({
+  samplePositionsForMdatStart,
+  seekedByte
+}) => {
+  const currentSampleIndices = {};
+  const keys = Object.keys(samplePositionsForMdatStart).map(Number).sort();
+  const mdat = keys.find((key4) => seekedByte >= key4);
+  if (!mdat) {
+    return currentSampleIndices;
+  }
+  const samplePositions = samplePositionsForMdatStart[mdat];
+  if (!samplePositions) {
+    return currentSampleIndices;
+  }
+  for (const track of samplePositions) {
+    const currentSampleIndex = track.samplePositions.findIndex((sample) => sample.offset >= seekedByte);
+    if (!currentSampleIndices[mdat]) {
+      currentSampleIndices[mdat] = {};
+    }
+    if (!currentSampleIndices[mdat][track.trackId]) {
+      currentSampleIndices[mdat][track.trackId] = 0;
+    }
+    if (currentSampleIndex === -1) {
+      currentSampleIndices[mdat][track.trackId] = track.samplePositions.length;
+    } else {
+      currentSampleIndices[mdat][track.trackId] = currentSampleIndex;
     }
   }
-
-  return { flatSamples: map, offsets, trackIds };
+  return currentSampleIndices;
 };
 var cachedSamplePositionsState = () => {
-  const
-
+  const samplePositionsForMdatStart = {};
+  let currentSampleIndex = {};
   return {
     getSamples: (mdatStart) => {
-      return
+      return samplePositionsForMdatStart[mdatStart] ?? null;
     },
     setSamples: (mdatStart, samples) => {
-
+      samplePositionsForMdatStart[mdatStart] = samples;
     },
-
-
+    setCurrentSampleIndex: (mdatStart, trackId, index) => {
+      if (!currentSampleIndex[mdatStart]) {
+        currentSampleIndex[mdatStart] = {};
+      }
+      if (!currentSampleIndex[mdatStart][trackId]) {
+        currentSampleIndex[mdatStart][trackId] = 0;
+      }
+      currentSampleIndex[mdatStart][trackId] = index;
     },
-
-      return
+    getCurrentSampleIndices: (mdatStart) => {
+      return currentSampleIndex[mdatStart] ?? {};
+    },
+    updateAfterSeek: (seekedByte) => {
+      currentSampleIndex = updateSampleIndicesAfterSeek({
+        samplePositionsForMdatStart,
+        seekedByte
+      });
     }
   };
 };
+var getSampleWithLowestDts = (samplePositions, currentSampleIndexMap) => {
+  const lowestDts = [];
+  for (const track of samplePositions) {
+    const currentSampleIndex = currentSampleIndexMap[track.trackId] ?? 0;
+    const currentSample = track.samplePositions[currentSampleIndex];
+    if (currentSample && (lowestDts.length === 0 || currentSample.decodingTimestamp <= lowestDts[0].samplePosition.decodingTimestamp)) {
+      lowestDts.push({
+        samplePosition: currentSample,
+        trackId: track.trackId,
+        index: currentSampleIndex
+      });
+    }
+  }
+  return lowestDts;
+};
 var getLastMoofBox = (boxes) => {
   if (boxes) {
     const tfras = boxes.filter((b) => b.type === "tfra-box");
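
Note: cachedSamplePositionsState now tracks a per-mdat, per-track "current sample index", and updateAfterSeek rebuilds it from the cached sample tables. A small self-contained sketch of that rule with made-up data (the helper below is hypothetical and only mirrors the shape consumed by updateSampleIndicesAfterSeek):

// Made-up sample table for one mdat starting at byte 1000, with two tracks.
const samplePositionsForMdatStart = {
  1000: [
    { trackId: 1, samplePositions: [{ offset: 1000 }, { offset: 1500 }, { offset: 2000 }] },
    { trackId: 2, samplePositions: [{ offset: 1200 }, { offset: 1800 }] }
  ]
};

// Mirrors the rule above: resume each track at the first sample whose offset is at or
// after the seeked byte, or past the end of the track if none remains.
const indicesAfterSeek = (table, seekedByte) => {
  const result = {};
  const mdat = Object.keys(table).map(Number).sort((a, b) => a - b).find((key) => seekedByte >= key);
  if (mdat === undefined) return result;
  result[mdat] = {};
  for (const track of table[mdat]) {
    const idx = track.samplePositions.findIndex((s) => s.offset >= seekedByte);
    result[mdat][track.trackId] = idx === -1 ? track.samplePositions.length : idx;
  }
  return result;
};

console.log(indicesAfterSeek(samplePositionsForMdatStart, 1600));
// Logs an index map equivalent to { 1000: { 1: 2, 2: 1 } }:
// track 1 resumes at offset 2000, track 2 at offset 1800.
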
@@ -31472,125 +31604,6 @@ var getMoovAtom = async ({
   Log.verbose(state.logLevel, `Finished fetching moov atom in ${Date.now() - start}ms`);
   return moov;
 };
-var MAX_SPREAD_IN_SECONDS = 8;
-var getKey = (samplePositionTrack) => {
-  return `${samplePositionTrack.track.trackId}-${samplePositionTrack.samplePosition.decodingTimestamp}.${samplePositionTrack.samplePosition.offset}`;
-};
-var findBestJump = ({
-  sampleMap,
-  offsetsSorted,
-  visited,
-  progresses
-}) => {
-  const minProgress = Math.min(...Object.values(progresses));
-  const trackNumberWithLowestProgress = Object.entries(progresses).find(([, progress]) => progress === minProgress)?.[0];
-  const firstSampleAboveMinProgress = offsetsSorted.findIndex((offset) => sampleMap.get(offset).track.trackId === Number(trackNumberWithLowestProgress) && !visited.has(getKey(sampleMap.get(offset))));
-  return firstSampleAboveMinProgress;
-};
-var calculateJumpMarks = ({
-  sampleMap,
-  offsetsSorted,
-  trackIds,
-  endOfMdat
-}) => {
-  const progresses = {};
-  for (const trackId of trackIds) {
-    progresses[trackId] = 0;
-  }
-  const jumpMarks = [];
-  let indexToVisit = 0;
-  const visited = new Set;
-  let rollOverToProcess = false;
-  const increaseIndex = () => {
-    indexToVisit++;
-    if (indexToVisit >= offsetsSorted.length) {
-      rollOverToProcess = true;
-      indexToVisit = 0;
-    }
-  };
-  let lastVisitedSample = null;
-  const addJumpMark = ({
-    firstSampleAboveMinProgress
-  }) => {
-    if (!lastVisitedSample) {
-      throw new Error("no last visited sample");
-    }
-    const jumpMark = {
-      afterSampleWithOffset: lastVisitedSample.samplePosition.offset,
-      jumpToOffset: offsetsSorted[firstSampleAboveMinProgress]
-    };
-    indexToVisit = firstSampleAboveMinProgress;
-    jumpMarks.push(jumpMark);
-  };
-  const addFinalJumpIfNecessary = () => {
-    if (indexToVisit === offsetsSorted.length - 1) {
-      return;
-    }
-    jumpMarks.push({
-      afterSampleWithOffset: offsetsSorted[indexToVisit],
-      jumpToOffset: endOfMdat
-    });
-  };
-  const considerJump = () => {
-    const firstSampleAboveMinProgress = findBestJump({
-      sampleMap,
-      offsetsSorted,
-      visited,
-      progresses
-    });
-    if (firstSampleAboveMinProgress > -1 && firstSampleAboveMinProgress !== indexToVisit + 1) {
-      addJumpMark({ firstSampleAboveMinProgress });
-      indexToVisit = firstSampleAboveMinProgress;
-    } else {
-      while (true) {
-        increaseIndex();
-        if (!visited.has(getKey(sampleMap.get(offsetsSorted[indexToVisit])))) {
-          break;
-        }
-      }
-    }
-  };
-  while (true) {
-    const currentSamplePosition = sampleMap.get(offsetsSorted[indexToVisit]);
-    const sampleKey = getKey(currentSamplePosition);
-    if (visited.has(sampleKey)) {
-      considerJump();
-      continue;
-    }
-    visited.add(sampleKey);
-    if (rollOverToProcess) {
-      if (!lastVisitedSample) {
-        throw new Error("no last visited sample");
-      }
-      jumpMarks.push({
-        afterSampleWithOffset: lastVisitedSample.samplePosition.offset,
-        jumpToOffset: currentSamplePosition.samplePosition.offset
-      });
-      rollOverToProcess = false;
-    }
-    lastVisitedSample = currentSamplePosition;
-    if (visited.size === offsetsSorted.length) {
-      addFinalJumpIfNecessary();
-      break;
-    }
-    const timestamp = currentSamplePosition.samplePosition.decodingTimestamp / currentSamplePosition.track.originalTimescale;
-    progresses[currentSamplePosition.track.trackId] = timestamp;
-    const progressValues = Object.values(progresses);
-    const maxProgress = Math.max(...progressValues);
-    const minProgress = Math.min(...progressValues);
-    const spread = maxProgress - minProgress;
-    if (visited.size === offsetsSorted.length) {
-      addFinalJumpIfNecessary();
-      break;
-    }
-    if (spread > MAX_SPREAD_IN_SECONDS) {
-      considerJump();
-    } else {
-      increaseIndex();
-    }
-  }
-  return jumpMarks;
-};
 var postprocessBytes = ({
   bytes,
   bigEndian,
@@ -31648,52 +31661,53 @@ var parseMdatSection = async (state) => {
       endOfMdat,
       state
     });
+    const tracksFromMoov = getTracksFromMoovBox(moov);
     state.iso.moov.setMoovBox({
       moovBox: moov,
       precomputed: false
     });
+    const existingTracks = state.callbacks.tracks.getTracks();
+    for (const trackFromMoov of tracksFromMoov) {
+      if (existingTracks.find((t) => t.trackId === trackFromMoov.trackId)) {
+        continue;
+      }
+      if (trackFromMoov.type === "other") {
+        continue;
+      }
+      state.callbacks.tracks.addTrack(trackFromMoov);
+    }
     state.callbacks.tracks.setIsDone(state.logLevel);
     state.structure.getIsoStructure().boxes.push(moov);
     return parseMdatSection(state);
   }
+  const tracks2 = state.callbacks.tracks.getTracks();
   if (!state.iso.flatSamples.getSamples(mediaSection.start)) {
-    const {
-      flatSamples: flatSamplesMap,
-      offsets,
-      trackIds
-    } = calculateFlatSamples({
+    const samplePosition = calculateSamplePositions({
       state,
-      mediaSectionStart: mediaSection.start
+      mediaSectionStart: mediaSection.start,
+      trackIds: tracks2.map((t) => t.trackId)
     });
-
-      sampleMap: flatSamplesMap,
-      offsetsSorted: offsets,
-      trackIds,
-      endOfMdat
-    });
-    state.iso.flatSamples.setJumpMarks(mediaSection.start, calcedJumpMarks);
-    state.iso.flatSamples.setSamples(mediaSection.start, flatSamplesMap);
+    state.iso.flatSamples.setSamples(mediaSection.start, samplePosition);
   }
-  const
-  const
-  const
-
-
-  const offsets = Array.from(flatSamples.keys());
-  const nextSample_ = offsets.filter((s) => s > iterator.counter.getOffset()).sort((a, b) => a - b)[0];
-  if (nextSample_) {
-    iterator.discard(nextSample_ - iterator.counter.getOffset());
-    return null;
-  }
-  Log.verbose(state.logLevel, "Could not find sample at offset", iterator.counter.getOffset(), "skipping to end of mdat");
+  const samplePositions = state.iso.flatSamples.getSamples(mediaSection.start);
+  const sampleIndices = state.iso.flatSamples.getCurrentSampleIndices(mediaSection.start);
+  const nextSampleArray = getSampleWithLowestDts(samplePositions, sampleIndices);
+  if (nextSampleArray.length === 0) {
+    Log.verbose(state.logLevel, "Iterated over all samples.", endOfMdat);
     return makeSkip(endOfMdat);
   }
-
-
+  const exactMatch = nextSampleArray.find((s) => s.samplePosition.offset === state.iterator.counter.getOffset());
+  const nextSample = exactMatch ?? nextSampleArray[0];
+  if (nextSample.samplePosition.offset !== state.iterator.counter.getOffset()) {
+    return makeSkip(nextSample.samplePosition.offset);
+  }
+  if (nextSample.samplePosition.offset + nextSample.samplePosition.size > state.contentLength) {
+    Log.verbose(state.logLevel, "Sample is beyond the end of the file. Don't process it.", nextSample.samplePosition.offset + nextSample.samplePosition.size, endOfMdat);
     return makeSkip(endOfMdat);
   }
-
-
+  const { iterator } = state;
+  if (iterator.bytesRemaining() < nextSample.samplePosition.size) {
+    return makeFetchMoreData(nextSample.samplePosition.size - iterator.bytesRemaining());
   }
   const {
     timestamp: rawCts,
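
Note: with the jump-mark machinery removed, the mdat loop above asks getSampleWithLowestDts which track's pending sample to emit next and skips straight to its offset. A simplified stand-alone illustration with made-up data (it condenses getSampleWithLowestDts plus the `nextSampleArray[0]` pick into a single lowest-DTS choice, so it sketches the intent rather than the exact code path):

// Two tracks with their per-track read positions; entries mirror the
// { trackId, samplePositions } shape, each sample carrying offset/decodingTimestamp.
const tracks = [
  { trackId: 1, samplePositions: [{ offset: 1000, decodingTimestamp: 0 }, { offset: 1500, decodingTimestamp: 512 }] },
  { trackId: 2, samplePositions: [{ offset: 1200, decodingTimestamp: 256 }] }
];
const currentIndices = { 1: 1, 2: 0 };

let best = null;
for (const track of tracks) {
  const index = currentIndices[track.trackId] ?? 0;
  const sample = track.samplePositions[index];
  if (sample && (!best || sample.decodingTimestamp <= best.samplePosition.decodingTimestamp)) {
    best = { samplePosition: sample, trackId: track.trackId, index };
  }
}
console.log(best);
// -> track 2's sample at offset 1200 (dts 256) is read before track 1's sample at dts 512;
//    after emitting a sample, the loop advances that track's index via setCurrentSampleIndex.
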
@@ -31703,21 +31717,22 @@ var parseMdatSection = async (state) => {
     offset,
     bigEndian,
     chunkSize
-  } =
+  } = nextSample.samplePosition;
+  const track = tracks2.find((t) => t.trackId === nextSample.trackId);
   const {
     originalTimescale,
     startInSeconds,
     trackMediaTimeOffsetInTrackTimescale,
     timescale: trackTimescale
-  } =
+  } = track;
   const cts = rawCts + startInSeconds * originalTimescale - trackMediaTimeOffsetInTrackTimescale / trackTimescale * WEBCODECS_TIMESCALE;
   const dts = rawDts + startInSeconds * originalTimescale - trackMediaTimeOffsetInTrackTimescale / trackTimescale * WEBCODECS_TIMESCALE;
   const bytes = postprocessBytes({
-    bytes: iterator.getSlice(
+    bytes: iterator.getSlice(nextSample.samplePosition.size),
     bigEndian,
     chunkSize
   });
-  if (
+  if (track.type === "audio") {
     const audioSample = convertAudioOrVideoSampleToWebCodecsTimestamps({
       sample: {
         data: bytes,
@@ -31731,10 +31746,10 @@ var parseMdatSection = async (state) => {
     });
     await state.callbacks.onAudioSample({
       audioSample,
-      trackId:
+      trackId: track.trackId
     });
   }
-  if (
+  if (track.type === "video") {
     const nalUnitType = bytes[4] & 31;
     let isRecoveryPoint = false;
     if (nalUnitType === 6) {
@@ -31754,14 +31769,10 @@ var parseMdatSection = async (state) => {
     });
     await state.callbacks.onVideoSample({
       videoSample,
-      trackId:
+      trackId: track.trackId
     });
   }
-
-  if (jump) {
-    Log.verbose(state.logLevel, "Found jump mark", jump.jumpToOffset, "skipping to jump mark");
-    return makeSkip(jump.jumpToOffset);
-  }
+  state.iso.flatSamples.setCurrentSampleIndex(mediaSection.start, nextSample.trackId, nextSample.index + 1);
   return null;
 };
 var parseIsoBaseMedia = async (state) => {
@@ -33162,10 +33173,7 @@ var innerParseMp3PacketHeader = (iterator) => {
     throw new Error("Expected Layer I, II or III");
   }
   const layer = layerBits === 3 ? 1 : layerBits === 2 ? 2 : 3;
-
-  if (protectionBit !== 1) {
-    throw new Error("Does not support CRC yet");
-  }
+  iterator.getBits(1);
   const bitrateIndex = iterator.getBits(4);
   const bitrateInKbit = getBitrateKB({
     bits: bitrateIndex,
@@ -35179,7 +35187,7 @@ var parseWav = (state) => {
   if (type === "id3") {
     return parseId32({ state });
   }
-  if (type === "junk" || type === "fllr") {
+  if (type === "junk" || type === "fllr" || type === "bext") {
     return parseJunk({ state });
   }
   if (type === "fact") {
@@ -35656,7 +35664,8 @@ var parseLoop = async ({
       fields: state.fields,
       src: state.src,
       discardReadBytes: state.discardReadBytes,
-      prefetchCache: state.prefetchCache
+      prefetchCache: state.prefetchCache,
+      isoState: state.iso
     });
     state.timings.timeSeeking += Date.now() - seekStart;
   }