@remotion/media-parser 4.0.311 → 4.0.313
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/containers/aac/get-seeking-byte.js +5 -1
- package/dist/containers/flac/get-seeking-byte.d.ts +2 -1
- package/dist/containers/flac/get-seeking-byte.js +1 -1
- package/dist/containers/iso-base-media/base-media-box.d.ts +3 -2
- package/dist/containers/iso-base-media/collect-sample-positions-from-moof-boxes.d.ts +3 -1
- package/dist/containers/iso-base-media/collect-sample-positions-from-moof-boxes.js +2 -1
- package/dist/containers/iso-base-media/find-keyframe-before-time.d.ts +1 -1
- package/dist/containers/iso-base-media/find-keyframe-before-time.js +1 -1
- package/dist/containers/iso-base-media/find-track-to-seek.js +2 -0
- package/dist/containers/iso-base-media/get-keyframes.js +1 -0
- package/dist/containers/iso-base-media/get-sample-positions-from-track.d.ts +3 -1
- package/dist/containers/iso-base-media/get-sample-positions-from-track.js +2 -1
- package/dist/containers/iso-base-media/get-seeking-byte-from-fragmented-mp4.js +4 -1
- package/dist/containers/iso-base-media/get-seeking-byte.js +3 -1
- package/dist/containers/iso-base-media/moov/mvhd.d.ts +30 -0
- package/dist/containers/iso-base-media/moov/mvhd.js +65 -0
- package/dist/containers/iso-base-media/moov/trex.d.ts +16 -0
- package/dist/containers/iso-base-media/moov/trex.js +27 -0
- package/dist/containers/iso-base-media/process-box.js +10 -1
- package/dist/containers/iso-base-media/tkhd.d.ts +1 -1
- package/dist/containers/iso-base-media/traversal.d.ts +4 -1
- package/dist/containers/iso-base-media/traversal.js +18 -1
- package/dist/containers/m3u/get-seeking-byte.js +2 -0
- package/dist/containers/mp3/get-seeking-byte.js +4 -1
- package/dist/containers/riff/get-seeking-byte.js +3 -0
- package/dist/containers/wav/get-seeking-byte.js +1 -0
- package/dist/containers/wav/parse-list.js +4 -3
- package/dist/containers/webm/seek/get-seeking-byte.js +21 -6
- package/dist/controller/media-parser-controller.d.ts +3 -0
- package/dist/controller/media-parser-controller.js +15 -0
- package/dist/esm/index.mjs +327 -156
- package/dist/esm/server-worker.mjs +17 -0
- package/dist/esm/worker-server-entry.mjs +341 -155
- package/dist/esm/worker-web-entry.mjs +341 -155
- package/dist/esm/worker.mjs +28 -0
- package/dist/get-duration.js +1 -0
- package/dist/get-seeking-byte.js +13 -2
- package/dist/index.cjs +54 -0
- package/dist/index.d.ts +3 -2
- package/dist/index.js +1 -1
- package/dist/internal-parse-media.js +25 -0
- package/dist/iterator/buffer-iterator.d.ts +1 -1
- package/dist/iterator/buffer-manager.d.ts +1 -1
- package/dist/iterator/buffer-manager.js +19 -5
- package/dist/parse-media-on-worker-entry.js +17 -0
- package/dist/samples-from-moof.d.ts +3 -1
- package/dist/samples-from-moof.js +15 -12
- package/dist/state/iso-base-media/cached-sample-positions.js +1 -0
- package/dist/state/iso-base-media/lazy-mfra-load.js +1 -1
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/dist/webcodec-sample-types.d.ts +2 -2
- package/dist/work-on-seek-request.d.ts +22 -0
- package/dist/work-on-seek-request.js +3 -2
- package/dist/worker/forward-controller-to-worker.js +18 -0
- package/dist/worker/worker-types.d.ts +13 -2
- package/package.json +3 -3
package/dist/esm/index.mjs
CHANGED
@@ -1164,14 +1164,28 @@ var Log = {
 };
 
 // src/iterator/buffer-manager.ts
+var makeBufferWithMaxBytes = (initialData, maxBytes) => {
+  const maxByteLength = Math.min(maxBytes, 2 ** 31);
+  try {
+    const buf = new ArrayBuffer(initialData.byteLength, {
+      maxByteLength
+    });
+    return buf;
+  } catch (e) {
+    if (e instanceof RangeError && maxBytes > 2 ** 27) {
+      return new ArrayBuffer(initialData.byteLength, {
+        maxByteLength: 2 ** 27
+      });
+    }
+    throw e;
+  }
+};
 var bufferManager = ({
   initialData,
   maxBytes,
   counter
 }) => {
-  const buf = new ArrayBuffer(initialData.byteLength, {
-    maxByteLength: maxBytes === null ? initialData.byteLength : Math.min(maxBytes, 2 ** 31)
-  });
+  const buf = makeBufferWithMaxBytes(initialData, maxBytes);
   if (!buf.resize) {
     throw new Error("`ArrayBuffer.resize` is not supported in this Runtime. On the server: Use at least Node.js 20 or Bun. In the browser: Chrome 111, Edge 111, Safari 16.4, Firefox 128, Opera 111");
   }
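Note on the hunk above: `makeBufferWithMaxBytes` still allocates a resizable ArrayBuffer, but now degrades gracefully when a runtime rejects a large `maxByteLength` with a RangeError, retrying with a 128 MiB (2 ** 27) cap. A minimal standalone sketch of the same pattern in TypeScript (`allocateResizable` is an illustrative name, not part of the package):

// Try the caller's cap (at most 2 ** 31 bytes); if the runtime throws a
// RangeError for that maxByteLength, retry with a 128 MiB (2 ** 27) cap.
const allocateResizable = (initialByteLength: number, maxBytes: number): ArrayBuffer => {
  const maxByteLength = Math.min(maxBytes, 2 ** 31);
  try {
    return new ArrayBuffer(initialByteLength, {maxByteLength});
  } catch (err) {
    if (err instanceof RangeError && maxBytes > 2 ** 27) {
      return new ArrayBuffer(initialByteLength, {maxByteLength: 2 ** 27});
    }
    throw err;
  }
};

const buf = allocateResizable(1024, 2 ** 31);
buf.resize(2048); // grows in place, no copy (requires resizable-ArrayBuffer support)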
@@ -1721,7 +1735,7 @@ var toUnixTimestamp = (value) => {
   return Math.floor(value + baseDate.getTime() / 1000) * 1000;
 };
 
-// src/containers/iso-base-media/mvhd.ts
+// src/containers/iso-base-media/moov/mvhd.ts
 var parseMvhd = ({
   iterator,
   offset,
@@ -2287,6 +2301,21 @@ var getTrunBoxes = (segment) => {
   const trunBoxes = segment.children.filter((c) => c.type === "trun-box");
   return trunBoxes;
 };
+var getMvexBox = (moovAtom) => {
+  const mvexBox = moovAtom.children.find((s) => s.type === "regular-box" && s.boxType === "mvex");
+  if (!mvexBox || mvexBox.type !== "regular-box") {
+    return null;
+  }
+  return mvexBox;
+};
+var getTrexBoxes = (moovAtom) => {
+  const mvexBox = getMvexBox(moovAtom);
+  if (!mvexBox) {
+    return [];
+  }
+  const trexBoxes = mvexBox.children.filter((c) => c.type === "trex-box");
+  return trexBoxes;
+};
 var getTfraBoxesFromMfraBoxChildren = (mfraBoxChildren) => {
   const tfraBoxes = mfraBoxChildren.filter((b) => b.type === "tfra-box");
   return tfraBoxes;
@@ -4498,6 +4527,34 @@ var parseMoov = async ({
   };
 };
 
+// src/containers/iso-base-media/moov/trex.ts
+var parseTrex = ({
+  iterator,
+  offset,
+  size
+}) => {
+  const box = iterator.startBox(size - 8);
+  const version = iterator.getUint8();
+  iterator.discard(3);
+  const trackId = iterator.getUint32();
+  const defaultSampleDescriptionIndex = iterator.getUint32();
+  const defaultSampleDuration = iterator.getUint32();
+  const defaultSampleSize = iterator.getUint32();
+  const defaultSampleFlags = iterator.getUint32();
+  box.expectNoMoreBytes();
+  return {
+    type: "trex-box",
+    boxSize: size,
+    offset,
+    trackId,
+    version,
+    defaultSampleDescriptionIndex,
+    defaultSampleDuration,
+    defaultSampleSize,
+    defaultSampleFlags
+  };
+};
+
 // src/containers/iso-base-media/stsd/av1c.ts
 var parseAv1C = ({
   data,
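For reference, the trex ("track extends") box parsed above has a fixed layout in ISO/IEC 14496-12: one version byte, three flag bytes, then five big-endian uint32 fields. A standalone sketch of the same layout over a DataView (`readTrex` is an illustrative name; the view is assumed to start right after the 8-byte size/type box header):

// Field order per ISO/IEC 14496-12, mirroring parseTrex above.
const readTrex = (payload: DataView) => {
  const version = payload.getUint8(0); // byte 0: version; bytes 1-3: flags
  return {
    version,
    trackId: payload.getUint32(4),
    defaultSampleDescriptionIndex: payload.getUint32(8),
    defaultSampleDuration: payload.getUint32(12),
    defaultSampleSize: payload.getUint32(16),
    defaultSampleFlags: payload.getUint32(20), // DataView reads big-endian by default
  };
};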
@@ -5583,10 +5640,16 @@ var processBox = async ({
       })
     };
   }
+  if (boxType === "trex") {
+    return {
+      type: "box",
+      box: await parseTrex({ iterator, offset: fileOffset, size: boxSize })
+    };
+  }
   if (boxType === "moof") {
     await onlyIfMoovAtomExpected?.isoState?.mfra.triggerLoad();
   }
-  if (boxType === "mdia" || boxType === "minf" || boxType === "stbl" || boxType === "udta" || boxType === "moof" || boxType === "dims" || boxType === "meta" || boxType === "wave" || boxType === "traf" || boxType === "mfra" || boxType === "edts" || boxType === "stsb") {
+  if (boxType === "mdia" || boxType === "minf" || boxType === "stbl" || boxType === "udta" || boxType === "moof" || boxType === "dims" || boxType === "meta" || boxType === "wave" || boxType === "traf" || boxType === "mfra" || boxType === "edts" || boxType === "mvex" || boxType === "stsb") {
     const children = await getIsoBaseMediaChildren({
       iterator,
       size: boxSize - 8,
@@ -5606,6 +5669,7 @@ var processBox = async ({
     };
   }
   iterator.discard(boxSize - 8);
+  Log.verbose(logLevel, "Unknown ISO Base Media Box:", boxType);
   return {
     type: "box",
     box: {
@@ -6860,24 +6924,38 @@ var mediaParserController = () => {
     await pauseSignal.waitUntilResume();
   };
   let seekingHintResolution = null;
+  let simulateSeekResolution = null;
   const getSeekingHints = () => {
     if (!seekingHintResolution) {
       throw new Error("The mediaParserController() was not yet used in a parseMedia() call");
     }
     return seekingHintResolution();
   };
+  const simulateSeek = (seekInSeconds) => {
+    if (!simulateSeekResolution) {
+      throw new Error("The mediaParserController() was not yet used in a parseMedia() call");
+    }
+    return simulateSeekResolution(seekInSeconds);
+  };
   const attachSeekingHintResolution = (callback) => {
     if (seekingHintResolution) {
       throw new Error("The mediaParserController() was used in multiple parseMedia() calls. Create a separate controller for each call.");
     }
     seekingHintResolution = callback;
   };
+  const attachSimulateSeekResolution = (callback) => {
+    if (simulateSeekResolution) {
+      throw new Error("The mediaParserController() was used in multiple parseMedia() calls. Create a separate controller for each call.");
+    }
+    simulateSeekResolution = callback;
+  };
   return {
     abort: (reason) => {
       abortController.abort(reason);
       emitter.dispatchAbort(reason);
     },
     seek: seekSignal.seek,
+    simulateSeek,
     pause: pauseSignal.pause,
     resume: pauseSignal.resume,
     addEventListener: emitter.addEventListener,
@@ -6889,7 +6967,8 @@ var mediaParserController = () => {
       seekSignal,
       markAsReadyToEmitEvents: emitter.markAsReady,
       performedSeeksSignal,
-      attachSeekingHintResolution
+      attachSeekingHintResolution,
+      attachSimulateSeekResolution
     }
   };
 };
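The controller now exposes `simulateSeek(seekInSeconds)` next to `seek()`: it resolves to where a seek would land without actually performing it, and, like `getSeekingHints()`, throws if the controller has not yet been used in a `parseMedia()` call. A hedged usage sketch (the src URL is illustrative, and the resolved shape shown in the comment is inferred from the do-seek objects in the hunks further down):

import {mediaParserController, parseMedia} from '@remotion/media-parser';

const controller = mediaParserController();

// Start a parse that the controller is attached to.
const parsePromise = parseMedia({
  src: 'https://example.com/video.mp4', // illustrative URL
  controller,
  fields: {durationInSeconds: true},
});

// Ask where a seek to t = 10s would land, without moving the read head.
// For a seekable time this resolves to something like
// {type: 'do-seek', byte, timeInSeconds}.
const result = await controller.simulateSeek(10);
console.log(result);

await parsePromise;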
@@ -7070,14 +7149,15 @@ var areSamplesComplete = ({
 };
 
 // src/samples-from-moof.ts
-var getSamplesFromTraf = (trafSegment, moofOffset) => {
+var getSamplesFromTraf = (trafSegment, moofOffset, trexBoxes) => {
   if (trafSegment.type !== "regular-box" || trafSegment.boxType !== "traf") {
     throw new Error("Expected traf-box");
   }
   const tfhdBox = getTfhdBox(trafSegment);
-  const
-  const
-  const
+  const trexBox = trexBoxes.find((t) => t.trackId === tfhdBox?.trackId) ?? null;
+  const defaultTrackSampleDuration = tfhdBox?.defaultSampleDuration || trexBox?.defaultSampleDuration || null;
+  const defaultTrackSampleSize = tfhdBox?.defaultSampleSize || trexBox?.defaultSampleSize || null;
+  const defaultTrackSampleFlags = tfhdBox?.defaultSampleFlags ?? trexBox?.defaultSampleFlags ?? null;
   const tfdtBox = getTfdtBox(trafSegment);
   const trunBoxes = getTrunBoxes(trafSegment);
   let time = 0;
@@ -7092,16 +7172,16 @@ var getSamplesFromTraf = (trafSegment, moofOffset) => {
     }
     for (const sample of trunBox.samples) {
       i++;
-      const duration2 = sample.sampleDuration
+      const duration2 = sample.sampleDuration || defaultTrackSampleDuration;
       if (duration2 === null) {
         throw new Error("Expected duration");
       }
-      const size = sample.sampleSize ??
+      const size = sample.sampleSize ?? defaultTrackSampleSize;
       if (size === null) {
         throw new Error("Expected size");
       }
       const isFirstSample = i === 0;
-      const sampleFlags = sample.sampleFlags ? sample.sampleFlags : isFirstSample && trunBox.firstSampleFlags !== null ? trunBox.firstSampleFlags :
+      const sampleFlags = sample.sampleFlags ? sample.sampleFlags : isFirstSample && trunBox.firstSampleFlags !== null ? trunBox.firstSampleFlags : defaultTrackSampleFlags;
       if (sampleFlags === null) {
         throw new Error("Expected sample flags");
       }
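Taken together, the two hunks above give fragmented MP4s a fallback chain per sample field: the explicit value in the trun sample entry wins, then the per-fragment tfhd default, then the movie-wide trex default. Note the asymmetry: duration and size chain with ||, so a 0 falls through to the next candidate, while flags chain with ?? because 0 is a meaningful flags value. A reduced sketch of just the default-merging step (function names are illustrative):

// Duration/size: || lets a 0 default fall through to the next candidate.
const mergeDurationDefaults = (tfhd: number | null, trex: number | null): number | null =>
  tfhd || trex || null;

// Flags: ?? keeps a legitimate 0; only null/undefined falls through.
const mergeFlagsDefaults = (tfhd: number | null, trex: number | null): number | null =>
  tfhd ?? trex ?? null;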
@@ -7127,14 +7207,15 @@ var getSamplesFromTraf = (trafSegment, moofOffset) => {
 };
 var getSamplesFromMoof = ({
   moofBox,
-  trackId
+  trackId,
+  trexBoxes
 }) => {
   const mapped = moofBox.trafBoxes.map((traf) => {
     const tfhdBox = getTfhdBox(traf);
     if (!tfhdBox || tfhdBox.trackId !== trackId) {
       return [];
     }
-    return getSamplesFromTraf(traf, moofBox.offset);
+    return getSamplesFromTraf(traf, moofBox.offset, trexBoxes);
   });
   return mapped.flat(1);
 };
@@ -7143,7 +7224,8 @@ var getSamplesFromMoof = ({
 var collectSamplePositionsFromMoofBoxes = ({
   moofBoxes,
   tkhdBox,
-  isComplete
+  isComplete,
+  trexBoxes
 }) => {
   const samplePositions = moofBoxes.map((m, index) => {
     const isLastFragment = index === moofBoxes.length - 1 && isComplete;
@@ -7151,7 +7233,8 @@ var collectSamplePositionsFromMoofBoxes = ({
       isLastFragment,
       samples: getSamplesFromMoof({
         moofBox: m,
-        trackId: tkhdBox.trackId
+        trackId: tkhdBox.trackId,
+        trexBoxes
       })
     };
   });
@@ -7324,7 +7407,8 @@ var collectSamplePositionsFromTrak = (trakBox) => {
 var getSamplePositionsFromTrack = ({
   trakBox,
   moofBoxes,
-  moofComplete
+  moofComplete,
+  trexBoxes
 }) => {
   const tkhdBox = getTkhdBox(trakBox);
   if (!tkhdBox) {
@@ -7334,7 +7418,8 @@ var getSamplePositionsFromTrack = ({
     const { samplePositions } = collectSamplePositionsFromMoofBoxes({
       moofBoxes,
       tkhdBox,
-      isComplete: moofComplete
+      isComplete: moofComplete,
+      trexBoxes
     });
     return {
       samplePositions: samplePositions.map((s) => s.samples).flat(1),
@@ -7620,7 +7705,8 @@ var getDurationFromIsoBaseMedia = (parserState) => {
     const { samplePositions, isComplete } = getSamplePositionsFromTrack({
       trakBox,
       moofBoxes,
-      moofComplete: areSamplesComplete({ moofBoxes, tfraBoxes })
+      moofComplete: areSamplesComplete({ moofBoxes, tfraBoxes }),
+      trexBoxes: getTrexBoxes(moovBox)
     });
     if (!isComplete) {
       return null;
@@ -7726,7 +7812,8 @@ var getKeyframesFromIsoBaseMedia = (state) => {
     moofComplete: areSamplesComplete({
       moofBoxes,
       tfraBoxes
-    })
+    }),
+    trexBoxes: getTrexBoxes(moov)
   });
   if (!isComplete) {
     return [];
@@ -8102,7 +8189,11 @@ var getSeekingByteForAac = ({
     }
   }
   if (bestAudioSample) {
-    return {
+    return {
+      type: "do-seek",
+      byte: bestAudioSample.offset,
+      timeInSeconds: bestAudioSample.timeInSeconds
+    };
   }
   return { type: "valid-but-must-wait" };
 };
@@ -8129,7 +8220,7 @@ var getSeekingByteForFlac = ({
     }
   }
   if (bestAudioSample) {
-    return bestAudioSample
+    return bestAudioSample;
   }
   return null;
 };
@@ -8167,7 +8258,7 @@ var findKeyframeBeforeTime = ({
     Log.trace(logLevel, "Found a sample, but the offset has not yet been marked as a video section yet. Not yet able to seek, but probably once we have started reading the next box.", videoSample);
     return null;
   }
-  return videoSample
+  return videoSample;
 };
 
 // src/containers/iso-base-media/find-track-to-seek.ts
@@ -8188,7 +8279,8 @@ var findAnyTrackWithSamplePositions = (allTracks, struc) => {
       moofComplete: areSamplesComplete({
         moofBoxes: getMoofBoxes(struc.boxes),
         tfraBoxes: getTfraBoxes(struc.boxes)
-      })
+      }),
+      trexBoxes: getTrexBoxes(moov)
     });
     if (samplePositions.length === 0) {
       continue;
@@ -8218,7 +8310,8 @@ var findTrackToSeek = (allTracks, structure) => {
     moofComplete: areSamplesComplete({
       moofBoxes: getMoofBoxes(struc.boxes),
       tfraBoxes: getTfraBoxes(struc.boxes)
-    })
+    }),
+    trexBoxes: getTrexBoxes(moov)
   });
   if (samplePositions.length === 0) {
     return findAnyTrackWithSamplePositions(allTracks, struc);
@@ -8380,7 +8473,8 @@ var getSeekingByteFromFragmentedMp4 = async ({
   const { samplePositions: samplePositionsArray } = collectSamplePositionsFromMoofBoxes({
     moofBoxes: info.moofBoxes,
     tkhdBox,
-    isComplete
+    isComplete,
+    trexBoxes: getTrexBoxes(moov)
   });
   Log.trace(logLevel, "Fragmented MP4 - Checking if we have seeking info for this time range");
   for (const positions of samplePositionsArray) {
@@ -8398,7 +8492,8 @@ var getSeekingByteFromFragmentedMp4 = async ({
     if (kf) {
       return {
         type: "do-seek",
-        byte: kf
+        byte: kf.offset,
+        timeInSeconds: Math.min(kf.decodingTimestamp, kf.timestamp) / firstTrack.originalTimescale
       };
     }
   }
@@ -8501,7 +8596,8 @@ var getSeekingByteFromIsoBaseMedia = ({
   if (keyframe) {
     return Promise.resolve({
       type: "do-seek",
-      byte: keyframe
+      byte: keyframe.offset,
+      timeInSeconds: Math.min(keyframe.decodingTimestamp, keyframe.timestamp) / track.originalTimescale
     });
   }
   return Promise.resolve({
@@ -8541,7 +8637,8 @@ var getSeekingByteForM3u8 = ({
   }
   return {
     type: "do-seek",
-    byte: currentPosition
+    byte: currentPosition,
+    timeInSeconds: time
   };
 };
 
@@ -8775,9 +8872,12 @@ var getSeekingByteForMp3 = ({
       type: "valid-but-must-wait"
     };
   }
+  const byte = Math.max(...candidates);
+  const timeInSeconds = byte === bestAudioSample?.offset ? bestAudioSample.timeInSeconds : time;
   return {
     type: "do-seek",
-    byte
+    byte,
+    timeInSeconds
   };
 };
 
@@ -8821,7 +8921,8 @@ var getSeekingByteForRiff = async ({
     avcState.clear();
     return {
       type: "do-seek",
-      byte: lastKeyframe.positionInBytes
+      byte: lastKeyframe.positionInBytes,
+      timeInSeconds: Math.min(lastKeyframe.decodingTimeInSeconds, lastKeyframe.presentationTimeInSeconds)
     };
   }
   if (idx1Entries.videoTrackIndex === null) {
@@ -8852,121 +8953,8 @@ var getSeekingByteForRiff = async ({
   avcState.clear();
   return {
     type: "do-seek",
-    byte: bestEntry.offset + info.moviOffset - 4
-
-  };
-
-// src/containers/wav/get-seeking-byte.ts
-var WAVE_SAMPLES_PER_SECOND = 25;
-var getSeekingByteFromWav = ({
-  info,
-  time
-}) => {
-  const bytesPerSecond = info.sampleRate * info.blockAlign;
-  const durationInSeconds = info.mediaSection.size / bytesPerSecond;
-  const timeRoundedDown = Math.floor(Math.min(time, durationInSeconds - 0.0000001) * WAVE_SAMPLES_PER_SECOND) / WAVE_SAMPLES_PER_SECOND;
-  const byteOffset = bytesPerSecond * timeRoundedDown;
-  return Promise.resolve({
-    type: "do-seek",
-    byte: byteOffset + info.mediaSection.start
-  });
-};
-
-// src/containers/webm/seek/get-seeking-byte.ts
-var toSeconds = (timeInTimescale, track) => {
-  return timeInTimescale / track.timescale * 1000;
-};
-var findBiggestCueBeforeTime = ({
-  cues,
-  time,
-  track
-}) => {
-  let biggestCueBeforeTime;
-  for (const cue of cues) {
-    const cueTimeInSeconds = toSeconds(cue.timeInTimescale, track);
-    if (cueTimeInSeconds < time && (!biggestCueBeforeTime || cueTimeInSeconds > toSeconds(biggestCueBeforeTime.timeInTimescale, track))) {
-      biggestCueBeforeTime = cue;
-    }
-  }
-  return biggestCueBeforeTime;
-};
-var findKeyframeBeforeTime2 = ({
-  keyframes,
-  time
-}) => {
-  let keyframeBeforeTime;
-  for (const keyframe of keyframes) {
-    if (keyframe.decodingTimeInSeconds < time && (!keyframeBeforeTime || keyframe.decodingTimeInSeconds > keyframeBeforeTime.decodingTimeInSeconds)) {
-      keyframeBeforeTime = keyframe;
-    }
-  }
-  return keyframeBeforeTime?.positionInBytes ?? null;
-};
-var getByteFromCues = ({
-  cuesResponse,
-  time,
-  info,
-  logLevel
-}) => {
-  if (!cuesResponse) {
-    Log.trace(logLevel, "Has no Matroska cues at the moment, cannot use them");
-    return null;
-  }
-  const { cues, segmentOffset } = cuesResponse;
-  Log.trace(logLevel, "Has Matroska cues. Will use them to perform a seek.");
-  const biggestCueBeforeTime = findBiggestCueBeforeTime({
-    cues,
-    time,
-    track: info.track
-  });
-  if (!biggestCueBeforeTime) {
-    return null;
-  }
-  return biggestCueBeforeTime.clusterPositionInSegment + segmentOffset;
-};
-var getSeekingByteFromMatroska = async ({
-  time,
-  webmState,
-  info,
-  logLevel,
-  mediaSection
-}) => {
-  if (!info.track) {
-    Log.trace(logLevel, "No video track found, cannot seek yet");
-    return {
-      type: "valid-but-must-wait"
-    };
-  }
-  const cuesResponse = info.loadedCues ?? await webmState.cues.getLoadedCues();
-  const byteFromObservedKeyframe = findKeyframeBeforeTime2({
-    keyframes: info.keyframes,
-    time
-  });
-  const byteFromCues = getByteFromCues({
-    cuesResponse,
-    time,
-    info,
-    logLevel
-  });
-  const byteFromFirstMediaSection = webmState.getFirstCluster()?.start ?? null;
-  const seekPossibilities = [
-    byteFromCues,
-    byteFromObservedKeyframe,
-    byteFromFirstMediaSection
-  ].filter((n) => n !== null);
-  const byteToSeekTo = seekPossibilities.length === 0 ? null : Math.max(...seekPossibilities);
-  if (byteToSeekTo === null) {
-    return {
-      type: "invalid"
-    };
-  }
-  mediaSection.addMediaSection({
-    start: byteToSeekTo,
-    size: 1
-  });
-  return {
-    type: "do-seek",
-    byte: byteToSeekTo
+    byte: bestEntry.offset + info.moviOffset - 4,
+    timeInSeconds: bestEntry.sampleCounts[idx1Entries.videoTrackIndex] / info.samplesPerSecond
   };
 };
 
@@ -9137,6 +9125,137 @@ var handleAvcPacket = async ({
   transportStream.lastEmittedSample.setLastEmittedSample(sample);
 };
 
+// src/containers/wav/get-seeking-byte.ts
+var WAVE_SAMPLES_PER_SECOND = 25;
+var getSeekingByteFromWav = ({
+  info,
+  time
+}) => {
+  const bytesPerSecond = info.sampleRate * info.blockAlign;
+  const durationInSeconds = info.mediaSection.size / bytesPerSecond;
+  const timeRoundedDown = Math.floor(Math.min(time, durationInSeconds - 0.0000001) * WAVE_SAMPLES_PER_SECOND) / WAVE_SAMPLES_PER_SECOND;
+  const byteOffset = bytesPerSecond * timeRoundedDown;
+  return Promise.resolve({
+    type: "do-seek",
+    byte: byteOffset + info.mediaSection.start,
+    timeInSeconds: timeRoundedDown
+  });
+};
+
+// src/containers/webm/seek/get-seeking-byte.ts
+var toSeconds = (timeInTimescale, track) => {
+  return timeInTimescale / track.timescale * 1000;
+};
+var findBiggestCueBeforeTime = ({
+  cues,
+  time,
+  track
+}) => {
+  let biggestCueBeforeTime;
+  for (const cue of cues) {
+    const cueTimeInSeconds = toSeconds(cue.timeInTimescale, track);
+    if (cueTimeInSeconds < time && (!biggestCueBeforeTime || cueTimeInSeconds > toSeconds(biggestCueBeforeTime.timeInTimescale, track))) {
+      biggestCueBeforeTime = cue;
+    }
+  }
+  return biggestCueBeforeTime;
+};
+var findKeyframeBeforeTime2 = ({
+  keyframes,
+  time
+}) => {
+  let keyframeBeforeTime;
+  for (const keyframe of keyframes) {
+    if (keyframe.decodingTimeInSeconds < time && (!keyframeBeforeTime || keyframe.decodingTimeInSeconds > keyframeBeforeTime.decodingTimeInSeconds)) {
+      keyframeBeforeTime = keyframe;
+    }
+  }
+  return keyframeBeforeTime ?? null;
+};
+var getByteFromCues = ({
+  cuesResponse,
+  time,
+  info,
+  logLevel
+}) => {
+  if (!cuesResponse) {
+    Log.trace(logLevel, "Has no Matroska cues at the moment, cannot use them");
+    return null;
+  }
+  const { cues, segmentOffset } = cuesResponse;
+  Log.trace(logLevel, "Has Matroska cues. Will use them to perform a seek.");
+  const biggestCueBeforeTime = findBiggestCueBeforeTime({
+    cues,
+    time,
+    track: info.track
+  });
+  if (!biggestCueBeforeTime) {
+    return null;
+  }
+  return {
+    byte: biggestCueBeforeTime.clusterPositionInSegment + segmentOffset,
+    timeInSeconds: toSeconds(biggestCueBeforeTime.timeInTimescale, info.track)
+  };
+};
+var getSeekingByteFromMatroska = async ({
+  time,
+  webmState,
+  info,
+  logLevel,
+  mediaSection
+}) => {
+  if (!info.track) {
+    Log.trace(logLevel, "No video track found, cannot seek yet");
+    return {
+      type: "valid-but-must-wait"
+    };
+  }
+  const cuesResponse = info.loadedCues ?? await webmState.cues.getLoadedCues();
+  const byteFromObservedKeyframe = findKeyframeBeforeTime2({
+    keyframes: info.keyframes,
+    time
+  });
+  const byteFromCues = getByteFromCues({
+    cuesResponse,
+    time,
+    info,
+    logLevel
+  });
+  const byteFromFirstMediaSection = webmState.getFirstCluster()?.start ?? null;
+  const seekPossibilities = [
+    byteFromCues?.byte ?? null,
+    byteFromObservedKeyframe?.positionInBytes ?? null,
+    byteFromFirstMediaSection
+  ].filter((n) => n !== null);
+  const byteToSeekTo = seekPossibilities.length === 0 ? null : Math.max(...seekPossibilities);
+  if (byteToSeekTo === null) {
+    return {
+      type: "invalid"
+    };
+  }
+  mediaSection.addMediaSection({
+    start: byteToSeekTo,
+    size: 1
+  });
+  const timeInSeconds = (() => {
+    if (byteToSeekTo === byteFromObservedKeyframe?.positionInBytes) {
+      return Math.min(byteFromObservedKeyframe.decodingTimeInSeconds, byteFromObservedKeyframe.presentationTimeInSeconds);
+    }
+    if (byteToSeekTo === byteFromCues?.byte) {
+      return byteFromCues.timeInSeconds;
+    }
+    if (byteToSeekTo === byteFromFirstMediaSection) {
+      return 0;
+    }
+    throw new Error("Should not happen");
+  })();
+  return {
+    type: "do-seek",
+    byte: byteToSeekTo,
+    timeInSeconds
+  };
+};
+
 // src/state/transport-stream/observed-pes-header.ts
 var makeObservedPesHeader = () => {
   const pesHeaders = [];
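The WAV and Matroska seeking code re-added above (it moved below the AVC packet handler) now also reports the time the chosen byte corresponds to. The WAV arithmetic is dense, so here it is with assumed concrete numbers (48 kHz, 16-bit stereo, hence blockAlign = 4):

// bytesPerSecond = sampleRate * blockAlign = 48000 * 4 = 192000.
// A seek to t = 3.07s snaps down to the 1/25s grid defined by
// WAVE_SAMPLES_PER_SECOND: floor(3.07 * 25) / 25 = 76 / 25 = 3.04.
const sampleRate = 48000;
const blockAlign = 4; // assumed: 2 channels * 2 bytes per sample
const bytesPerSecond = sampleRate * blockAlign; // 192000
const timeRoundedDown = Math.floor(3.07 * 25) / 25; // 3.04
const byteOffset = bytesPerSecond * timeRoundedDown; // 583680
// do-seek target = 583680 + mediaSection.start; timeInSeconds = 3.04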
@@ -9226,7 +9345,8 @@ var getSeekingByte = ({
   if (byte) {
     return Promise.resolve({
       type: "do-seek",
-      byte
+      byte: byte.offset,
+      timeInSeconds: byte.timeInSeconds
     });
   }
   return Promise.resolve({
@@ -9239,11 +9359,20 @@ var getSeekingByte = ({
      timeInSeconds: time,
      ptsStartOffset: info.ptsStartOffset
    });
-
+   if (!lastKeyframeBeforeTimeInSeconds) {
+     transportStream.resetBeforeSeek();
+     return Promise.resolve({
+       type: "do-seek",
+       byte: 0,
+       timeInSeconds: 0
+     });
+   }
+   const byte = lastKeyframeBeforeTimeInSeconds.offset;
    transportStream.resetBeforeSeek();
    return Promise.resolve({
      type: "do-seek",
-     byte
+     byte,
+     timeInSeconds: Math.min(lastKeyframeBeforeTimeInSeconds.pts, lastKeyframeBeforeTimeInSeconds.dts ?? Infinity) / MPEG_TIMESCALE
    });
  }
  if (info.type === "riff-seeking-hints") {
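The transport-stream branch above now falls back to byte 0 / t = 0 when no keyframe has been observed before the target time, and otherwise divides the keyframe's timestamps by MPEG_TIMESCALE. Assuming that constant is the standard 90 kHz MPEG clock, the conversion works out as follows:

const MPEG_TIMESCALE = 90000; // assumed: the standard 90 kHz MPEG-TS clock
const pts = 2_700_000;
const dts: number | null = null;
const timeInSeconds = Math.min(pts, dts ?? Infinity) / MPEG_TIMESCALE; // 30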
@@ -11116,7 +11245,8 @@ var calculateFlatSamples = ({
   const { samplePositions } = getSamplePositionsFromTrack({
     trakBox,
     moofBoxes: relevantMoofBox ? [relevantMoofBox] : [],
-    moofComplete
+    moofComplete,
+    trexBoxes: getTrexBoxes(moov)
   });
   return samplePositions.map((samplePosition) => {
     return {
@@ -15039,10 +15169,11 @@ var parseList = ({
   const metadata = [];
   const remainingBytes = () => ckSize - (iterator.counter.getOffset() - startOffset);
   while (remainingBytes() > 0) {
-
-
-
+    const byte = iterator.getUint8();
+    if (byte === 0) {
+      continue;
     }
+    iterator.counter.decrement(1);
     const key = iterator.getByteString(4, false);
     const size = iterator.getUint32Le();
     const value = iterator.getByteString(size, true);
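Context for the parseList hunk above: sub-chunks inside a RIFF LIST are word-aligned, so a stray 0x00 pad byte can precede the next four-character key. The rewritten loop reads one byte, continues if it is NUL, and otherwise rewinds the counter by one so the 4CC key is read from the correct offset. A reduced sketch of the same resynchronization over a plain byte array (the function and cursor shape are illustrative):

// Skip NUL padding, then read a 4-byte key and a little-endian u32 size.
const readChunkHeader = (bytes: Uint8Array, cursor: {pos: number}) => {
  while (bytes[cursor.pos] === 0) {
    cursor.pos++; // padding byte between word-aligned sub-chunks
  }
  const key = String.fromCharCode(...bytes.subarray(cursor.pos, cursor.pos + 4));
  const view = new DataView(bytes.buffer, bytes.byteOffset + cursor.pos + 4, 4);
  const size = view.getUint32(0, true); // little-endian, matching getUint32Le
  cursor.pos += 8;
  return {key, size};
};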
@@ -16071,7 +16202,7 @@ var lazyMfraLoad = ({
     logLevel,
     prefetchCache
   }).then((boxes) => {
-    Log.verbose(logLevel, "Lazily found mfra atom.");
+    Log.verbose(logLevel, boxes ? "Lazily found mfra atom." : "No mfra atom found.");
     result = boxes;
     return boxes;
   });
@@ -17598,6 +17729,46 @@ var internalParseMedia = async function({
     contentLength: state.contentLength,
     aacState: state.aac
   })));
+  controller._internals.attachSimulateSeekResolution((seek2) => {
+    const {
+      aacState: aacState2,
+      avcState: avcState2,
+      flacState: flacState2,
+      isoState,
+      iterator,
+      keyframes,
+      m3uState: m3uState2,
+      mediaSection,
+      mp3State,
+      riffState,
+      samplesObserved,
+      structureState: structureState2,
+      tracksState,
+      transportStream,
+      webmState: webmState2
+    } = getWorkOnSeekRequestOptions(state);
+    return turnSeekIntoByte({
+      aacState: aacState2,
+      seek: seek2,
+      avcState: avcState2,
+      contentLength,
+      flacState: flacState2,
+      isoState,
+      iterator,
+      keyframes,
+      logLevel,
+      m3uPlaylistContext,
+      m3uState: m3uState2,
+      mediaSectionState: mediaSection,
+      mp3State,
+      riffState,
+      samplesObserved,
+      structureState: structureState2,
+      tracksState,
+      transportStream,
+      webmState: webmState2
+    });
+  });
   if (!hasAudioTrackHandlers && !hasVideoTrackHandlers && Object.values(state.fields).every((v) => !v) && mode === "query") {
     Log.warn(logLevel, new Error("Warning - No `fields` and no `on*` callbacks were passed to `parseMedia()`. Specify the data you would like to retrieve."));
   }
@@ -17699,7 +17870,7 @@ var downloadAndParseMedia = async (options) => {
   return returnValue;
 };
 // src/version.ts
-var VERSION = "4.0.311";
+var VERSION = "4.0.313";
 
 // src/index.ts
 var MediaParserInternals = {