@remotion/media-parser 4.0.290 → 4.0.292
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/containers/iso-base-media/base-media-box.d.ts +0 -1
- package/dist/containers/iso-base-media/collect-sample-positions-from-moof-boxes.d.ts +4 -1
- package/dist/containers/iso-base-media/collect-sample-positions-from-moof-boxes.js +9 -5
- package/dist/containers/iso-base-media/find-keyframe-before-time.js +16 -11
- package/dist/containers/iso-base-media/find-track-to-seek.d.ts +14 -0
- package/dist/containers/iso-base-media/find-track-to-seek.js +39 -0
- package/dist/containers/iso-base-media/get-children.js +2 -2
- package/dist/containers/iso-base-media/get-keyframes.js +6 -1
- package/dist/containers/iso-base-media/get-mfra-seeking-box.d.ts +3 -1
- package/dist/containers/iso-base-media/get-mfra-seeking-box.js +5 -1
- package/dist/containers/iso-base-media/get-moov-atom.js +6 -3
- package/dist/containers/iso-base-media/get-sample-position-bounds.js +3 -1
- package/dist/containers/iso-base-media/get-sample-positions-from-track.js +1 -1
- package/dist/containers/iso-base-media/get-seeking-byte-from-fragmented-mp4.d.ts +14 -0
- package/dist/containers/iso-base-media/get-seeking-byte-from-fragmented-mp4.js +89 -0
- package/dist/containers/iso-base-media/get-seeking-byte.d.ts +3 -3
- package/dist/containers/iso-base-media/get-seeking-byte.js +32 -96
- package/dist/containers/iso-base-media/mdat/calculate-jump-marks.d.ts +6 -0
- package/dist/containers/iso-base-media/mdat/calculate-jump-marks.js +131 -0
- package/dist/containers/iso-base-media/mdat/mdat.d.ts +2 -2
- package/dist/containers/iso-base-media/mdat/mdat.js +18 -2
- package/dist/containers/iso-base-media/mfra/find-best-segment-from-tfra.d.ts +3 -3
- package/dist/containers/iso-base-media/mfra/find-best-segment-from-tfra.js +2 -2
- package/dist/containers/iso-base-media/mfra/get-mfra-atom.d.ts +5 -1
- package/dist/containers/iso-base-media/mfra/get-mfra-atom.js +3 -1
- package/dist/containers/iso-base-media/mfra/get-mfro-atom.d.ts +5 -1
- package/dist/containers/iso-base-media/mfra/get-mfro-atom.js +3 -1
- package/dist/containers/iso-base-media/parse-boxes.js +5 -2
- package/dist/containers/iso-base-media/process-box.d.ts +16 -5
- package/dist/containers/iso-base-media/process-box.js +206 -118
- package/dist/containers/iso-base-media/sample-positions.d.ts +25 -0
- package/dist/containers/iso-base-media/sample-positions.js +37 -0
- package/dist/containers/iso-base-media/stsd/samples.js +1 -0
- package/dist/containers/iso-base-media/stsd/stsc.d.ts +1 -6
- package/dist/containers/iso-base-media/stsd/stsc.js +2 -5
- package/dist/containers/iso-base-media/stsd/stss.d.ts +1 -1
- package/dist/containers/iso-base-media/stsd/stss.js +2 -2
- package/dist/containers/iso-base-media/turn-sample-positions-into-array.d.ts +19 -0
- package/dist/containers/iso-base-media/turn-sample-positions-into-array.js +73 -0
- package/dist/containers/m3u/after-manifest-fetch.d.ts +5 -1
- package/dist/containers/m3u/after-manifest-fetch.js +3 -1
- package/dist/containers/m3u/first-sample-in-m3u-chunk.d.ts +13 -0
- package/dist/containers/m3u/first-sample-in-m3u-chunk.js +31 -0
- package/dist/containers/m3u/get-seeking-byte.d.ts +13 -0
- package/dist/containers/m3u/get-seeking-byte.js +32 -0
- package/dist/containers/m3u/get-streams.d.ts +1 -0
- package/dist/containers/m3u/get-streams.js +1 -0
- package/dist/containers/m3u/iterate-over-segment-files.d.ts +5 -3
- package/dist/containers/m3u/iterate-over-segment-files.js +11 -1
- package/dist/containers/m3u/parse-m3u-media-directive.js +1 -0
- package/dist/containers/m3u/parse-m3u.js +8 -0
- package/dist/containers/m3u/process-m3u-chunk.d.ts +12 -0
- package/dist/containers/m3u/process-m3u-chunk.js +274 -0
- package/dist/containers/m3u/run-over-m3u.js +7 -80
- package/dist/containers/m3u/sample-sorter.d.ts +1 -0
- package/dist/containers/m3u/sample-sorter.js +4 -1
- package/dist/containers/m3u/seek/get-chunk-to-seek-to.d.ts +5 -0
- package/dist/containers/m3u/seek/get-chunk-to-seek-to.js +14 -0
- package/dist/containers/m3u/seeking-hints.d.ts +2 -0
- package/dist/containers/m3u/seeking-hints.js +9 -0
- package/dist/containers/m3u/select-stream.d.ts +2 -1
- package/dist/containers/m3u/select-stream.js +7 -2
- package/dist/containers/m3u/types.d.ts +1 -0
- package/dist/containers/riff/seek/fetch-idx1.d.ts +3 -1
- package/dist/containers/riff/seek/fetch-idx1.js +3 -1
- package/dist/containers/transport-stream/handle-aac-packet.d.ts +2 -2
- package/dist/containers/transport-stream/handle-avc-packet.d.ts +2 -2
- package/dist/containers/transport-stream/process-audio.d.ts +2 -2
- package/dist/containers/transport-stream/process-stream-buffers.d.ts +3 -3
- package/dist/containers/transport-stream/process-video.d.ts +2 -2
- package/dist/containers/webm/get-sample-from-block.d.ts +12 -2
- package/dist/containers/webm/get-sample-from-block.js +40 -9
- package/dist/containers/webm/parse-ebml.js +28 -10
- package/dist/containers/webm/seek/fetch-web-cues.d.ts +3 -1
- package/dist/containers/webm/seek/fetch-web-cues.js +3 -1
- package/dist/containers/webm/state-for-processing.d.ts +2 -2
- package/dist/controller/media-parser-controller.d.ts +1 -1
- package/dist/controller/media-parser-controller.js +6 -2
- package/dist/controller/seek-signal.d.ts +1 -5
- package/dist/download-and-parse-media.js +1 -1
- package/dist/esm/index.mjs +1400 -611
- package/dist/esm/node.mjs +23 -3
- package/dist/esm/server-worker.mjs +8 -1
- package/dist/esm/universal.mjs +168 -15
- package/dist/esm/web.mjs +145 -13
- package/dist/esm/worker-server-entry.mjs +1467 -635
- package/dist/esm/worker-web-entry.mjs +1439 -634
- package/dist/esm/worker.mjs +8 -1
- package/dist/get-audio-codec.js +3 -0
- package/dist/get-duration.js +2 -1
- package/dist/get-fps.js +2 -1
- package/dist/get-sample-positions-from-mp4.js +10 -5
- package/dist/get-sample-positions.js +4 -4
- package/dist/get-seeking-byte.d.ts +5 -3
- package/dist/get-seeking-byte.js +19 -10
- package/dist/get-seeking-hints.d.ts +3 -3
- package/dist/get-seeking-hints.js +18 -13
- package/dist/get-tracks.d.ts +9 -1
- package/dist/get-tracks.js +13 -6
- package/dist/index.d.ts +21 -5
- package/dist/init-video.js +3 -2
- package/dist/internal-parse-media.js +13 -4
- package/dist/iterator/buffer-iterator.js +5 -3
- package/dist/metadata/metadata-from-iso.js +2 -1
- package/dist/options.d.ts +6 -1
- package/dist/parse-loop.js +22 -6
- package/dist/parse-media-on-worker-entry.js +1 -0
- package/dist/parse-media.js +1 -1
- package/dist/parse-result.d.ts +2 -2
- package/dist/perform-seek.d.ts +3 -1
- package/dist/perform-seek.js +3 -1
- package/dist/readers/fetch/get-body-and-reader.js +17 -2
- package/dist/readers/from-fetch.d.ts +17 -1
- package/dist/readers/from-fetch.js +68 -13
- package/dist/readers/from-node.js +24 -2
- package/dist/readers/from-web-file.js +3 -0
- package/dist/readers/reader.d.ts +19 -2
- package/dist/readers/universal.js +9 -0
- package/dist/readers/web.js +6 -0
- package/dist/register-track.d.ts +3 -3
- package/dist/seek-backwards.d.ts +3 -1
- package/dist/seek-backwards.js +4 -1
- package/dist/seek-forwards.d.ts +3 -1
- package/dist/seek-forwards.js +3 -1
- package/dist/seeking-hints.d.ts +4 -1
- package/dist/set-seeking-hints.js +4 -0
- package/dist/skip.d.ts +5 -0
- package/dist/skip.js +6 -1
- package/dist/state/can-skip-tracks.d.ts +1 -0
- package/dist/state/can-skip-tracks.js +10 -6
- package/dist/state/iso-base-media/cached-sample-positions.d.ts +15 -1
- package/dist/state/iso-base-media/cached-sample-positions.js +9 -4
- package/dist/state/iso-base-media/iso-state.d.ts +5 -1
- package/dist/state/iso-base-media/iso-state.js +2 -1
- package/dist/state/iso-base-media/lazy-mfra-load.d.ts +3 -1
- package/dist/state/iso-base-media/lazy-mfra-load.js +2 -1
- package/dist/state/keyframes.js +1 -0
- package/dist/state/m3u-state.d.ts +15 -4
- package/dist/state/m3u-state.js +20 -0
- package/dist/state/matroska/lazy-cues-fetch.d.ts +3 -1
- package/dist/state/matroska/lazy-cues-fetch.js +2 -1
- package/dist/state/matroska/webm.d.ts +3 -1
- package/dist/state/matroska/webm.js +2 -1
- package/dist/state/parser-state.d.ts +29 -13
- package/dist/state/parser-state.js +19 -5
- package/dist/state/riff/lazy-idx1-fetch.d.ts +3 -1
- package/dist/state/riff/lazy-idx1-fetch.js +2 -1
- package/dist/state/riff.d.ts +3 -1
- package/dist/state/riff.js +2 -1
- package/dist/state/sample-callbacks.d.ts +3 -2
- package/dist/state/sample-callbacks.js +3 -3
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/dist/work-on-seek-request.d.ts +6 -3
- package/dist/work-on-seek-request.js +13 -13
- package/dist/worker/forward-controller-to-worker.js +1 -1
- package/dist/worker/serialize-error.js +26 -3
- package/dist/worker/worker-types.d.ts +7 -1
- package/dist/worker-server.js +2 -2
- package/package.json +3 -3
package/dist/parse-loop.js
CHANGED
@@ -45,19 +45,34 @@ const parseLoop = async ({ state, throttledState, onError, }) => {
     try {
         await (0, emit_all_info_1.triggerInfoEmit)(state);
         await state.controller._internals.checkForAbortAndPause();
-        const
+        const result = await (0, run_parse_iteration_1.runParseIteration)({
             state,
         });
-        if (
-
-
-
+        if (result !== null && result.action === 'fetch-more-data') {
+            log_1.Log.verbose(state.logLevel, `Need to fetch ${result.bytesNeeded} more bytes before we can continue`);
+            const startBytesRemaining = state.iterator.bytesRemaining();
+            while (true) {
+                const done = await fetchMoreData(state);
+                if (done) {
+                    break;
+                }
+                if (state.iterator.bytesRemaining() - startBytesRemaining >=
+                    result.bytesNeeded) {
+                    break;
+                }
+            }
+            continue;
+        }
+        if (result !== null && result.action === 'skip') {
+            state.increaseSkippedBytes(result.skipTo - state.iterator.counter.getOffset());
+            if (result.skipTo === state.contentLength) {
+                state.iterator.discard(result.skipTo - state.iterator.counter.getOffset());
                 log_1.Log.verbose(state.logLevel, 'Skipped to end of file, not fetching.');
                 break;
             }
             const seekStart = Date.now();
             await (0, perform_seek_1.performSeek)({
-                seekTo:
+                seekTo: result.skipTo,
                 userInitiated: false,
                 controller: state.controller,
                 mediaSection: state.mediaSection,
@@ -71,6 +86,7 @@ const parseLoop = async ({ state, throttledState, onError, }) => {
                 fields: state.fields,
                 src: state.src,
                 discardReadBytes: state.discardReadBytes,
+                prefetchCache: state.prefetchCache,
             });
             state.timings.timeSeeking += Date.now() - seekStart;
         }
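The new `fetch-more-data` branch above retries the same parse iteration only after the iterator has gained at least `result.bytesNeeded` fresh bytes, or the source is exhausted. A minimal sketch of that accumulation loop, where `readChunk` and `append` are illustrative stand-ins for the package's internal reader and buffer:

```ts
type FetchMoreData = {action: 'fetch-more-data'; bytesNeeded: number};

// Sketch only: keep pulling chunks until enough new bytes are buffered.
const fillUntilSatisfied = async (
  result: FetchMoreData,
  readChunk: () => Promise<Uint8Array | null>, // null = end of source
  append: (chunk: Uint8Array) => void, // hands the bytes to the parser's buffer
): Promise<void> => {
  let gained = 0;
  while (gained < result.bytesNeeded) {
    const chunk = await readChunk();
    if (chunk === null) {
      break; // source exhausted; the parser retries with what it has
    }
    append(chunk);
    gained += chunk.byteLength;
  }
};
```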
package/dist/parse-media-on-worker-entry.js
CHANGED
@@ -101,6 +101,7 @@ const parseMediaOnWorkerImplementation = async ({ controller, reader, ...params
     cleanup();
     // Reject main loop
     const error = (0, serialize_error_1.deserializeError)(data);
+    error.stack = data.errorStack;
     reject(error);
     // If aborted, we send the seeking hints we got,
     // otherwise we reject all .getSeekingHints() promises
package/dist/parse-media.js
CHANGED
@@ -45,7 +45,7 @@ const parseMedia = (options) => {
     controller: (_11 = options.controller) !== null && _11 !== void 0 ? _11 : undefined,
     selectM3uStream: (_12 = options.selectM3uStream) !== null && _12 !== void 0 ? _12 : select_stream_1.defaultSelectM3uStreamFn,
     selectM3uAssociatedPlaylists: (_13 = options.selectM3uAssociatedPlaylists) !== null && _13 !== void 0 ? _13 : select_stream_1.defaultSelectM3uAssociatedPlaylists,
-
+    m3uPlaylistContext: (_14 = options.m3uPlaylistContext) !== null && _14 !== void 0 ? _14 : null,
     src: options.src,
     mode: 'query',
     onDiscardedData: null,
package/dist/parse-result.d.ts
CHANGED
@@ -7,7 +7,7 @@ import type { TransportStreamBox } from './containers/transport-stream/boxes';
 import type { WavStructure } from './containers/wav/types';
 import type { MatroskaSegment } from './containers/webm/segments';
 import type { MediaParserMetadataEntry } from './metadata/get-metadata';
-import type { Skip } from './skip';
+import type { FetchMoreData, Skip } from './skip';
 type Mp3Id3Header = {
     type: 'id3-header';
     versionMajor: number;
@@ -35,5 +35,5 @@ export type Mp3Structure = {
     boxes: Mp3Box[];
 };
 export type MediaParserStructureUnstable = IsoBaseMediaStructure | RiffStructure | MatroskaStructure | TransportStreamStructure | Mp3Structure | AacStructure | WavStructure | M3uStructure | FlacStructure;
-export type ParseResult = Skip | null;
+export type ParseResult = Skip | FetchMoreData | null;
 export {};
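`ParseResult` is now a discriminated union, so callers narrow on the `action` field. A hedged sketch of how a consumer of a parse iteration might branch (the handler is illustrative, not the package's internal code):

```ts
type Skip = {action: 'skip'; skipTo: number};
type FetchMoreData = {action: 'fetch-more-data'; bytesNeeded: number};
type ParseResult = Skip | FetchMoreData | null;

const describeNextStep = (result: ParseResult): string => {
  if (result === null) {
    return 'continue parsing at the current offset';
  }
  if (result.action === 'skip') {
    return `seek to byte ${result.skipTo}`;
  }
  // Only 'fetch-more-data' remains after narrowing.
  return `buffer at least ${result.bytesNeeded} more bytes, then retry`;
};
```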
package/dist/perform-seek.d.ts
CHANGED
@@ -1,4 +1,5 @@
 import type { MediaParserController } from './controller/media-parser-controller';
+import type { PrefetchCache } from './fetch';
 import type { AllOptions, ParseMediaFields } from './fields';
 import type { BufferIterator } from './iterator/buffer-iterator';
 import type { LogLevel } from './log';
@@ -7,7 +8,7 @@ import type { ReaderInterface } from './readers/reader';
 import type { CurrentReader } from './state/current-reader';
 import type { SeekInfiniteLoop } from './state/seek-infinite-loop';
 import type { MediaSectionState } from './state/video-section';
-export declare const performSeek: ({ seekTo, userInitiated, controller, mediaSection, iterator, seekInfiniteLoop, logLevel, mode, contentLength, currentReader, readerInterface, src, discardReadBytes, fields, }: {
+export declare const performSeek: ({ seekTo, userInitiated, controller, mediaSection, iterator, seekInfiniteLoop, logLevel, mode, contentLength, currentReader, readerInterface, src, discardReadBytes, fields, prefetchCache, }: {
     seekTo: number;
     userInitiated: boolean;
     controller: MediaParserController;
@@ -22,4 +23,5 @@ export declare const performSeek: ({ seekTo, userInitiated, controller, mediaSec
     fields: Partial<AllOptions<ParseMediaFields>>;
     src: ParseMediaSrc;
     discardReadBytes: (force: boolean) => Promise<void>;
+    prefetchCache: PrefetchCache;
 }) => Promise<void>;
package/dist/perform-seek.js
CHANGED
@@ -5,7 +5,7 @@ const log_1 = require("./log");
 const seek_backwards_1 = require("./seek-backwards");
 const seek_forwards_1 = require("./seek-forwards");
 const video_section_1 = require("./state/video-section");
-const performSeek = async ({ seekTo, userInitiated, controller, mediaSection, iterator, seekInfiniteLoop, logLevel, mode, contentLength, currentReader, readerInterface, src, discardReadBytes, fields, }) => {
+const performSeek = async ({ seekTo, userInitiated, controller, mediaSection, iterator, seekInfiniteLoop, logLevel, mode, contentLength, currentReader, readerInterface, src, discardReadBytes, fields, prefetchCache, }) => {
     const byteInMediaSection = (0, video_section_1.isByteInMediaSection)({
         position: seekTo,
         mediaSections: mediaSection.getMediaSections(),
@@ -54,6 +54,7 @@ const performSeek = async ({ seekTo, userInitiated, controller, mediaSection, it
             src,
             controller,
             discardReadBytes,
+            prefetchCache,
         });
     }
     else {
@@ -65,6 +66,7 @@ const performSeek = async ({ seekTo, userInitiated, controller, mediaSection, it
             currentReader,
             readerInterface,
             src,
+            prefetchCache,
         });
     }
     await controller._internals.checkForAbortAndPause();
package/dist/readers/fetch/get-body-and-reader.js
CHANGED
@@ -8,10 +8,25 @@ const getLengthAndReader = async ({ canLiveWithoutContentLength, res, ownControl
     (canLiveWithoutContentLength && contentLength === null)) {
     const buffer = await res.arrayBuffer();
     const encoded = new Uint8Array(buffer);
+    let streamCancelled = false;
     const stream = new ReadableStream({
         start(controller) {
-
-
+            if (ownController.signal.aborted) {
+                return;
+            }
+            if (streamCancelled) {
+                return;
+            }
+            try {
+                controller.enqueue(encoded);
+                controller.close();
+            }
+            catch (_a) {
+                // sometimes on windows after aborting on node 16 : Invalid state: ReadableStreamDefaultController is not in a state where chunk can be enqueued
+            }
+        },
+        cancel() {
+            streamCancelled = true;
         },
     });
     return {
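The point of the new guards is that `controller.enqueue()` throws once a `ReadableStream` has been cancelled or torn down, so the single-chunk body is only enqueued while the request is still live, and any late failure is swallowed. A self-contained sketch of the same pattern, with the buffer and abort signal as placeholders:

```ts
const makeSingleChunkStream = (
  encoded: Uint8Array,
  signal: AbortSignal,
): ReadableStream<Uint8Array> => {
  let cancelled = false;
  return new ReadableStream<Uint8Array>({
    start(controller) {
      if (signal.aborted || cancelled) {
        return; // nothing should be delivered anymore
      }
      try {
        controller.enqueue(encoded);
        controller.close();
      } catch {
        // enqueue/close can throw if the stream was torn down in the meantime
      }
    },
    cancel() {
      cancelled = true;
    },
  });
};
```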
package/dist/readers/from-fetch.d.ts
CHANGED
@@ -1,4 +1,6 @@
-import type {
+import type { MediaParserController } from '../controller/media-parser-controller';
+import type { ParseMediaRange } from '../options';
+import type { CreateAdjacentFileSource, PreloadContent, ReadContent, ReaderInterface, ReadWholeAsText } from './reader';
 interface ParsedContentRange {
     unit: string;
     start?: number | null;
@@ -10,7 +12,21 @@ interface ParsedContentRange {
  * From: https://github.com/gregberge/content-range/blob/main/src/index.ts
  */
 export declare function parseContentRange(input: string): ParsedContentRange | null;
+export declare const makeFetchRequest: ({ range, src, controller, }: {
+    range: ParseMediaRange;
+    src: string | URL;
+    controller: MediaParserController | null;
+}) => Promise<{
+    contentLength: number | null;
+    needsContentRange: boolean;
+    reader: import("./reader").Reader;
+    name: string | undefined;
+    contentType: string | null;
+    supportsContentRange: boolean;
+}>;
+export type PrefetchCache = Map<string, ReturnType<typeof makeFetchRequest>>;
 export declare const fetchReadContent: ReadContent;
+export declare const fetchPreload: PreloadContent;
 export declare const fetchReadWholeAsText: ReadWholeAsText;
 export declare const fetchCreateAdjacentFileSource: CreateAdjacentFileSource;
 export declare const fetchReader: ReaderInterface;
package/dist/readers/from-fetch.js
CHANGED
@@ -1,10 +1,9 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.fetchReader = exports.fetchCreateAdjacentFileSource = exports.fetchReadWholeAsText = exports.fetchReadContent = void 0;
+exports.fetchReader = exports.fetchCreateAdjacentFileSource = exports.fetchReadWholeAsText = exports.fetchPreload = exports.fetchReadContent = exports.makeFetchRequest = void 0;
 exports.parseContentRange = parseContentRange;
-/* eslint-disable eqeqeq */
-/* eslint-disable no-eq-null */
 const errors_1 = require("../errors");
+const log_1 = require("../log");
 const get_body_and_reader_1 = require("./fetch/get-body-and-reader");
 const resolve_url_1 = require("./fetch/resolve-url");
 /**
@@ -46,11 +45,8 @@ const validateContentRangeAndDetectIfSupported = ({ requestedRange, parsedConten
     }
     return { supportsContentRange: true };
 };
-const
+const makeFetchRequest = async ({ range, src, controller, }) => {
     var _a;
-    if (typeof src !== 'string' && src instanceof URL === false) {
-        throw new Error('src must be a string when using `fetchReader`');
-    }
     const resolvedUrl = (0, resolve_url_1.resolveUrl)(src);
     const resolvedUrlString = resolvedUrl.toString();
     if (!resolvedUrlString.startsWith('https://') &&
@@ -91,22 +87,63 @@ const fetchReadContent = async ({ src, range, controller, }) => {
         parsedContentRange,
         statusCode: res.status,
     });
-
-
-
+    if (controller) {
+        controller._internals.signal.addEventListener('abort', () => {
+            ownController.abort(new errors_1.MediaParserAbortError('Aborted by user'));
+        }, { once: true });
+    }
     if (res.status.toString().startsWith('4') ||
         res.status.toString().startsWith('5')) {
-        throw new Error(`Server returned status code ${res.status} for ${
+        throw new Error(`Server returned status code ${res.status} for ${resolvedUrl} and range ${requestedRange}`);
     }
     const contentDisposition = res.headers.get('content-disposition');
     const name = (_a = contentDisposition === null || contentDisposition === void 0 ? void 0 : contentDisposition.match(/filename="([^"]+)"/)) === null || _a === void 0 ? void 0 : _a[1];
-    const fallbackName = src.toString().split('/').pop();
     const { contentLength, needsContentRange, reader } = await (0, get_body_and_reader_1.getLengthAndReader)({
         canLiveWithoutContentLength,
         res,
         ownController,
         requestedWithoutRange: requestWithoutRange,
     });
+    const contentType = res.headers.get('content-type');
+    return {
+        contentLength,
+        needsContentRange,
+        reader,
+        name,
+        contentType,
+        supportsContentRange,
+    };
+};
+exports.makeFetchRequest = makeFetchRequest;
+const cacheKey = ({ src, range, }) => {
+    return `${src}-${JSON.stringify(range)}`;
+};
+const makeFetchRequestOrGetCached = ({ range, src, controller, logLevel, prefetchCache, }) => {
+    const key = cacheKey({ src, range });
+    const cached = prefetchCache.get(key);
+    if (cached) {
+        log_1.Log.verbose(logLevel, `Reading from preload cache for ${key}`);
+        return cached;
+    }
+    log_1.Log.verbose(logLevel, `Fetching ${key}`);
+    const result = (0, exports.makeFetchRequest)({ range, src, controller });
+    prefetchCache.set(key, result);
+    return result;
+};
+const fetchReadContent = async ({ src, range, controller, logLevel, prefetchCache, }) => {
+    if (typeof src !== 'string' && src instanceof URL === false) {
+        throw new Error('src must be a string when using `fetchReader`');
+    }
+    const fallbackName = src.toString().split('/').pop();
+    const { reader, contentLength, needsContentRange, name, supportsContentRange, contentType, } = await makeFetchRequestOrGetCached({
+        range,
+        src,
+        controller,
+        logLevel,
+        prefetchCache,
+    });
+    const key = cacheKey({ src, range });
+    prefetchCache.delete(key);
     if (controller) {
         controller._internals.signal.addEventListener('abort', () => {
             reader.reader.cancel().catch(() => {
@@ -117,13 +154,30 @@ const fetchReadContent = async ({ src, range, controller, }) => {
     return {
         reader,
         contentLength,
-        contentType
+        contentType,
         name: name !== null && name !== void 0 ? name : fallbackName,
         supportsContentRange,
        needsContentRange,
     };
 };
 exports.fetchReadContent = fetchReadContent;
+const fetchPreload = ({ src, range, logLevel, prefetchCache, }) => {
+    if (typeof src !== 'string' && src instanceof URL === false) {
+        throw new Error('src must be a string when using `fetchReader`');
+    }
+    const key = cacheKey({ src, range });
+    if (prefetchCache.has(key)) {
+        return prefetchCache.get(key);
+    }
+    makeFetchRequestOrGetCached({
+        range,
+        src,
+        controller: null,
+        logLevel,
+        prefetchCache,
+    });
+};
+exports.fetchPreload = fetchPreload;
 const fetchReadWholeAsText = async (src) => {
     if (typeof src !== 'string' && src instanceof URL === false) {
         throw new Error('src must be a string when using `fetchReader`');
@@ -146,4 +200,5 @@ exports.fetchReader = {
     read: exports.fetchReadContent,
     readWholeAsText: exports.fetchReadWholeAsText,
     createAdjacentFileSource: exports.fetchCreateAdjacentFileSource,
+    preload: exports.fetchPreload,
 };
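The new cache stores the in-flight request promise keyed by `src` plus the requested range: `preload` starts the request, and a later `read` of the same slice awaits the cached promise and then evicts the entry so it is only reused once. A reduced sketch of that promise-memoization pattern, with `fetchSlice` standing in for `makeFetchRequest`:

```ts
type PrefetchCache<T> = Map<string, Promise<T>>;

const cacheKey = (src: string, range: number | null) =>
  `${src}-${JSON.stringify(range)}`;

// Start a request or reuse the in-flight one; the promise itself is cached.
const getOrStart = <T>(
  cache: PrefetchCache<T>,
  src: string,
  range: number | null,
  fetchSlice: () => Promise<T>,
): Promise<T> => {
  const key = cacheKey(src, range);
  const existing = cache.get(key);
  if (existing) {
    return existing;
  }
  const started = fetchSlice();
  cache.set(key, started);
  return started;
};

// preload(): fire and forget. read(): consume and evict so the entry is used once.
const preload = <T>(
  cache: PrefetchCache<T>,
  src: string,
  range: number | null,
  fetchSlice: () => Promise<T>,
): void => {
  void getOrStart(cache, src, range, fetchSlice);
};

const read = async <T>(
  cache: PrefetchCache<T>,
  src: string,
  range: number | null,
  fetchSlice: () => Promise<T>,
): Promise<T> => {
  const value = await getOrStart(cache, src, range, fetchSlice);
  cache.delete(cacheKey(src, range));
  return value;
};
```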
package/dist/readers/from-node.js
CHANGED
@@ -3,7 +3,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.nodeReader = exports.nodeCreateAdjacentFileSource = exports.nodeReadWholeAsText = exports.nodeReadContent = void 0;
 const fs_1 = require("fs");
 const path_1 = require("path");
-const stream_1 = require("stream");
 const nodeReadContent = ({ src, range, controller }) => {
     if (typeof src !== 'string') {
         throw new Error('src must be a string when using `nodeReader`');
@@ -22,7 +21,27 @@ const nodeReadContent = ({ src, range, controller }) => {
         ownController.abort();
     }, { once: true });
     const stats = (0, fs_1.statSync)(src);
-
+    let readerCancelled = false;
+    const reader = new ReadableStream({
+        start(c) {
+            if (readerCancelled) {
+                return;
+            }
+            stream.on('data', (chunk) => {
+                c.enqueue(chunk);
+            });
+            stream.on('end', () => {
+                c.close();
+            });
+            stream.on('error', (err) => {
+                c.error(err);
+            });
+        },
+        cancel() {
+            readerCancelled = true;
+            stream.destroy();
+        },
+    }).getReader();
     if (controller) {
         controller._internals.signal.addEventListener('abort', () => {
             reader.cancel().catch(() => { });
@@ -66,4 +85,7 @@ exports.nodeReader = {
     read: exports.nodeReadContent,
     readWholeAsText: exports.nodeReadWholeAsText,
     createAdjacentFileSource: exports.nodeCreateAdjacentFileSource,
+    preload: () => {
+        // doing nothing, it's just for when fetching over the network
+    },
 };
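With the `stream` import gone, the Node reader now bridges the `fs` read stream into a WHATWG `ReadableStream` by hand, destroying the underlying stream when the consumer cancels. A standalone sketch of that bridge (path and options are illustrative):

```ts
import {createReadStream} from 'fs';

const nodeFileToWebStream = (
  path: string,
  start = 0,
): ReadableStream<Uint8Array> => {
  const stream = createReadStream(path, {start});
  let cancelled = false;
  return new ReadableStream<Uint8Array>({
    start(controller) {
      if (cancelled) {
        return;
      }
      stream.on('data', (chunk) => controller.enqueue(chunk as Uint8Array));
      stream.on('end', () => controller.close());
      stream.on('error', (err) => controller.error(err));
    },
    cancel() {
      cancelled = true;
      stream.destroy(); // stop reading from disk once the consumer goes away
    },
  });
};
```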
package/dist/readers/from-web-file.js
CHANGED
@@ -60,4 +60,7 @@ exports.webFileReader = {
     read: exports.webFileReadContent,
     readWholeAsText: exports.webFileReadWholeAsText,
     createAdjacentFileSource: exports.webFileCreateAdjacentFileSource,
+    preload: () => {
+        // doing nothing, it's just for when fetching over the network
+    },
 };
package/dist/readers/reader.d.ts
CHANGED
@@ -1,5 +1,7 @@
 import type { MediaParserController } from '../controller/media-parser-controller';
-import type {
+import type { LogLevel } from '../log';
+import type { ParseMediaRange, ParseMediaSrc } from '../options';
+import type { PrefetchCache } from './from-fetch';
 export type Reader = {
     reader: ReadableStreamDefaultReader<Uint8Array>;
     abort: () => void;
@@ -14,14 +16,29 @@ type ReadResult = {
 };
 export type ReadContent = (options: {
     src: ParseMediaSrc;
-    range:
+    range: ParseMediaRange;
     controller: MediaParserController;
+    logLevel: LogLevel;
+    prefetchCache: PrefetchCache;
 }) => Promise<ReadResult>;
 export type ReadWholeAsText = (src: ParseMediaSrc) => Promise<string>;
+export type PreloadContent = (options: {
+    src: ParseMediaSrc;
+    range: ParseMediaRange;
+    logLevel: LogLevel;
+    prefetchCache: PrefetchCache;
+}) => void;
+export type ClearPreloadCache = (options: {
+    src: ParseMediaSrc;
+    range: ParseMediaRange;
+    logLevel: LogLevel;
+    prefetchCache: PrefetchCache;
+}) => void;
 export type CreateAdjacentFileSource = (relativePath: string, src: ParseMediaSrc) => string;
 export type ReaderInterface = {
     read: ReadContent;
     readWholeAsText: ReadWholeAsText;
     createAdjacentFileSource: CreateAdjacentFileSource;
+    preload: PreloadContent;
 };
 export {};
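`preload` is now a required member of `ReaderInterface`, so custom readers have to provide it; non-network readers can make it a no-op. A hedged sketch of a preload implementation against simplified stand-ins for these types (the `src`, range and cache types below approximate `ParseMediaSrc`, `ParseMediaRange` and `PrefetchCache`, they are not the package's declarations):

```ts
// Simplified stand-ins for the real option types.
type Src = string | URL | Blob;
type Range = number | [number, number] | null;
type Cache = Map<string, Promise<Response>>;

const myPreload = ({src, range, prefetchCache}: {
  src: Src;
  range: Range;
  prefetchCache: Cache;
}): void => {
  if (src instanceof Blob) {
    return; // local data, nothing to warm up
  }
  const url = src.toString();
  if (!url.startsWith('http') && !url.startsWith('blob:')) {
    return;
  }
  const key = `${url}-${JSON.stringify(range)}`;
  if (!prefetchCache.has(key)) {
    prefetchCache.set(key, fetch(url)); // start the request, do not await it
  }
};
```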
package/dist/readers/universal.js
CHANGED
@@ -35,4 +35,13 @@ exports.universalReader = {
         }
         return (0, from_node_1.nodeCreateAdjacentFileSource)(relativePath, src);
     },
+    preload: ({ src, range, logLevel, prefetchCache }) => {
+        if (src instanceof Blob) {
+            return;
+        }
+        if (src.toString().startsWith('http') ||
+            src.toString().startsWith('blob:')) {
+            return (0, from_fetch_1.fetchPreload)({ range, src, logLevel, prefetchCache });
+        }
+    },
 };
package/dist/readers/web.js
CHANGED
@@ -22,4 +22,10 @@ exports.webReader = {
         }
         return (0, from_fetch_1.fetchReadWholeAsText)(src);
     },
+    preload: ({ range, src, logLevel, prefetchCache }) => {
+        if (src instanceof Blob) {
+            return;
+        }
+        return (0, from_fetch_1.fetchPreload)({ range, src, logLevel, prefetchCache });
+    },
 };
package/dist/register-track.d.ts
CHANGED
@@ -3,14 +3,14 @@ import type { LogLevel } from './log';
 import type { MediaParserContainer } from './options';
 import type { TracksState } from './state/has-tracks-section';
 import type { ParserState } from './state/parser-state';
-import type {
+import type { CallbacksState } from './state/sample-callbacks';
 import type { OnAudioTrack, OnVideoTrack } from './webcodec-sample-types';
 export declare const registerVideoTrack: ({ track, container, logLevel, onVideoTrack, registerVideoSampleCallback, tracks, }: {
     track: Track;
     container: MediaParserContainer;
     logLevel: LogLevel;
     onVideoTrack: OnVideoTrack | null;
-    registerVideoSampleCallback:
+    registerVideoSampleCallback: CallbacksState["registerVideoSampleCallback"];
     tracks: TracksState;
 }) => Promise<import("./webcodec-sample-types").OnVideoSample | null>;
 export declare const registerAudioTrack: ({ track, container, tracks, logLevel, onAudioTrack, registerAudioSampleCallback, }: {
@@ -19,7 +19,7 @@ export declare const registerAudioTrack: ({ track, container, tracks, logLevel,
     tracks: TracksState;
     logLevel: LogLevel;
     onAudioTrack: OnAudioTrack | null;
-    registerAudioSampleCallback:
+    registerAudioSampleCallback: CallbacksState["registerAudioSampleCallback"];
 }) => Promise<import("./webcodec-sample-types").OnAudioSample | null>;
 export declare const registerVideoTrackWhenProfileIsAvailable: ({ state, track, container, }: {
     state: ParserState;
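The callback parameters are now typed with an indexed access type (`CallbacksState["registerVideoSampleCallback"]`) instead of a repeated inline signature, so the declaration stays in sync with the state object it comes from. A generic illustration of the pattern (the signatures are made up):

```ts
type CallbacksState = {
  registerVideoSampleCallback: (trackId: number, cb: (() => void) | null) => Promise<void>;
  registerAudioSampleCallback: (trackId: number, cb: (() => void) | null) => Promise<void>;
};

// The parameter reuses whatever signature CallbacksState declares;
// changing it in one place updates every consumer.
declare function registerVideoTrack(options: {
  registerVideoSampleCallback: CallbacksState['registerVideoSampleCallback'];
}): Promise<void>;
```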
package/dist/seek-backwards.d.ts
CHANGED
@@ -1,10 +1,11 @@
 import type { MediaParserController } from './controller/media-parser-controller';
+import type { PrefetchCache } from './fetch';
 import type { BufferIterator } from './iterator/buffer-iterator';
 import type { LogLevel } from './log';
 import type { ParseMediaSrc } from './options';
 import type { ReaderInterface } from './readers/reader';
 import type { CurrentReader } from './state/current-reader';
-export declare const seekBackwards: ({ iterator, seekTo, readerInterface, src, controller, logLevel, currentReader, }: {
+export declare const seekBackwards: ({ iterator, seekTo, readerInterface, src, controller, logLevel, currentReader, prefetchCache, }: {
     iterator: BufferIterator;
     seekTo: number;
     readerInterface: ReaderInterface;
@@ -12,4 +13,5 @@ export declare const seekBackwards: ({ iterator, seekTo, readerInterface, src, c
     controller: MediaParserController;
     logLevel: LogLevel;
     currentReader: CurrentReader;
+    prefetchCache: PrefetchCache;
 }) => Promise<void>;
package/dist/seek-backwards.js
CHANGED
@@ -2,10 +2,11 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.seekBackwards = void 0;
 const log_1 = require("./log");
-const seekBackwards = async ({ iterator, seekTo, readerInterface, src, controller, logLevel, currentReader, }) => {
+const seekBackwards = async ({ iterator, seekTo, readerInterface, src, controller, logLevel, currentReader, prefetchCache, }) => {
     // (a) data has not been discarded yet
     const howManyBytesWeCanGoBack = iterator.counter.getDiscardedOffset();
     if (iterator.counter.getOffset() - howManyBytesWeCanGoBack <= seekTo) {
+        log_1.Log.verbose(logLevel, `Seeking back to ${seekTo}`);
         iterator.skipTo(seekTo);
         return;
     }
@@ -16,6 +17,8 @@ const seekBackwards = async ({ iterator, seekTo, readerInterface, src, controlle
         src,
         range: seekTo,
         controller,
+        logLevel,
+        prefetchCache,
     });
     iterator.replaceData(new Uint8Array([]), seekTo);
     log_1.Log.verbose(logLevel, `Re-reading took ${Date.now() - time}ms. New position: ${iterator.counter.getOffset()}`);
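`seekBackwards` first checks whether the target byte can still be reached inside the retained buffer; only when it cannot does it re-open the source at `seekTo` and replace the buffered data. A simplified sketch of that decision, using the same arithmetic as the check above:

```ts
type SeekPlan =
  | {kind: 'rewind-in-buffer'}
  | {kind: 'reopen-source'; from: number};

const planBackwardsSeek = (
  currentOffset: number,
  howManyBytesWeCanGoBack: number, // as reported by the iterator's counter
  seekTo: number,
): SeekPlan => {
  if (currentOffset - howManyBytesWeCanGoBack <= seekTo) {
    // target byte is still buffered: just move the read cursor back
    return {kind: 'rewind-in-buffer'};
  }
  // otherwise re-read from the source starting at the target byte
  return {kind: 'reopen-source', from: seekTo};
};
```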
package/dist/seek-forwards.d.ts
CHANGED
@@ -1,11 +1,12 @@
 import type { MediaParserController } from './controller/media-parser-controller';
+import type { PrefetchCache } from './fetch';
 import type { AllOptions, ParseMediaFields } from './fields';
 import type { BufferIterator } from './iterator/buffer-iterator';
 import type { LogLevel } from './log';
 import type { ParseMediaSrc } from './options';
 import type { ReaderInterface } from './readers/reader';
 import type { CurrentReader } from './state/current-reader';
-export declare const seekForward: ({ seekTo, userInitiated, iterator, fields, logLevel, currentReader, readerInterface, src, controller, discardReadBytes, }: {
+export declare const seekForward: ({ seekTo, userInitiated, iterator, fields, logLevel, currentReader, readerInterface, src, controller, discardReadBytes, prefetchCache, }: {
     seekTo: number;
     userInitiated: boolean;
     iterator: BufferIterator;
@@ -16,4 +17,5 @@ export declare const seekForward: ({ seekTo, userInitiated, iterator, fields, lo
     src: ParseMediaSrc;
     controller: MediaParserController;
     discardReadBytes: (force: boolean) => Promise<void>;
+    prefetchCache: PrefetchCache;
 }) => Promise<void>;
package/dist/seek-forwards.js
CHANGED
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.seekForward = void 0;
 const disallow_forward_seek_if_samples_are_needed_1 = require("./disallow-forward-seek-if-samples-are-needed");
 const log_1 = require("./log");
-const seekForward = async ({ seekTo, userInitiated, iterator, fields, logLevel, currentReader, readerInterface, src, controller, discardReadBytes, }) => {
+const seekForward = async ({ seekTo, userInitiated, iterator, fields, logLevel, currentReader, readerInterface, src, controller, discardReadBytes, prefetchCache, }) => {
     if (userInitiated) {
         (0, disallow_forward_seek_if_samples_are_needed_1.disallowForwardSeekIfSamplesAreNeeded)({
             fields,
@@ -26,6 +26,8 @@ const seekForward = async ({ seekTo, userInitiated, iterator, fields, logLevel,
         src,
         range: seekTo,
         controller,
+        logLevel,
+        prefetchCache,
     });
     iterator.skipTo(seekTo);
     await discardReadBytes(true);
package/dist/seeking-hints.d.ts
CHANGED
@@ -40,4 +40,7 @@ export type WebmSeekingHints = {
     loadedCues: LazyCuesLoadedOrNull;
     timestampMap: Map<number, number>;
 };
-export type
+export type M3u8SeekingHints = {
+    type: 'm3u8-seeking-hints';
+};
+export type SeekingHints = IsoBaseMediaSeekingHints | WavSeekingHints | TransportStreamSeekingHints | WebmSeekingHints | FlacSeekingHints | RiffSeekingHints | Mp3SeekingHints | AacSeekingHints | M3u8SeekingHints;
package/dist/set-seeking-hints.js
CHANGED
@@ -42,6 +42,10 @@ const setSeekingHints = ({ hints, state, }) => {
         (0, seeking_hints_1.setSeekingHintsForAac)();
         return;
     }
+    if (hints.type === 'm3u8-seeking-hints') {
+        // TODO: Implement
+        return;
+    }
     throw new Error(`Unknown seeking hints type: ${hints}`);
 };
 exports.setSeekingHints = setSeekingHints;
package/dist/skip.d.ts
CHANGED
@@ -3,3 +3,8 @@ export type Skip = {
     skipTo: number;
 };
 export declare const makeSkip: (skipTo: number) => Skip;
+export type FetchMoreData = {
+    action: 'fetch-more-data';
+    bytesNeeded: number;
+};
+export declare const makeFetchMoreData: (bytesNeeded: number) => FetchMoreData;
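Parsers produce these results through the factories rather than building the literals inline. A toy parse step showing how the two factories might be combined (the 8-byte header size and the offsets are made up for the example):

```ts
type Skip = {action: 'skip'; skipTo: number};
type FetchMoreData = {action: 'fetch-more-data'; bytesNeeded: number};

const makeSkip = (skipTo: number): Skip => ({action: 'skip', skipTo});
const makeFetchMoreData = (bytesNeeded: number): FetchMoreData => ({
  action: 'fetch-more-data',
  bytesNeeded,
});

// Ask for the missing bytes of a hypothetical 8-byte box header,
// or skip past the box body once its size is known.
const parseBoxHeader = (bytesRemaining: number, boxSize: number, offset: number) =>
  bytesRemaining < 8 ? makeFetchMoreData(8 - bytesRemaining) : makeSkip(offset + boxSize);
```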
package/dist/skip.js
CHANGED
@@ -1,8 +1,13 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.makeSkip = void 0;
+exports.makeFetchMoreData = exports.makeSkip = void 0;
 const makeSkip = (skipTo) => ({
     action: 'skip',
     skipTo,
 });
 exports.makeSkip = makeSkip;
+const makeFetchMoreData = (bytesNeeded) => ({
+    action: 'fetch-more-data',
+    bytesNeeded,
+});
+exports.makeFetchMoreData = makeFetchMoreData;
package/dist/state/can-skip-tracks.d.ts
CHANGED
@@ -11,6 +11,7 @@ export declare const makeCanSkipTracksState: ({ hasAudioTrackHandlers, fields, h
     fields: Options<ParseMediaFields>;
     structure: StructureState;
 }) => {
+    doFieldsNeedTracks: () => boolean;
     canSkipTracks: () => boolean;
 };
 export type CanSkipTracksState = ReturnType<typeof makeCanSkipTracksState>;
package/dist/state/can-skip-tracks.js
CHANGED
@@ -43,17 +43,21 @@ const needsTracksForField = ({ field, structure, }) => {
 };
 exports.needsTracksForField = needsTracksForField;
 const makeCanSkipTracksState = ({ hasAudioTrackHandlers, fields, hasVideoTrackHandlers, structure, }) => {
+    const doFieldsNeedTracks = () => {
+        const keys = Object.keys(fields !== null && fields !== void 0 ? fields : {});
+        const selectedKeys = keys.filter((k) => fields[k]);
+        return selectedKeys.some((k) => (0, exports.needsTracksForField)({
+            field: k,
+            structure: structure.getStructureOrNull(),
+        }));
+    };
     return {
+        doFieldsNeedTracks,
         canSkipTracks: () => {
             if (hasAudioTrackHandlers || hasVideoTrackHandlers) {
                 return false;
             }
-
-            const selectedKeys = keys.filter((k) => fields[k]);
-            return !selectedKeys.some((k) => (0, exports.needsTracksForField)({
-                field: k,
-                structure: structure.getStructureOrNull(),
-            }));
+            return !doFieldsNeedTracks();
         },
     };
 };
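`doFieldsNeedTracks()` factors the "do the requested fields require parsing the tracks?" check out of `canSkipTracks()` so it can be reused on its own. A reduced sketch of the predicate over a `fields` options object (the field-to-requirement mapping is illustrative):

```ts
type Fields = {durationInSeconds?: boolean; fps?: boolean; tracks?: boolean};

// Illustrative mapping: which fields can only be computed after tracks are parsed.
const needsTracksForField = (field: keyof Fields): boolean =>
  field === 'fps' || field === 'tracks';

const doFieldsNeedTracks = (fields: Fields | null): boolean => {
  const keys = Object.keys(fields ?? {}) as (keyof Fields)[];
  const selectedKeys = keys.filter((k) => (fields ?? {})[k]);
  return selectedKeys.some((k) => needsTracksForField(k));
};

// canSkipTracks() then reduces to: no sample handlers registered && !doFieldsNeedTracks()
```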