@editframe/elements 0.20.4-beta.0 → 0.23.6-beta.0
This diff is generated from publicly available package versions published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
- package/dist/DelayedLoadingState.js +0 -27
- package/dist/EF_FRAMEGEN.d.ts +5 -3
- package/dist/EF_FRAMEGEN.js +49 -11
- package/dist/_virtual/_@oxc-project_runtime@0.94.0/helpers/decorate.js +7 -0
- package/dist/attachContextRoot.d.ts +1 -0
- package/dist/attachContextRoot.js +9 -0
- package/dist/elements/ContextProxiesController.d.ts +1 -2
- package/dist/elements/EFAudio.js +5 -9
- package/dist/elements/EFCaptions.d.ts +1 -3
- package/dist/elements/EFCaptions.js +112 -129
- package/dist/elements/EFImage.js +6 -7
- package/dist/elements/EFMedia/AssetIdMediaEngine.js +2 -5
- package/dist/elements/EFMedia/AssetMediaEngine.js +36 -33
- package/dist/elements/EFMedia/BaseMediaEngine.js +57 -73
- package/dist/elements/EFMedia/BufferedSeekingInput.d.ts +1 -1
- package/dist/elements/EFMedia/BufferedSeekingInput.js +134 -78
- package/dist/elements/EFMedia/JitMediaEngine.js +9 -19
- package/dist/elements/EFMedia/audioTasks/makeAudioBufferTask.js +7 -13
- package/dist/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.js +2 -3
- package/dist/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.js +1 -1
- package/dist/elements/EFMedia/audioTasks/makeAudioInputTask.js +6 -5
- package/dist/elements/EFMedia/audioTasks/makeAudioSeekTask.js +1 -3
- package/dist/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.js +1 -1
- package/dist/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.js +1 -1
- package/dist/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.js +1 -1
- package/dist/elements/EFMedia/shared/AudioSpanUtils.js +9 -25
- package/dist/elements/EFMedia/shared/BufferUtils.js +2 -17
- package/dist/elements/EFMedia/shared/GlobalInputCache.js +0 -24
- package/dist/elements/EFMedia/shared/PrecisionUtils.js +0 -21
- package/dist/elements/EFMedia/shared/ThumbnailExtractor.js +0 -17
- package/dist/elements/EFMedia/tasks/makeMediaEngineTask.js +1 -10
- package/dist/elements/EFMedia/videoTasks/MainVideoInputCache.d.ts +29 -0
- package/dist/elements/EFMedia/videoTasks/MainVideoInputCache.js +32 -0
- package/dist/elements/EFMedia/videoTasks/ScrubInputCache.js +1 -15
- package/dist/elements/EFMedia/videoTasks/makeScrubVideoBufferTask.js +1 -7
- package/dist/elements/EFMedia/videoTasks/makeScrubVideoInputTask.js +8 -5
- package/dist/elements/EFMedia/videoTasks/makeScrubVideoSeekTask.js +12 -13
- package/dist/elements/EFMedia/videoTasks/makeScrubVideoSegmentIdTask.js +1 -1
- package/dist/elements/EFMedia/videoTasks/makeUnifiedVideoSeekTask.js +134 -70
- package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.js +11 -18
- package/dist/elements/EFMedia.d.ts +19 -0
- package/dist/elements/EFMedia.js +44 -25
- package/dist/elements/EFSourceMixin.js +5 -7
- package/dist/elements/EFSurface.js +6 -9
- package/dist/elements/EFTemporal.browsertest.d.ts +11 -0
- package/dist/elements/EFTemporal.d.ts +10 -0
- package/dist/elements/EFTemporal.js +100 -41
- package/dist/elements/EFThumbnailStrip.js +23 -73
- package/dist/elements/EFTimegroup.browsertest.d.ts +3 -3
- package/dist/elements/EFTimegroup.d.ts +35 -14
- package/dist/elements/EFTimegroup.js +138 -181
- package/dist/elements/EFVideo.d.ts +16 -2
- package/dist/elements/EFVideo.js +156 -108
- package/dist/elements/EFWaveform.js +23 -40
- package/dist/elements/SampleBuffer.js +3 -7
- package/dist/elements/TargetController.js +5 -5
- package/dist/elements/durationConverter.js +4 -4
- package/dist/elements/renderTemporalAudio.d.ts +10 -0
- package/dist/elements/renderTemporalAudio.js +35 -0
- package/dist/elements/updateAnimations.js +19 -43
- package/dist/gui/ContextMixin.d.ts +5 -5
- package/dist/gui/ContextMixin.js +167 -162
- package/dist/gui/Controllable.browsertest.d.ts +0 -0
- package/dist/gui/Controllable.d.ts +15 -0
- package/dist/gui/Controllable.js +9 -0
- package/dist/gui/EFConfiguration.js +7 -7
- package/dist/gui/EFControls.browsertest.d.ts +11 -0
- package/dist/gui/EFControls.d.ts +18 -4
- package/dist/gui/EFControls.js +70 -28
- package/dist/gui/EFDial.browsertest.d.ts +0 -0
- package/dist/gui/EFDial.d.ts +18 -0
- package/dist/gui/EFDial.js +141 -0
- package/dist/gui/EFFilmstrip.browsertest.d.ts +11 -0
- package/dist/gui/EFFilmstrip.d.ts +12 -2
- package/dist/gui/EFFilmstrip.js +214 -129
- package/dist/gui/EFFitScale.js +5 -8
- package/dist/gui/EFFocusOverlay.js +4 -4
- package/dist/gui/EFPause.browsertest.d.ts +0 -0
- package/dist/gui/EFPause.d.ts +23 -0
- package/dist/gui/EFPause.js +59 -0
- package/dist/gui/EFPlay.browsertest.d.ts +0 -0
- package/dist/gui/EFPlay.d.ts +23 -0
- package/dist/gui/EFPlay.js +59 -0
- package/dist/gui/EFPreview.d.ts +4 -0
- package/dist/gui/EFPreview.js +18 -9
- package/dist/gui/EFResizableBox.browsertest.d.ts +0 -0
- package/dist/gui/EFResizableBox.d.ts +34 -0
- package/dist/gui/EFResizableBox.js +547 -0
- package/dist/gui/EFScrubber.d.ts +9 -3
- package/dist/gui/EFScrubber.js +13 -13
- package/dist/gui/EFTimeDisplay.d.ts +7 -1
- package/dist/gui/EFTimeDisplay.js +8 -8
- package/dist/gui/EFToggleLoop.d.ts +9 -3
- package/dist/gui/EFToggleLoop.js +7 -5
- package/dist/gui/EFTogglePlay.d.ts +12 -4
- package/dist/gui/EFTogglePlay.js +26 -21
- package/dist/gui/EFWorkbench.js +5 -5
- package/dist/gui/PlaybackController.d.ts +67 -0
- package/dist/gui/PlaybackController.js +310 -0
- package/dist/gui/TWMixin.js +1 -1
- package/dist/gui/TWMixin2.js +1 -1
- package/dist/gui/TargetOrContextMixin.d.ts +10 -0
- package/dist/gui/TargetOrContextMixin.js +98 -0
- package/dist/gui/efContext.d.ts +2 -2
- package/dist/index.d.ts +5 -0
- package/dist/index.js +5 -1
- package/dist/otel/BridgeSpanExporter.d.ts +13 -0
- package/dist/otel/BridgeSpanExporter.js +87 -0
- package/dist/otel/setupBrowserTracing.d.ts +12 -0
- package/dist/otel/setupBrowserTracing.js +32 -0
- package/dist/otel/tracingHelpers.d.ts +34 -0
- package/dist/otel/tracingHelpers.js +112 -0
- package/dist/style.css +1 -1
- package/dist/transcoding/cache/RequestDeduplicator.js +0 -21
- package/dist/transcoding/cache/URLTokenDeduplicator.js +1 -21
- package/dist/transcoding/utils/UrlGenerator.js +2 -19
- package/dist/utils/LRUCache.js +6 -53
- package/package.json +13 -5
- package/src/elements/ContextProxiesController.ts +10 -10
- package/src/elements/EFAudio.ts +1 -0
- package/src/elements/EFCaptions.browsertest.ts +128 -56
- package/src/elements/EFCaptions.ts +60 -34
- package/src/elements/EFImage.browsertest.ts +1 -2
- package/src/elements/EFMedia/AssetMediaEngine.ts +65 -37
- package/src/elements/EFMedia/BaseMediaEngine.ts +110 -52
- package/src/elements/EFMedia/BufferedSeekingInput.ts +218 -101
- package/src/elements/EFMedia/JitMediaEngine.browsertest.ts +3 -0
- package/src/elements/EFMedia/audioTasks/makeAudioInputTask.ts +7 -3
- package/src/elements/EFMedia/audioTasks/makeAudioSeekTask.chunkboundary.regression.browsertest.ts +1 -1
- package/src/elements/EFMedia/videoTasks/MainVideoInputCache.ts +76 -0
- package/src/elements/EFMedia/videoTasks/makeScrubVideoInputTask.ts +16 -10
- package/src/elements/EFMedia/videoTasks/makeScrubVideoSeekTask.ts +7 -1
- package/src/elements/EFMedia/videoTasks/makeUnifiedVideoSeekTask.ts +222 -116
- package/src/elements/EFMedia.browsertest.ts +8 -15
- package/src/elements/EFMedia.ts +54 -8
- package/src/elements/EFSurface.browsertest.ts +2 -6
- package/src/elements/EFSurface.ts +1 -0
- package/src/elements/EFTemporal.browsertest.ts +58 -1
- package/src/elements/EFTemporal.ts +140 -4
- package/src/elements/EFThumbnailStrip.browsertest.ts +2 -8
- package/src/elements/EFThumbnailStrip.ts +1 -0
- package/src/elements/EFTimegroup.browsertest.ts +16 -15
- package/src/elements/EFTimegroup.ts +281 -275
- package/src/elements/EFVideo.browsertest.ts +162 -74
- package/src/elements/EFVideo.ts +229 -101
- package/src/elements/FetchContext.browsertest.ts +7 -2
- package/src/elements/TargetController.browsertest.ts +1 -0
- package/src/elements/TargetController.ts +1 -0
- package/src/elements/renderTemporalAudio.ts +108 -0
- package/src/elements/updateAnimations.browsertest.ts +181 -6
- package/src/elements/updateAnimations.ts +6 -6
- package/src/gui/ContextMixin.browsertest.ts +274 -27
- package/src/gui/ContextMixin.ts +230 -175
- package/src/gui/Controllable.browsertest.ts +258 -0
- package/src/gui/Controllable.ts +41 -0
- package/src/gui/EFControls.browsertest.ts +294 -80
- package/src/gui/EFControls.ts +139 -28
- package/src/gui/EFDial.browsertest.ts +84 -0
- package/src/gui/EFDial.ts +172 -0
- package/src/gui/EFFilmstrip.browsertest.ts +712 -0
- package/src/gui/EFFilmstrip.ts +213 -23
- package/src/gui/EFPause.browsertest.ts +202 -0
- package/src/gui/EFPause.ts +73 -0
- package/src/gui/EFPlay.browsertest.ts +202 -0
- package/src/gui/EFPlay.ts +73 -0
- package/src/gui/EFPreview.ts +20 -5
- package/src/gui/EFResizableBox.browsertest.ts +79 -0
- package/src/gui/EFResizableBox.ts +898 -0
- package/src/gui/EFScrubber.ts +7 -5
- package/src/gui/EFTimeDisplay.browsertest.ts +19 -19
- package/src/gui/EFTimeDisplay.ts +3 -1
- package/src/gui/EFToggleLoop.ts +6 -5
- package/src/gui/EFTogglePlay.ts +30 -23
- package/src/gui/PlaybackController.ts +522 -0
- package/src/gui/TWMixin.css +3 -0
- package/src/gui/TargetOrContextMixin.ts +185 -0
- package/src/gui/efContext.ts +2 -2
- package/src/otel/BridgeSpanExporter.ts +150 -0
- package/src/otel/setupBrowserTracing.ts +73 -0
- package/src/otel/tracingHelpers.ts +251 -0
- package/test/cache-integration-verification.browsertest.ts +1 -1
- package/types.json +1 -1
- package/dist/elements/ContextProxiesController.js +0 -69
package/dist/elements/EFMedia/AssetMediaEngine.js

@@ -1,3 +1,4 @@
+import { withSpan } from "../../otel/tracingHelpers.js";
 import { BaseMediaEngine } from "./BaseMediaEngine.js";
 import { convertToScaledTime, roundToMilliseconds } from "./shared/PrecisionUtils.js";
 var AssetMediaEngine = class AssetMediaEngine extends BaseMediaEngine {
@@ -10,10 +11,8 @@ var AssetMediaEngine = class AssetMediaEngine extends BaseMediaEngine {
 static async fetch(host, urlGenerator, src) {
 const engine = new AssetMediaEngine(host, src);
 const url = urlGenerator.generateTrackFragmentIndexUrl(src);
-
-engine.
-const longestFragment = Object.values(engine.data).reduce((max, fragment) => Math.max(max, fragment.duration / fragment.timescale), 0);
-engine.durationMs = longestFragment * 1e3;
+engine.data = await engine.fetchManifest(url);
+engine.durationMs = Object.values(engine.data).reduce((max, fragment) => Math.max(max, fragment.duration / fragment.timescale), 0) * 1e3;
 if (src.startsWith("/")) engine.src = src.slice(1);
 return engine;
 }
@@ -25,7 +24,7 @@ var AssetMediaEngine = class AssetMediaEngine extends BaseMediaEngine {
 }
 get videoRendition() {
 const videoTrack = this.videoTrackIndex;
-if (!videoTrack || videoTrack.track === void 0) return
+if (!videoTrack || videoTrack.track === void 0) return;
 return {
 trackId: videoTrack.track,
 src: this.src,
@@ -34,7 +33,7 @@ var AssetMediaEngine = class AssetMediaEngine extends BaseMediaEngine {
 }
 get audioRendition() {
 const audioTrack = this.audioTrackIndex;
-if (!audioTrack || audioTrack.track === void 0) return
+if (!audioTrack || audioTrack.track === void 0) return;
 return {
 trackId: audioTrack.track,
 src: this.src
@@ -67,25 +66,37 @@ var AssetMediaEngine = class AssetMediaEngine extends BaseMediaEngine {
 return `/@ef-track/${this.src}?trackId=${trackId}&segmentId=${segmentId}`;
 }
 async fetchInitSegment(rendition, signal) {
-
-
-
-
-
-
+return withSpan("assetEngine.fetchInitSegment", {
+trackId: rendition.trackId || -1,
+src: rendition.src
+}, void 0, async (span) => {
+if (!rendition.trackId) throw new Error("[fetchInitSegment] Track ID is required for asset metadata");
+const url = this.buildInitSegmentUrl(rendition.trackId);
+const initSegment = this.data[rendition.trackId]?.initSegment;
+if (!initSegment) throw new Error("Init segment not found");
+span.setAttribute("offset", initSegment.offset);
+span.setAttribute("size", initSegment.size);
+const headers = { Range: `bytes=${initSegment.offset}-${initSegment.offset + initSegment.size - 1}` };
+return this.fetchMediaWithHeaders(url, headers, signal);
+});
 }
 async fetchMediaSegment(segmentId, rendition, signal) {
-
-
-
-
-
-
-
-
-
-
-
+return withSpan("assetEngine.fetchMediaSegment", {
+segmentId,
+trackId: rendition.trackId || -1,
+src: rendition.src
+}, void 0, async (span) => {
+if (!rendition.trackId) throw new Error("[fetchMediaSegment] Track ID is required for asset metadata");
+if (segmentId === void 0) throw new Error("Segment ID is not available");
+const url = this.buildMediaSegmentUrl(rendition.trackId, segmentId);
+const mediaSegment = this.data[rendition.trackId]?.segments[segmentId];
+if (!mediaSegment) throw new Error("Media segment not found");
+span.setAttribute("offset", mediaSegment.offset);
+span.setAttribute("size", mediaSegment.size);
+const headers = { Range: `bytes=${mediaSegment.offset}-${mediaSegment.offset + mediaSegment.size - 1}` };
+return this.fetchMediaWithHeaders(url, headers, signal);
+});
+}
 calculateAudioSegmentRange(fromMs, toMs, rendition, _durationMs) {
 if (fromMs >= toMs || !rendition.trackId) {
 console.warn(`calculateAudioSegmentRange: invalid fromMs ${fromMs} toMs ${toMs} rendition ${JSON.stringify(rendition)}`);
@@ -124,9 +135,7 @@ var AssetMediaEngine = class AssetMediaEngine extends BaseMediaEngine {
 const track = this.data[rendition.trackId];
 if (!track) throw new Error("Track not found");
 const { timescale, segments } = track;
-const
-const offsetSeekTimeMs = roundToMilliseconds(seekTimeMs + startTimeOffsetMs);
-const scaledSeekTime = convertToScaledTime(offsetSeekTimeMs, timescale);
+const scaledSeekTime = convertToScaledTime(roundToMilliseconds(seekTimeMs + ("startTimeOffsetMs" in rendition && rendition.startTimeOffsetMs || 0)), timescale);
 for (let i = segments.length - 1; i >= 0; i--) {
 const segment = segments[i];
 const segmentEndTime = segment.cts + segment.duration;
@@ -149,13 +158,7 @@ var AssetMediaEngine = class AssetMediaEngine extends BaseMediaEngine {
 }
 return nearestSegmentIndex;
 }
-getScrubVideoRendition() {
-return void 0;
-}
-/**
-* Get preferred buffer configuration for this media engine
-* AssetMediaEngine uses lower buffering since segments are already optimized
-*/
+getScrubVideoRendition() {}
 getBufferConfig() {
 return {
 videoBufferDurationMs: 2e3,
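Note: the `withSpan` helper imported above lives in the new `package/dist/otel/tracingHelpers.js`, whose source is not included in this excerpt. Judging only from the call sites in these hunks (a span name, an attribute map, an unused third argument, and an async callback that receives the span), a minimal sketch over the OpenTelemetry API could look like the following; the tracer name and the error handling are assumptions, not the package's actual implementation.

```ts
import { trace, SpanStatusCode, type Attributes, type Span } from "@opentelemetry/api";

// Hypothetical sketch of a withSpan(name, attributes, options?, fn) helper
// matching the call shape used in the diff above.
export async function withSpan<T>(
  name: string,
  attributes: Attributes,
  _options: unknown,
  fn: (span: Span) => Promise<T>,
): Promise<T> {
  const tracer = trace.getTracer("@editframe/elements"); // tracer name assumed
  return tracer.startActiveSpan(name, { attributes }, async (span) => {
    try {
      return await fn(span);
    } catch (error) {
      span.recordException(error as Error);
      span.setStatus({ code: SpanStatusCode.ERROR });
      throw error;
    } finally {
      span.end();
    }
  });
}
```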
package/dist/elements/EFMedia/BaseMediaEngine.js

@@ -1,3 +1,4 @@
+import { withSpan } from "../../otel/tracingHelpers.js";
 import { RequestDeduplicator } from "../../transcoding/cache/RequestDeduplicator.js";
 import { SizeAwareLRUCache } from "../../utils/LRUCache.js";
 const mediaCache = new SizeAwareLRUCache(100 * 1024 * 1024);
@@ -6,60 +7,73 @@ var BaseMediaEngine = class {
 constructor(host) {
 this.host = host;
 }
-/**
-* Get video rendition if available. Returns undefined for audio-only assets.
-* Callers should handle undefined gracefully.
-*/
 getVideoRendition() {
 return this.videoRendition;
 }
-/**
-* Get audio rendition if available. Returns undefined for video-only assets.
-* Callers should handle undefined gracefully.
-*/
 getAudioRendition() {
 return this.audioRendition;
 }
-/**
-* Generate cache key for segment requests
-*/
 getSegmentCacheKey(segmentId, rendition) {
 return `${rendition.src}-${rendition.id}-${segmentId}-${rendition.trackId}`;
 }
-/**
-* Unified fetch method with caching and global deduplication
-* All requests (media, manifest, init segments) go through this method
-*/
 async fetchWithCache(url, options) {
-
-
-
-
-
-
-
-
-
-
-
-
-
+return withSpan("mediaEngine.fetchWithCache", {
+url: url.length > 100 ? `${url.substring(0, 100)}...` : url,
+responseType: options.responseType,
+hasHeaders: !!options.headers
+}, void 0, async (span) => {
+const t0 = performance.now();
+const { responseType, headers, signal } = options;
+const cacheKey = headers ? `${url}:${JSON.stringify(headers)}` : url;
+const t1 = performance.now();
+const cached = mediaCache.get(cacheKey);
+const t2 = performance.now();
+span.setAttribute("cacheLookupMs", Math.round((t2 - t1) * 1e3) / 1e3);
+if (cached) {
+span.setAttribute("cacheHit", true);
+if (signal) {
+const t3 = performance.now();
+const result$1 = await this.handleAbortForCachedRequest(cached, signal);
+const t4 = performance.now();
+span.setAttribute("handleAbortMs", Math.round((t4 - t3) * 100) / 100);
+span.setAttribute("totalCacheHitMs", Math.round((t4 - t0) * 100) / 100);
+return result$1;
+}
+span.setAttribute("totalCacheHitMs", Math.round((t2 - t0) * 100) / 100);
+return cached;
+}
+span.setAttribute("cacheHit", false);
+const promise = globalRequestDeduplicator.executeRequest(cacheKey, async () => {
+const fetchStart = performance.now();
+try {
+const response = await this.host.fetch(url, { headers });
+const fetchEnd = performance.now();
+span.setAttribute("fetchMs", fetchEnd - fetchStart);
+if (responseType === "json") return response.json();
+const buffer = await response.arrayBuffer();
+span.setAttribute("sizeBytes", buffer.byteLength);
+return buffer;
+} catch (error) {
+if (error instanceof DOMException && error.name === "AbortError") mediaCache.delete(cacheKey);
+throw error;
+}
+});
+mediaCache.set(cacheKey, promise);
+promise.catch((error) => {
 if (error instanceof DOMException && error.name === "AbortError") mediaCache.delete(cacheKey);
-
+});
+if (signal) {
+const result$1 = await this.handleAbortForCachedRequest(promise, signal);
+const tEnd$1 = performance.now();
+span.setAttribute("totalFetchMs", Math.round((tEnd$1 - t0) * 100) / 100);
+return result$1;
 }
+const result = await promise;
+const tEnd = performance.now();
+span.setAttribute("totalFetchMs", Math.round((tEnd - t0) * 100) / 100);
+return result;
 });
-
-promise.catch((error) => {
-if (error instanceof DOMException && error.name === "AbortError") mediaCache.delete(cacheKey);
-});
-if (signal) return this.handleAbortForCachedRequest(promise, signal);
-return promise;
-}
-/**
-* Handles abort logic for a cached request without affecting the underlying fetch
-* This allows multiple instances to share the same cached request while each
-* manages their own abort behavior
-*/
+}
 handleAbortForCachedRequest(promise, signal) {
 if (signal.aborted) throw new DOMException("Aborted", "AbortError");
 return Promise.race([promise, new Promise((_, reject) => {
@@ -99,39 +113,22 @@ var BaseMediaEngine = class {
 async fetchMediaCacheWithHeaders(url, headers, signal) {
 return this.fetchMediaWithHeaders(url, headers, signal);
 }
-/**
-* Fetch media segment with built-in deduplication
-* Now uses global deduplication for all requests
-*/
 async fetchMediaSegmentWithDeduplication(segmentId, rendition, _signal) {
 const cacheKey = this.getSegmentCacheKey(segmentId, rendition);
 return globalRequestDeduplicator.executeRequest(cacheKey, async () => {
 return this.fetchMediaSegment(segmentId, rendition);
 });
 }
-/**
-* Check if a segment is currently being fetched
-*/
 isSegmentBeingFetched(segmentId, rendition) {
 const cacheKey = this.getSegmentCacheKey(segmentId, rendition);
 return globalRequestDeduplicator.isPending(cacheKey);
 }
-/**
-* Get count of active segment requests (for debugging/monitoring)
-*/
 getActiveSegmentRequestCount() {
 return globalRequestDeduplicator.getPendingCount();
 }
-/**
-* Cancel all active segment requests (for cleanup)
-*/
 cancelAllSegmentRequests() {
 globalRequestDeduplicator.clear();
 }
-/**
-* Calculate audio segments needed for a time range
-* Each media engine implements this based on their segment structure
-*/
 calculateAudioSegmentRange(fromMs, toMs, rendition, durationMs) {
 if (fromMs >= toMs) return [];
 const segments = [];
@@ -169,37 +166,24 @@ var BaseMediaEngine = class {
 }
 return segments;
 }
-/**
-* Check if a segment is cached for a given rendition
-* This needs to check the URL-based cache since that's where segments are actually stored
-*/
 isSegmentCached(segmentId, rendition) {
 try {
 const maybeJitEngine = this;
 if (maybeJitEngine.urlGenerator && typeof maybeJitEngine.urlGenerator.generateSegmentUrl === "function") {
 if (!rendition.id) return false;
 const segmentUrl = maybeJitEngine.urlGenerator.generateSegmentUrl(segmentId, rendition.id, maybeJitEngine);
-
-return urlIsCached;
+return mediaCache.has(segmentUrl);
 }
 const cacheKey = `${rendition.src}-${rendition.id || "default"}-${segmentId}-${rendition.trackId}`;
-
-return isCached;
+return mediaCache.has(cacheKey);
 } catch (error) {
 console.warn(`🎬 BaseMediaEngine: Error checking if segment ${segmentId} is cached:`, error);
 return false;
 }
 }
-/**
-* Get cached segment IDs from a list for a given rendition
-*/
 getCachedSegments(segmentIds, rendition) {
 return new Set(segmentIds.filter((id) => this.isSegmentCached(id, rendition)));
 }
-/**
-* Extract thumbnail canvases at multiple timestamps efficiently
-* Default implementation provides helpful error information
-*/
 async extractThumbnails(timestamps) {
 const engineName = this.constructor.name;
 console.warn(`${engineName}: extractThumbnails not properly implemented. This MediaEngine type does not support thumbnail generation. Supported engines: JitMediaEngine. Requested ${timestamps.length} thumbnail${timestamps.length === 1 ? "" : "s"}.`);
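The reworked `fetchWithCache` keeps its external contract: callers pass a URL plus `{ responseType, headers?, signal? }` and receive parsed JSON or an `ArrayBuffer`, with the LRU cache, global deduplication, and the new span attributes handled internally. A hedged usage sketch follows; the URLs, the Range header, and the non-"json" responseType value are placeholders (the code above only special-cases `"json"` and otherwise returns an ArrayBuffer).

```ts
// Hypothetical calls against a BaseMediaEngine subclass instance (`engine`).
const manifest = await engine.fetchWithCache("/@ef-track/example.mp4?manifest", {
  responseType: "json",
});

const controller = new AbortController();
const segmentBytes = await engine.fetchWithCache(
  "/@ef-track/example.mp4?trackId=1&segmentId=0",
  {
    responseType: "arraybuffer", // anything other than "json" yields an ArrayBuffer
    headers: { Range: "bytes=0-65535" },
    signal: controller.signal, // aborting detaches this caller; the shared, cached fetch continues
  },
);
```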
package/dist/elements/EFMedia/BufferedSeekingInput.d.ts

@@ -37,7 +37,7 @@ export declare class BufferedSeekingInput {
 getVideoTrack(trackId: number): Promise<InputVideoTrack>;
 getFirstVideoTrack(): Promise<InputVideoTrack | undefined>;
 getFirstAudioTrack(): Promise<InputAudioTrack | undefined>;
-getTrackIterator(track: InputTrack): AsyncIterator<MediaSample, any,
+getTrackIterator(track: InputTrack): AsyncIterator<MediaSample, any, any>;
 createTrackSampleSink(track: InputTrack): AudioSampleSink | VideoSampleSink;
 createTrackIterator(track: InputTrack): AsyncGenerator<import('mediabunny').VideoSample, void, unknown> | AsyncGenerator<import('mediabunny').AudioSample, void, unknown>;
 createTrackBuffer(track: InputTrack): SampleBuffer;
package/dist/elements/EFMedia/BufferedSeekingInput.js

@@ -1,7 +1,8 @@
+import { withSpan } from "../../otel/tracingHelpers.js";
 import { roundToMilliseconds } from "./shared/PrecisionUtils.js";
 import { SampleBuffer } from "../SampleBuffer.js";
 import { AudioSampleSink, BufferSource, Input, InputAudioTrack, InputVideoTrack, MP4, VideoSampleSink } from "mediabunny";
-
+var defaultOptions = {
 videoBufferSize: 30,
 audioBufferSize: 100,
 startTimeOffsetMs: 0
@@ -13,12 +14,10 @@ var BufferedSeekingInput = class {
 this.trackBuffers = /* @__PURE__ */ new Map();
 this.trackIteratorCreationPromises = /* @__PURE__ */ new Map();
 this.trackSeekPromises = /* @__PURE__ */ new Map();
-
-
-source: bufferSource,
+this.input = new Input({
+source: new BufferSource(arrayBuffer),
 formats: [MP4]
 });
-this.input = input;
 this.options = {
 ...defaultOptions,
 ...options
@@ -34,8 +33,7 @@ var BufferedSeekingInput = class {
 return buffer ? Object.freeze([...buffer.getContents()]) : [];
 }
 getBufferTimestamps(trackId) {
-
-return contents.map((sample) => sample.timestamp || 0);
+return this.getBufferContents(trackId).map((sample) => sample.timestamp || 0);
 }
 clearBuffer(trackId) {
 const buffer = this.trackBuffers.get(trackId);
@@ -45,30 +43,25 @@ var BufferedSeekingInput = class {
 return this.input.computeDuration();
 }
 async getTrack(trackId) {
-const
-const track = tracks.find((track$1) => track$1.id === trackId);
+const track = (await this.input.getTracks()).find((track$1) => track$1.id === trackId);
 if (!track) throw new Error(`Track ${trackId} not found`);
 return track;
 }
 async getAudioTrack(trackId) {
-const
-const track = tracks.find((track$1) => track$1.id === trackId && track$1.type === "audio");
+const track = (await this.input.getAudioTracks()).find((track$1) => track$1.id === trackId && track$1.type === "audio");
 if (!track) throw new Error(`Track ${trackId} not found`);
 return track;
 }
 async getVideoTrack(trackId) {
-const
-const track = tracks.find((track$1) => track$1.id === trackId && track$1.type === "video");
+const track = (await this.input.getVideoTracks()).find((track$1) => track$1.id === trackId && track$1.type === "video");
 if (!track) throw new Error(`Track ${trackId} not found`);
 return track;
 }
 async getFirstVideoTrack() {
-
-return tracks[0];
+return (await this.input.getVideoTracks())[0];
 }
 async getFirstAudioTrack() {
-
-return tracks[0];
+return (await this.input.getAudioTracks())[0];
 }
 getTrackIterator(track) {
 if (this.trackIterators.has(track.id)) return this.trackIterators.get(track.id);
@@ -82,18 +75,15 @@ var BufferedSeekingInput = class {
 throw new Error(`Unsupported track type ${track.type}`);
 }
 createTrackIterator(track) {
-
-return sampleSink.samples();
+return this.createTrackSampleSink(track).samples();
 }
 createTrackBuffer(track) {
 if (track.type === "audio") {
 const bufferSize$1 = this.options.audioBufferSize;
-
-return sampleBuffer$1;
+return new SampleBuffer(bufferSize$1);
 }
 const bufferSize = this.options.videoBufferSize;
-
-return sampleBuffer;
+return new SampleBuffer(bufferSize);
 }
 getTrackBuffer(track) {
 const maybeTrackBuffer = this.trackBuffers.get(track.id);
@@ -103,21 +93,29 @@ var BufferedSeekingInput = class {
 return trackBuffer;
 }
 async seek(trackId, timeMs) {
-
-
-
-
-
-
-
-
-
-
-
+return withSpan("bufferedInput.seek", {
+trackId,
+timeMs,
+startTimeOffsetMs: this.startTimeOffsetMs
+}, void 0, async (span) => {
+const roundedMediaTimeMs = roundToMilliseconds(timeMs + this.startTimeOffsetMs);
+span.setAttribute("roundedMediaTimeMs", roundedMediaTimeMs);
+const existingSeek = this.trackSeekPromises.get(trackId);
+if (existingSeek) {
+span.setAttribute("waitedForExistingSeek", true);
+await existingSeek;
+}
+const seekPromise = this.seekSafe(trackId, roundedMediaTimeMs);
+this.trackSeekPromises.set(trackId, seekPromise);
+try {
+return await seekPromise;
+} finally {
+this.trackSeekPromises.delete(trackId);
+}
+});
 }
 async resetIterator(track) {
-
-trackBuffer?.clear();
+this.trackBuffers.get(track.id)?.clear();
 const ongoingIteratorCreation = this.trackIteratorCreationPromises.get(track.id);
 if (ongoingIteratorCreation) await ongoingIteratorCreation;
 const iterator = this.trackIterators.get(track.id);
@@ -128,50 +126,108 @@ var BufferedSeekingInput = class {
 }
 #seekLock;
 async seekSafe(trackId, timeMs) {
-
-
-
-
-
-
-
-const firstTimestampMs = roundToMilliseconds(await track.getFirstTimestamp() * 1e3);
-if (roundedTimeMs < firstTimestampMs) {
-console.error("Seeking outside bounds of input", {
-roundedTimeMs,
-firstTimestampMs
-});
-throw new NoSample(`Seeking outside bounds of input ${roundedTimeMs} < ${firstTimestampMs}`);
+return withSpan("bufferedInput.seekSafe", {
+trackId,
+timeMs
+}, void 0, async (span) => {
+if (this.#seekLock) {
+span.setAttribute("waitedForSeekLock", true);
+await this.#seekLock.promise;
 }
-const
-
-
-
+const seekLock = Promise.withResolvers();
+this.#seekLock = seekLock;
+try {
+const track = await this.getTrack(trackId);
+span.setAttribute("trackType", track.type);
+const trackBuffer = this.getTrackBuffer(track);
+const roundedTimeMs = roundToMilliseconds(timeMs);
+const firstTimestampMs = roundToMilliseconds(await track.getFirstTimestamp() * 1e3);
+span.setAttribute("firstTimestampMs", firstTimestampMs);
+if (roundedTimeMs < firstTimestampMs) {
+console.error("Seeking outside bounds of input", {
+roundedTimeMs,
+firstTimestampMs
+});
+throw new NoSample(`Seeking outside bounds of input ${roundedTimeMs} < ${firstTimestampMs}`);
+}
+const bufferContents = trackBuffer.getContents();
+span.setAttribute("bufferContentsLength", bufferContents.length);
+if (bufferContents.length > 0) {
+const bufferStartMs = roundToMilliseconds(trackBuffer.firstTimestamp * 1e3);
+span.setAttribute("bufferStartMs", bufferStartMs);
+if (roundedTimeMs < bufferStartMs) {
+span.setAttribute("resetIterator", true);
+await this.resetIterator(track);
+}
+}
+const alreadyInBuffer = trackBuffer.find(timeMs);
+if (alreadyInBuffer) {
+span.setAttribute("foundInBuffer", true);
+span.setAttribute("bufferSize", trackBuffer.length);
+const contents$1 = trackBuffer.getContents();
+if (contents$1.length > 0) span.setAttribute("bufferTimestamps", contents$1.map((s) => Math.round((s.timestamp || 0) * 1e3)).slice(0, 10).join(","));
+return alreadyInBuffer;
+}
+span.setAttribute("foundInBuffer", false);
+span.setAttribute("bufferSize", trackBuffer.length);
+span.setAttribute("requestedTimeMs", Math.round(timeMs));
+const contents = trackBuffer.getContents();
+if (contents.length > 0) {
+const firstSample = contents[0];
+const lastSample = contents[contents.length - 1];
+if (firstSample && lastSample) {
+const bufferStartMs = Math.round((firstSample.timestamp || 0) * 1e3);
+const bufferEndMs = Math.round(((lastSample.timestamp || 0) + (lastSample.duration || 0)) * 1e3);
+span.setAttribute("bufferStartMs", bufferStartMs);
+span.setAttribute("bufferEndMs", bufferEndMs);
+span.setAttribute("bufferRangeMs", `${bufferStartMs}-${bufferEndMs}`);
+}
+}
+const iterator = this.getTrackIterator(track);
+let iterationCount = 0;
+const decodeStart = performance.now();
+while (true) {
+iterationCount++;
+const iterStart = performance.now();
+const { done, value: decodedSample } = await iterator.next();
+const iterEnd = performance.now();
+if (iterationCount <= 5) span.setAttribute(`iter${iterationCount}Ms`, Math.round((iterEnd - iterStart) * 100) / 100);
+if (decodedSample) {
+trackBuffer.push(decodedSample);
+if (iterationCount <= 5) span.setAttribute(`iter${iterationCount}Timestamp`, Math.round((decodedSample.timestamp || 0) * 1e3));
+}
+const foundSample = trackBuffer.find(roundedTimeMs);
+if (foundSample) {
+const decodeEnd = performance.now();
+span.setAttribute("iterationCount", iterationCount);
+span.setAttribute("decodeMs", Math.round((decodeEnd - decodeStart) * 100) / 100);
+span.setAttribute("avgIterMs", Math.round((decodeEnd - decodeStart) / iterationCount * 100) / 100);
+span.setAttribute("foundSample", true);
+span.setAttribute("foundTimestamp", Math.round((foundSample.timestamp || 0) * 1e3));
+return foundSample;
+}
+if (done) break;
+}
+span.setAttribute("iterationCount", iterationCount);
+span.setAttribute("reachedEnd", true);
+const finalBufferContents = trackBuffer.getContents();
+if (finalBufferContents.length > 0) {
+const lastSample = finalBufferContents[finalBufferContents.length - 1];
+const lastSampleEndMs = roundToMilliseconds(((lastSample?.timestamp || 0) + (lastSample?.duration || 0)) * 1e3);
+const trackDurationMs = await track.computeDuration() * 1e3;
+const isSeekingToTrackEnd = roundToMilliseconds(timeMs) === roundToMilliseconds(trackDurationMs);
+const isAtEndOfTrack = roundToMilliseconds(timeMs) >= lastSampleEndMs;
+if (isSeekingToTrackEnd && isAtEndOfTrack) {
+span.setAttribute("returnedLastSample", true);
+return lastSample;
+}
+}
+throw new NoSample(`Sample not found for time ${timeMs} in ${track.type} track ${trackId}`);
+} finally {
+this.#seekLock = void 0;
+seekLock.resolve();
 }
-
-if (alreadyInBuffer) return alreadyInBuffer;
-const iterator = this.getTrackIterator(track);
-while (true) {
-const { done, value: decodedSample } = await iterator.next();
-if (decodedSample) trackBuffer.push(decodedSample);
-const foundSample = trackBuffer.find(roundedTimeMs);
-if (foundSample) return foundSample;
-if (done) break;
-}
-const finalBufferContents = trackBuffer.getContents();
-if (finalBufferContents.length > 0) {
-const lastSample = finalBufferContents[finalBufferContents.length - 1];
-const lastSampleEndMs = roundToMilliseconds(((lastSample?.timestamp || 0) + (lastSample?.duration || 0)) * 1e3);
-const trackDurationMs = await track.computeDuration() * 1e3;
-const isSeekingToTrackEnd = roundToMilliseconds(timeMs) === roundToMilliseconds(trackDurationMs);
-const isAtEndOfTrack = roundToMilliseconds(timeMs) >= lastSampleEndMs;
-if (isSeekingToTrackEnd && isAtEndOfTrack) return lastSample;
-}
-throw new NoSample(`Sample not found for time ${timeMs} in ${track.type} track ${trackId}`);
-} finally {
-this.#seekLock = void 0;
-seekLock.resolve();
-}
+});
 }
 };
 export { BufferedSeekingInput };
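Taken together, the `BufferedSeekingInput` changes construct the mediabunny `Input` inline (a `BufferSource` over the raw `ArrayBuffer`, MP4 only) and wrap `seek`/`seekSafe` in spans while keeping the same buffering behaviour. A hedged usage sketch of the public surface shown above; the fetch URL, options, and seek time are placeholders.

```ts
// Hypothetical usage based on the constructor, getFirstVideoTrack(), and seek()
// visible in this diff.
const response = await fetch("/@ef-track/example.mp4");
const arrayBuffer = await response.arrayBuffer();

const input = new BufferedSeekingInput(arrayBuffer, {
  videoBufferSize: 30, // defaults shown in the diff
  audioBufferSize: 100,
  startTimeOffsetMs: 0,
});

const videoTrack = await input.getFirstVideoTrack();
if (videoTrack) {
  // seek() applies the start-time offset, waits on any in-flight seek for the
  // same track, then decodes forward until the buffer contains a matching
  // sample (throwing NoSample if the time cannot be reached).
  const sample = await input.seek(videoTrack.id, 1000);
  console.log(sample.timestamp, sample.duration);
}
```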