@editframe/elements 0.26.2-beta.0 → 0.26.4-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/elements/EFTimegroup.js +7 -2
- package/dist/elements/EFTimegroup.js.map +1 -1
- package/package.json +2 -2
- package/scripts/build-css.js +3 -3
- package/tsdown.config.ts +1 -1
- package/types.json +1 -1
- package/src/elements/ContextProxiesController.ts +0 -124
- package/src/elements/CrossUpdateController.ts +0 -22
- package/src/elements/EFAudio.browsertest.ts +0 -706
- package/src/elements/EFAudio.ts +0 -56
- package/src/elements/EFCaptions.browsertest.ts +0 -1960
- package/src/elements/EFCaptions.ts +0 -823
- package/src/elements/EFImage.browsertest.ts +0 -120
- package/src/elements/EFImage.ts +0 -113
- package/src/elements/EFMedia/AssetIdMediaEngine.test.ts +0 -224
- package/src/elements/EFMedia/AssetIdMediaEngine.ts +0 -110
- package/src/elements/EFMedia/AssetMediaEngine.browsertest.ts +0 -140
- package/src/elements/EFMedia/AssetMediaEngine.ts +0 -385
- package/src/elements/EFMedia/BaseMediaEngine.browsertest.ts +0 -400
- package/src/elements/EFMedia/BaseMediaEngine.ts +0 -505
- package/src/elements/EFMedia/BufferedSeekingInput.browsertest.ts +0 -386
- package/src/elements/EFMedia/BufferedSeekingInput.ts +0 -430
- package/src/elements/EFMedia/JitMediaEngine.browsertest.ts +0 -226
- package/src/elements/EFMedia/JitMediaEngine.ts +0 -256
- package/src/elements/EFMedia/audioTasks/makeAudioBufferTask.browsertest.ts +0 -679
- package/src/elements/EFMedia/audioTasks/makeAudioBufferTask.ts +0 -117
- package/src/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.ts +0 -246
- package/src/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.browsertest.ts +0 -59
- package/src/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.ts +0 -27
- package/src/elements/EFMedia/audioTasks/makeAudioInputTask.browsertest.ts +0 -55
- package/src/elements/EFMedia/audioTasks/makeAudioInputTask.ts +0 -53
- package/src/elements/EFMedia/audioTasks/makeAudioSeekTask.chunkboundary.regression.browsertest.ts +0 -207
- package/src/elements/EFMedia/audioTasks/makeAudioSeekTask.ts +0 -72
- package/src/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.ts +0 -32
- package/src/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.ts +0 -29
- package/src/elements/EFMedia/audioTasks/makeAudioTasksVideoOnly.browsertest.ts +0 -95
- package/src/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.ts +0 -184
- package/src/elements/EFMedia/shared/AudioSpanUtils.ts +0 -129
- package/src/elements/EFMedia/shared/BufferUtils.ts +0 -342
- package/src/elements/EFMedia/shared/GlobalInputCache.ts +0 -77
- package/src/elements/EFMedia/shared/MediaTaskUtils.ts +0 -44
- package/src/elements/EFMedia/shared/PrecisionUtils.ts +0 -46
- package/src/elements/EFMedia/shared/RenditionHelpers.browsertest.ts +0 -246
- package/src/elements/EFMedia/shared/RenditionHelpers.ts +0 -56
- package/src/elements/EFMedia/shared/ThumbnailExtractor.ts +0 -227
- package/src/elements/EFMedia/tasks/makeMediaEngineTask.browsertest.ts +0 -167
- package/src/elements/EFMedia/tasks/makeMediaEngineTask.ts +0 -88
- package/src/elements/EFMedia/videoTasks/MainVideoInputCache.ts +0 -76
- package/src/elements/EFMedia/videoTasks/ScrubInputCache.ts +0 -61
- package/src/elements/EFMedia/videoTasks/makeScrubVideoBufferTask.ts +0 -114
- package/src/elements/EFMedia/videoTasks/makeScrubVideoInitSegmentFetchTask.ts +0 -35
- package/src/elements/EFMedia/videoTasks/makeScrubVideoInputTask.ts +0 -52
- package/src/elements/EFMedia/videoTasks/makeScrubVideoSeekTask.ts +0 -124
- package/src/elements/EFMedia/videoTasks/makeScrubVideoSegmentFetchTask.ts +0 -44
- package/src/elements/EFMedia/videoTasks/makeScrubVideoSegmentIdTask.ts +0 -32
- package/src/elements/EFMedia/videoTasks/makeUnifiedVideoSeekTask.ts +0 -370
- package/src/elements/EFMedia/videoTasks/makeVideoBufferTask.ts +0 -109
- package/src/elements/EFMedia.browsertest.ts +0 -872
- package/src/elements/EFMedia.ts +0 -341
- package/src/elements/EFSourceMixin.ts +0 -60
- package/src/elements/EFSurface.browsertest.ts +0 -151
- package/src/elements/EFSurface.ts +0 -142
- package/src/elements/EFTemporal.browsertest.ts +0 -215
- package/src/elements/EFTemporal.ts +0 -800
- package/src/elements/EFThumbnailStrip.browsertest.ts +0 -585
- package/src/elements/EFThumbnailStrip.media-engine.browsertest.ts +0 -714
- package/src/elements/EFThumbnailStrip.ts +0 -906
- package/src/elements/EFTimegroup.browsertest.ts +0 -870
- package/src/elements/EFTimegroup.ts +0 -878
- package/src/elements/EFVideo.browsertest.ts +0 -1482
- package/src/elements/EFVideo.ts +0 -564
- package/src/elements/EFWaveform.ts +0 -547
- package/src/elements/FetchContext.browsertest.ts +0 -401
- package/src/elements/FetchMixin.ts +0 -38
- package/src/elements/SampleBuffer.ts +0 -94
- package/src/elements/TargetController.browsertest.ts +0 -230
- package/src/elements/TargetController.ts +0 -224
- package/src/elements/TimegroupController.ts +0 -26
- package/src/elements/durationConverter.ts +0 -35
- package/src/elements/parseTimeToMs.ts +0 -9
- package/src/elements/printTaskStatus.ts +0 -16
- package/src/elements/renderTemporalAudio.ts +0 -108
- package/src/elements/updateAnimations.browsertest.ts +0 -1884
- package/src/elements/updateAnimations.ts +0 -217
- package/src/elements/util.ts +0 -24
- package/src/gui/ContextMixin.browsertest.ts +0 -860
- package/src/gui/ContextMixin.ts +0 -562
- package/src/gui/Controllable.browsertest.ts +0 -258
- package/src/gui/Controllable.ts +0 -41
- package/src/gui/EFConfiguration.ts +0 -40
- package/src/gui/EFControls.browsertest.ts +0 -389
- package/src/gui/EFControls.ts +0 -195
- package/src/gui/EFDial.browsertest.ts +0 -84
- package/src/gui/EFDial.ts +0 -172
- package/src/gui/EFFilmstrip.browsertest.ts +0 -712
- package/src/gui/EFFilmstrip.ts +0 -1349
- package/src/gui/EFFitScale.ts +0 -152
- package/src/gui/EFFocusOverlay.ts +0 -79
- package/src/gui/EFPause.browsertest.ts +0 -202
- package/src/gui/EFPause.ts +0 -73
- package/src/gui/EFPlay.browsertest.ts +0 -202
- package/src/gui/EFPlay.ts +0 -73
- package/src/gui/EFPreview.ts +0 -74
- package/src/gui/EFResizableBox.browsertest.ts +0 -79
- package/src/gui/EFResizableBox.ts +0 -898
- package/src/gui/EFScrubber.ts +0 -151
- package/src/gui/EFTimeDisplay.browsertest.ts +0 -237
- package/src/gui/EFTimeDisplay.ts +0 -55
- package/src/gui/EFToggleLoop.ts +0 -35
- package/src/gui/EFTogglePlay.ts +0 -70
- package/src/gui/EFWorkbench.ts +0 -115
- package/src/gui/PlaybackController.ts +0 -527
- package/src/gui/TWMixin.css +0 -6
- package/src/gui/TWMixin.ts +0 -61
- package/src/gui/TargetOrContextMixin.ts +0 -185
- package/src/gui/currentTimeContext.ts +0 -5
- package/src/gui/durationContext.ts +0 -3
- package/src/gui/efContext.ts +0 -6
- package/src/gui/fetchContext.ts +0 -5
- package/src/gui/focusContext.ts +0 -7
- package/src/gui/focusedElementContext.ts +0 -5
- package/src/gui/playingContext.ts +0 -5
- package/src/otel/BridgeSpanExporter.ts +0 -150
- package/src/otel/setupBrowserTracing.ts +0 -73
- package/src/otel/tracingHelpers.ts +0 -251
- package/src/transcoding/cache/RequestDeduplicator.test.ts +0 -170
- package/src/transcoding/cache/RequestDeduplicator.ts +0 -65
- package/src/transcoding/cache/URLTokenDeduplicator.test.ts +0 -182
- package/src/transcoding/cache/URLTokenDeduplicator.ts +0 -101
- package/src/transcoding/types/index.ts +0 -312
- package/src/transcoding/utils/MediaUtils.ts +0 -63
- package/src/transcoding/utils/UrlGenerator.ts +0 -68
- package/src/transcoding/utils/constants.ts +0 -36
- package/src/utils/LRUCache.test.ts +0 -274
- package/src/utils/LRUCache.ts +0 -696
package/src/elements/EFMedia/AssetMediaEngine.ts
@@ -1,385 +0,0 @@
-import type { TrackFragmentIndex } from "@editframe/assets";
-
-import { withSpan } from "../../otel/tracingHelpers.js";
-import type {
-  AudioRendition,
-  InitSegmentPaths,
-  MediaEngine,
-  SegmentTimeRange,
-  VideoRendition,
-} from "../../transcoding/types";
-import type { UrlGenerator } from "../../transcoding/utils/UrlGenerator";
-import type { EFMedia } from "../EFMedia";
-import { BaseMediaEngine } from "./BaseMediaEngine";
-import type { MediaRendition } from "./shared/MediaTaskUtils";
-import {
-  convertToScaledTime,
-  roundToMilliseconds,
-} from "./shared/PrecisionUtils";
-
-export class AssetMediaEngine extends BaseMediaEngine implements MediaEngine {
-  public src: string;
-  protected data: Record<number, TrackFragmentIndex> = {};
-  durationMs = 0;
-
-  constructor(host: EFMedia, src: string) {
-    super(host);
-    this.src = src;
-  }
-
-  static async fetch(host: EFMedia, urlGenerator: UrlGenerator, src: string) {
-    const engine = new AssetMediaEngine(host, src);
-    const url = urlGenerator.generateTrackFragmentIndexUrl(src);
-    const data = await engine.fetchManifest(url);
-    engine.data = data as Record<number, TrackFragmentIndex>;
-
-    // Calculate duration from the data
-    const longestFragment = Object.values(engine.data).reduce(
-      (max, fragment) => Math.max(max, fragment.duration / fragment.timescale),
-      0,
-    );
-    engine.durationMs = longestFragment * 1000;
-
-    if (src.startsWith("/")) {
-      engine.src = src.slice(1);
-    }
-    return engine;
-  }
-
-  get audioTrackIndex() {
-    return Object.values(this.data).find((track) => track.type === "audio");
-  }
-
-  get videoTrackIndex() {
-    return Object.values(this.data).find((track) => track.type === "video");
-  }
-
-  get videoRendition() {
-    const videoTrack = this.videoTrackIndex;
-
-    if (!videoTrack || videoTrack.track === undefined) {
-      return undefined;
-    }
-
-    return {
-      trackId: videoTrack.track,
-      src: this.src,
-      startTimeOffsetMs: videoTrack.startTimeOffsetMs,
-    };
-  }
-
-  get audioRendition() {
-    const audioTrack = this.audioTrackIndex;
-
-    if (!audioTrack || audioTrack.track === undefined) {
-      return undefined;
-    }
-
-    return {
-      trackId: audioTrack.track,
-      src: this.src,
-    };
-  }
-
-  get initSegmentPaths() {
-    const paths: InitSegmentPaths = {};
-
-    if (this.audioTrackIndex !== undefined) {
-      paths.audio = {
-        path: `@ef-track/${this.audioTrackIndex.track}.m4s`,
-        pos: this.audioTrackIndex.initSegment.offset,
-        size: this.audioTrackIndex.initSegment.size,
-      };
-    }
-
-    if (this.videoTrackIndex !== undefined) {
-      paths.video = {
-        path: `/@ef-track/${this.videoTrackIndex.track}.m4s`,
-        pos: this.videoTrackIndex.initSegment.offset,
-        size: this.videoTrackIndex.initSegment.size,
-      };
-    }
-
-    return paths;
-  }
-
-  get templates() {
-    return {
-      initSegment: "/@ef-track/{src}?trackId={trackId}",
-      mediaSegment: "/@ef-track/{src}?trackId={trackId}",
-    };
-  }
-
-  buildInitSegmentUrl(trackId: number) {
-    return `/@ef-track/${this.src}?trackId=${trackId}`;
-  }
-
-  buildMediaSegmentUrl(trackId: number, segmentId: number) {
-    return `/@ef-track/${this.src}?trackId=${trackId}&segmentId=${segmentId}`;
-  }
-
-  async fetchInitSegment(
-    rendition: { trackId: number | undefined; src: string },
-    signal: AbortSignal,
-  ) {
-    return withSpan(
-      "assetEngine.fetchInitSegment",
-      {
-        trackId: rendition.trackId || -1,
-        src: rendition.src,
-      },
-      undefined,
-      async (span) => {
-        if (!rendition.trackId) {
-          throw new Error(
-            "[fetchInitSegment] Track ID is required for asset metadata",
-          );
-        }
-        const url = this.buildInitSegmentUrl(rendition.trackId);
-        const initSegment = this.data[rendition.trackId]?.initSegment;
-        if (!initSegment) {
-          throw new Error("Init segment not found");
-        }
-
-        span.setAttribute("offset", initSegment.offset);
-        span.setAttribute("size", initSegment.size);
-
-        // Use unified fetch method with Range headers
-        const headers = {
-          Range: `bytes=${initSegment.offset}-${initSegment.offset + initSegment.size - 1}`,
-        };
-
-        return this.fetchMediaWithHeaders(url, headers, signal);
-      },
-    );
-  }
-
-  async fetchMediaSegment(
-    segmentId: number,
-    rendition: { trackId: number | undefined; src: string },
-    signal?: AbortSignal,
-  ) {
-    return withSpan(
-      "assetEngine.fetchMediaSegment",
-      {
-        segmentId,
-        trackId: rendition.trackId || -1,
-        src: rendition.src,
-      },
-      undefined,
-      async (span) => {
-        if (!rendition.trackId) {
-          throw new Error(
-            "[fetchMediaSegment] Track ID is required for asset metadata",
-          );
-        }
-        if (segmentId === undefined) {
-          throw new Error("Segment ID is not available");
-        }
-        const url = this.buildMediaSegmentUrl(rendition.trackId, segmentId);
-        const mediaSegment = this.data[rendition.trackId]?.segments[segmentId];
-        if (!mediaSegment) {
-          throw new Error("Media segment not found");
-        }
-
-        span.setAttribute("offset", mediaSegment.offset);
-        span.setAttribute("size", mediaSegment.size);
-
-        // Use unified fetch method with Range headers
-        const headers = {
-          Range: `bytes=${mediaSegment.offset}-${mediaSegment.offset + mediaSegment.size - 1}`,
-        };
-
-        return this.fetchMediaWithHeaders(url, headers, signal);
-      },
-    );
-  }
-
-  /**
-   * Calculate audio segments for variable-duration segments using track fragment index
-   */
-  calculateAudioSegmentRange(
-    fromMs: number,
-    toMs: number,
-    rendition: AudioRendition,
-    _durationMs: number,
-  ): SegmentTimeRange[] {
-    if (fromMs >= toMs || !rendition.trackId) {
-      console.warn(
-        `calculateAudioSegmentRange: invalid fromMs ${fromMs} toMs ${toMs} rendition ${JSON.stringify(
-          rendition,
-        )}`,
-      );
-      return [];
-    }
-
-    const track = this.data[rendition.trackId];
-    if (!track) {
-      console.warn(
-        `calculateAudioSegmentRange: track not found for rendition ${JSON.stringify(
-          rendition,
-        )}`,
-      );
-      return [];
-    }
-
-    const { timescale, segments } = track;
-    const segmentRanges: SegmentTimeRange[] = [];
-
-    for (let i = 0; i < segments.length; i++) {
-      // biome-ignore lint/style/noNonNullAssertion: we know the segment is not null
-      const segment = segments[i]!;
-      const segmentStartTime = segment.cts;
-      const segmentEndTime = segment.cts + segment.duration;
-
-      // Convert to milliseconds
-      const segmentStartMs = (segmentStartTime / timescale) * 1000;
-      const segmentEndMs = (segmentEndTime / timescale) * 1000;
-
-      // Check if segment overlaps with requested time range
-      if (segmentStartMs < toMs && segmentEndMs > fromMs) {
-        segmentRanges.push({
-          segmentId: i, // AssetMediaEngine uses 0-based segment IDs
-          startMs: segmentStartMs,
-          endMs: segmentEndMs,
-        });
-      }
-    }
-    if (segmentRanges.length === 0) {
-      console.warn(
-        `calculateAudioSegmentRange: no segments found for fromMs ${fromMs} toMs ${toMs} rendition ${JSON.stringify(
-          {
-            rendition,
-            track,
-          },
-        )}`,
-      );
-    }
-
-    return segmentRanges;
-  }
-
-  computeSegmentId(seekTimeMs: number, rendition: MediaRendition) {
-    if (!rendition.trackId) {
-      console.warn(
-        `computeSegmentId: trackId not found for rendition ${JSON.stringify(
-          rendition,
-        )}`,
-      );
-      throw new Error(
-        "[computeSegmentId] Track ID is required for asset metadata",
-      );
-    }
-    const track = this.data[rendition.trackId];
-    if (!track) {
-      throw new Error("Track not found");
-    }
-    const { timescale, segments } = track;
-
-    // Apply startTimeOffsetMs to map user timeline to media timeline for segment selection
-    const startTimeOffsetMs =
-      ("startTimeOffsetMs" in rendition && rendition.startTimeOffsetMs) || 0;
-
-    const offsetSeekTimeMs = roundToMilliseconds(
-      seekTimeMs + startTimeOffsetMs,
-    );
-    // Convert to timescale units using consistent precision
-    const scaledSeekTime = convertToScaledTime(offsetSeekTimeMs, timescale);
-
-    // Find the segment that contains the actual seek time
-    for (let i = segments.length - 1; i >= 0; i--) {
-      // biome-ignore lint/style/noNonNullAssertion: we know the segment is not null
-      const segment = segments[i]!;
-      const segmentEndTime = segment.cts + segment.duration;
-
-      // Check if the seek time falls within this segment
-      if (segment.cts <= scaledSeekTime && scaledSeekTime < segmentEndTime) {
-        return i;
-      }
-    }
-
-    // Handle gaps: if no exact segment contains the time, find the nearest one
-    // This handles cases where seek time falls between segments (like 8041.667ms)
-    let nearestSegmentIndex = 0;
-    let nearestDistance = Number.MAX_SAFE_INTEGER;
-
-    for (let i = 0; i < segments.length; i++) {
-      // biome-ignore lint/style/noNonNullAssertion: we know the segment is not null
-      const segment = segments[i]!;
-      const segmentStartTime = segment.cts;
-      const segmentEndTime = segment.cts + segment.duration;
-
-      let distance: number;
-      if (scaledSeekTime < segmentStartTime) {
-        // Time is before this segment
-        distance = segmentStartTime - scaledSeekTime;
-      } else if (scaledSeekTime >= segmentEndTime) {
-        // Time is after this segment
-        distance = scaledSeekTime - segmentEndTime;
-      } else {
-        // Time is within this segment (should have been caught above, but just in case)
-        return i;
-      }
-
-      if (distance < nearestDistance) {
-        nearestDistance = distance;
-        nearestSegmentIndex = i;
-      }
-    }
-
-    return nearestSegmentIndex;
-  }
-
-  getScrubVideoRendition(): VideoRendition | undefined {
-    // AssetMediaEngine does not have a dedicated scrub track
-    return undefined;
-  }
-
-  /**
-   * Get preferred buffer configuration for this media engine
-   * AssetMediaEngine uses lower buffering since segments are already optimized
-   */
-  getBufferConfig() {
-    return {
-      // Buffer just 1 segment ahead (~2 seconds) for assets
-      videoBufferDurationMs: 2000,
-      audioBufferDurationMs: 2000,
-      maxVideoBufferFetches: 1,
-      maxAudioBufferFetches: 1,
-      bufferThresholdMs: 30000, // Timeline-aware buffering threshold
-    };
-  }
-
-  // AssetMediaEngine inherits the default extractThumbnails from BaseMediaEngine
-  // which provides a clear warning that this engine type is not supported
-
-  convertToSegmentRelativeTimestamps(
-    globalTimestamps: number[],
-    segmentId: number,
-    rendition: VideoRendition,
-  ): number[] {
-    {
-      // Asset: MediaBunny expects segment-relative timestamps in seconds
-      // This is because Asset segments are independent timeline fragments
-
-      if (!rendition.trackId) {
-        throw new Error("Track ID is required for asset metadata");
-      }
-      // For AssetMediaEngine, we need to calculate the actual segment start time
-      // using the precise segment boundaries from the track fragment index
-      const trackData = this.data[rendition.trackId];
-      if (!trackData) {
-        throw new Error("Track not found");
-      }
-      const segment = trackData.segments?.[segmentId];
-      if (!segment) {
-        throw new Error("Segment not found");
-      }
-      const segmentStartMs = (segment.cts / trackData.timescale) * 1000;
-
-      return globalTimestamps.map(
-        (globalMs) => (globalMs - segmentStartMs) / 1000,
-      );
-    }
-  }
-}
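For orientation, below is a minimal TypeScript sketch of how the removed AssetMediaEngine was typically driven, based only on the API visible in the hunk above. It is not part of the published diff: the host element, UrlGenerator instance, asset path, seek time, and import paths (which point at the pre-removal src/ tree) are illustrative assumptions.

// Hypothetical usage sketch; paths and arguments are placeholders.
import type { EFMedia } from "./src/elements/EFMedia";
import type { UrlGenerator } from "./src/transcoding/utils/UrlGenerator";
import { AssetMediaEngine } from "./src/elements/EFMedia/AssetMediaEngine";

async function loadAsset(host: EFMedia, urlGenerator: UrlGenerator) {
  // fetch() downloads the track fragment index, derives durationMs,
  // and strips a leading "/" from the src.
  const engine = await AssetMediaEngine.fetch(host, urlGenerator, "/my-asset.mp4");

  const rendition = engine.videoRendition;
  if (!rendition) return;

  // Map a seek time (ms) onto a 0-based segment id, then build the
  // range-request URL the engine would fetch for that segment.
  const segmentId = engine.computeSegmentId(8_000, rendition);
  const url = engine.buildMediaSegmentUrl(rendition.trackId, segmentId);
  console.log(`media segment for 8s: ${url}`);
}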