@editframe/elements 0.26.2-beta.0 → 0.26.4-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/elements/EFTimegroup.js +7 -2
- package/dist/elements/EFTimegroup.js.map +1 -1
- package/package.json +2 -2
- package/scripts/build-css.js +3 -3
- package/tsdown.config.ts +1 -1
- package/types.json +1 -1
- package/src/elements/ContextProxiesController.ts +0 -124
- package/src/elements/CrossUpdateController.ts +0 -22
- package/src/elements/EFAudio.browsertest.ts +0 -706
- package/src/elements/EFAudio.ts +0 -56
- package/src/elements/EFCaptions.browsertest.ts +0 -1960
- package/src/elements/EFCaptions.ts +0 -823
- package/src/elements/EFImage.browsertest.ts +0 -120
- package/src/elements/EFImage.ts +0 -113
- package/src/elements/EFMedia/AssetIdMediaEngine.test.ts +0 -224
- package/src/elements/EFMedia/AssetIdMediaEngine.ts +0 -110
- package/src/elements/EFMedia/AssetMediaEngine.browsertest.ts +0 -140
- package/src/elements/EFMedia/AssetMediaEngine.ts +0 -385
- package/src/elements/EFMedia/BaseMediaEngine.browsertest.ts +0 -400
- package/src/elements/EFMedia/BaseMediaEngine.ts +0 -505
- package/src/elements/EFMedia/BufferedSeekingInput.browsertest.ts +0 -386
- package/src/elements/EFMedia/BufferedSeekingInput.ts +0 -430
- package/src/elements/EFMedia/JitMediaEngine.browsertest.ts +0 -226
- package/src/elements/EFMedia/JitMediaEngine.ts +0 -256
- package/src/elements/EFMedia/audioTasks/makeAudioBufferTask.browsertest.ts +0 -679
- package/src/elements/EFMedia/audioTasks/makeAudioBufferTask.ts +0 -117
- package/src/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.ts +0 -246
- package/src/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.browsertest.ts +0 -59
- package/src/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.ts +0 -27
- package/src/elements/EFMedia/audioTasks/makeAudioInputTask.browsertest.ts +0 -55
- package/src/elements/EFMedia/audioTasks/makeAudioInputTask.ts +0 -53
- package/src/elements/EFMedia/audioTasks/makeAudioSeekTask.chunkboundary.regression.browsertest.ts +0 -207
- package/src/elements/EFMedia/audioTasks/makeAudioSeekTask.ts +0 -72
- package/src/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.ts +0 -32
- package/src/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.ts +0 -29
- package/src/elements/EFMedia/audioTasks/makeAudioTasksVideoOnly.browsertest.ts +0 -95
- package/src/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.ts +0 -184
- package/src/elements/EFMedia/shared/AudioSpanUtils.ts +0 -129
- package/src/elements/EFMedia/shared/BufferUtils.ts +0 -342
- package/src/elements/EFMedia/shared/GlobalInputCache.ts +0 -77
- package/src/elements/EFMedia/shared/MediaTaskUtils.ts +0 -44
- package/src/elements/EFMedia/shared/PrecisionUtils.ts +0 -46
- package/src/elements/EFMedia/shared/RenditionHelpers.browsertest.ts +0 -246
- package/src/elements/EFMedia/shared/RenditionHelpers.ts +0 -56
- package/src/elements/EFMedia/shared/ThumbnailExtractor.ts +0 -227
- package/src/elements/EFMedia/tasks/makeMediaEngineTask.browsertest.ts +0 -167
- package/src/elements/EFMedia/tasks/makeMediaEngineTask.ts +0 -88
- package/src/elements/EFMedia/videoTasks/MainVideoInputCache.ts +0 -76
- package/src/elements/EFMedia/videoTasks/ScrubInputCache.ts +0 -61
- package/src/elements/EFMedia/videoTasks/makeScrubVideoBufferTask.ts +0 -114
- package/src/elements/EFMedia/videoTasks/makeScrubVideoInitSegmentFetchTask.ts +0 -35
- package/src/elements/EFMedia/videoTasks/makeScrubVideoInputTask.ts +0 -52
- package/src/elements/EFMedia/videoTasks/makeScrubVideoSeekTask.ts +0 -124
- package/src/elements/EFMedia/videoTasks/makeScrubVideoSegmentFetchTask.ts +0 -44
- package/src/elements/EFMedia/videoTasks/makeScrubVideoSegmentIdTask.ts +0 -32
- package/src/elements/EFMedia/videoTasks/makeUnifiedVideoSeekTask.ts +0 -370
- package/src/elements/EFMedia/videoTasks/makeVideoBufferTask.ts +0 -109
- package/src/elements/EFMedia.browsertest.ts +0 -872
- package/src/elements/EFMedia.ts +0 -341
- package/src/elements/EFSourceMixin.ts +0 -60
- package/src/elements/EFSurface.browsertest.ts +0 -151
- package/src/elements/EFSurface.ts +0 -142
- package/src/elements/EFTemporal.browsertest.ts +0 -215
- package/src/elements/EFTemporal.ts +0 -800
- package/src/elements/EFThumbnailStrip.browsertest.ts +0 -585
- package/src/elements/EFThumbnailStrip.media-engine.browsertest.ts +0 -714
- package/src/elements/EFThumbnailStrip.ts +0 -906
- package/src/elements/EFTimegroup.browsertest.ts +0 -870
- package/src/elements/EFTimegroup.ts +0 -878
- package/src/elements/EFVideo.browsertest.ts +0 -1482
- package/src/elements/EFVideo.ts +0 -564
- package/src/elements/EFWaveform.ts +0 -547
- package/src/elements/FetchContext.browsertest.ts +0 -401
- package/src/elements/FetchMixin.ts +0 -38
- package/src/elements/SampleBuffer.ts +0 -94
- package/src/elements/TargetController.browsertest.ts +0 -230
- package/src/elements/TargetController.ts +0 -224
- package/src/elements/TimegroupController.ts +0 -26
- package/src/elements/durationConverter.ts +0 -35
- package/src/elements/parseTimeToMs.ts +0 -9
- package/src/elements/printTaskStatus.ts +0 -16
- package/src/elements/renderTemporalAudio.ts +0 -108
- package/src/elements/updateAnimations.browsertest.ts +0 -1884
- package/src/elements/updateAnimations.ts +0 -217
- package/src/elements/util.ts +0 -24
- package/src/gui/ContextMixin.browsertest.ts +0 -860
- package/src/gui/ContextMixin.ts +0 -562
- package/src/gui/Controllable.browsertest.ts +0 -258
- package/src/gui/Controllable.ts +0 -41
- package/src/gui/EFConfiguration.ts +0 -40
- package/src/gui/EFControls.browsertest.ts +0 -389
- package/src/gui/EFControls.ts +0 -195
- package/src/gui/EFDial.browsertest.ts +0 -84
- package/src/gui/EFDial.ts +0 -172
- package/src/gui/EFFilmstrip.browsertest.ts +0 -712
- package/src/gui/EFFilmstrip.ts +0 -1349
- package/src/gui/EFFitScale.ts +0 -152
- package/src/gui/EFFocusOverlay.ts +0 -79
- package/src/gui/EFPause.browsertest.ts +0 -202
- package/src/gui/EFPause.ts +0 -73
- package/src/gui/EFPlay.browsertest.ts +0 -202
- package/src/gui/EFPlay.ts +0 -73
- package/src/gui/EFPreview.ts +0 -74
- package/src/gui/EFResizableBox.browsertest.ts +0 -79
- package/src/gui/EFResizableBox.ts +0 -898
- package/src/gui/EFScrubber.ts +0 -151
- package/src/gui/EFTimeDisplay.browsertest.ts +0 -237
- package/src/gui/EFTimeDisplay.ts +0 -55
- package/src/gui/EFToggleLoop.ts +0 -35
- package/src/gui/EFTogglePlay.ts +0 -70
- package/src/gui/EFWorkbench.ts +0 -115
- package/src/gui/PlaybackController.ts +0 -527
- package/src/gui/TWMixin.css +0 -6
- package/src/gui/TWMixin.ts +0 -61
- package/src/gui/TargetOrContextMixin.ts +0 -185
- package/src/gui/currentTimeContext.ts +0 -5
- package/src/gui/durationContext.ts +0 -3
- package/src/gui/efContext.ts +0 -6
- package/src/gui/fetchContext.ts +0 -5
- package/src/gui/focusContext.ts +0 -7
- package/src/gui/focusedElementContext.ts +0 -5
- package/src/gui/playingContext.ts +0 -5
- package/src/otel/BridgeSpanExporter.ts +0 -150
- package/src/otel/setupBrowserTracing.ts +0 -73
- package/src/otel/tracingHelpers.ts +0 -251
- package/src/transcoding/cache/RequestDeduplicator.test.ts +0 -170
- package/src/transcoding/cache/RequestDeduplicator.ts +0 -65
- package/src/transcoding/cache/URLTokenDeduplicator.test.ts +0 -182
- package/src/transcoding/cache/URLTokenDeduplicator.ts +0 -101
- package/src/transcoding/types/index.ts +0 -312
- package/src/transcoding/utils/MediaUtils.ts +0 -63
- package/src/transcoding/utils/UrlGenerator.ts +0 -68
- package/src/transcoding/utils/constants.ts +0 -36
- package/src/utils/LRUCache.test.ts +0 -274
- package/src/utils/LRUCache.ts +0 -696

package/src/elements/EFMedia.browsertest.ts
@@ -1,872 +0,0 @@
-import { css } from "lit";
-import { customElement } from "lit/decorators.js";
-import { afterEach, beforeEach, describe, vi } from "vitest";
-import { test as baseTest } from "../../test/useMSW.js";
-
-import type { EFConfiguration } from "../gui/EFConfiguration.js";
-import "../gui/EFPreview.js";
-import "../gui/EFWorkbench.js";
-import { JitMediaEngine } from "./EFMedia/JitMediaEngine.js";
-import { EFMedia } from "./EFMedia.js";
-import "./EFTimegroup.js";
-import type { EFTimegroup } from "./EFTimegroup.js";
-import "./EFVideo.js";
-import type { EFPreview } from "../gui/EFPreview.js";
-import { UrlGenerator } from "../transcoding/utils/UrlGenerator.js";
-import { AssetMediaEngine } from "./EFMedia/AssetMediaEngine.js";
-import type { EFVideo } from "./EFVideo.js";
-
-@customElement("test-media")
-class TestMedia extends EFMedia {
-  static styles = [
-    ...EFMedia.styles,
-    css`
-      :host {
-        display: block;
-        width: 100%;
-        height: 100%;
-      }
-      video {
-        width: 100%;
-        height: 100%;
-      }
-    `,
-  ];
-}
-
-declare global {
-  interface HTMLElementTagNameMap {
-    "test-media": TestMedia;
-  }
-}
-
-const test = baseTest.extend<{
-  timegroup: EFTimegroup;
-  preview: EFPreview;
-  jitVideo: EFVideo;
-  configuration: EFConfiguration;
-  urlGenerator: UrlGenerator;
-  host: EFVideo;
-}>({
-  preview: async ({}, use) => {
-    const preview = document.createElement("ef-preview");
-    await use(preview);
-  },
-  timegroup: async ({}, use) => {
-    const timegroup = document.createElement("ef-timegroup");
-    timegroup.setAttribute("mode", "contain");
-    await use(timegroup);
-  },
-  configuration: async ({ expect }, use) => {
-    const configuration = document.createElement("ef-configuration");
-    configuration.innerHTML = `<h1 style="font: 10px monospace">${expect.getState().currentTestName}</h1>`;
-    // Use integrated proxy server (same host/port as test runner)
-    const apiHost = `${window.location.protocol}//${window.location.host}`;
-    configuration.setAttribute("api-host", apiHost);
-    configuration.apiHost = apiHost;
-    configuration.signingURL = "";
-    document.body.appendChild(configuration);
-    await use(configuration);
-  },
-  urlGenerator: async ({}, use) => {
-    // UrlGenerator points to integrated proxy server (same host/port as test runner)
-    const apiHost = `${window.location.protocol}//${window.location.host}`;
-    const generator = new UrlGenerator(() => apiHost);
-    await use(generator);
-  },
-  host: async ({ configuration }, use) => {
-    const host = document.createElement("ef-video");
-    configuration.appendChild(host);
-    host.src = "http://web:3000/head-moov-480p.mp4";
-    await use(host);
-  },
-  jitVideo: async ({ configuration, timegroup, host, preview }, use) => {
-    timegroup.append(host);
-    configuration.append(preview);
-    preview.append(timegroup);
-    await host.mediaEngineTask.run();
-    await use(host);
-  },
-});
-
-describe("JIT Media Engine", () => {
-  test("initializes JitMediaEngine", async ({ jitVideo, expect }) => {
-    const mediaEngine = jitVideo.mediaEngineTask.value;
-    expect(mediaEngine).toBeInstanceOf(JitMediaEngine);
-  });
-
-  test("loads media duration", async ({ jitVideo, expect }) => {
-    expect(jitVideo.intrinsicDurationMs).toBe(10_000);
-  });
-
-  describe("video seek on load", () => {
-    test("seeks to time specified on element", async ({
-      timegroup,
-      jitVideo,
-      expect,
-    }) => {
-      await timegroup.seek(2200);
-      const sample = jitVideo.unifiedVideoSeekTask.value;
-      expect(sample?.timestamp).toBeCloseTo(2.2, 1);
-    });
-  });
-
-  describe("video seeking", () => {
-    test("seeks to 0 seconds and loads first frame", async ({
-      timegroup,
-      jitVideo,
-      expect,
-    }) => {
-      // Debug: Check what segment should be loaded for 0ms
-      const mediaEngine = await (jitVideo as any).mediaEngineTask.taskComplete;
-      const videoRendition = mediaEngine?.getVideoRendition();
-      const expectedSegmentId = mediaEngine?.computeSegmentId(
-        0,
-        videoRendition,
-      );
-      console.log(`MediaEngine.computeSegmentId(0ms) = ${expectedSegmentId}`);
-
-      timegroup.currentTimeMs = 0;
-      await timegroup.seekTask.taskComplete;
-
-      // Check what segment actually got loaded
-      const actualSegmentId = (jitVideo as any).unifiedVideoSeekTask.value;
-      console.log(`videoSegmentIdTask.value = ${actualSegmentId}`);
-
-      const frame = await (jitVideo as any).unifiedVideoSeekTask.taskComplete;
-      console.log(`Frame timestamp when seeking to 0ms: ${frame?.timestamp}`);
-
-      expect(frame).toBeDefined();
-      expect(frame?.timestamp).toEqual(0);
-    });
-
-    test("seeks to 3 seconds and loads frame", async ({
-      timegroup,
-      jitVideo,
-      expect,
-    }) => {
-      await timegroup.waitForMediaDurations();
-      await timegroup.seek(3000);
-      const frame = jitVideo.unifiedVideoSeekTask.value;
-      expect(frame?.timestamp).toBeCloseTo(3, 1);
-    });
-
-    test("seeks to 5 seconds and loads frame", async ({
-      timegroup,
-      jitVideo,
-      expect,
-    }) => {
-      await timegroup.waitForMediaDurations();
-      await timegroup.seek(5000);
-      const frame = jitVideo.unifiedVideoSeekTask.value;
-      expect(frame?.timestamp).toBeCloseTo(5, 1);
-    });
-
-    test("seeks ahead in increments", async ({
-      timegroup,
-      jitVideo,
-      expect,
-    }) => {
-      await timegroup.waitForMediaDurations();
-
-      // Test seeking in larger increments to avoid CI timeouts
-      // while still validating incremental seeking works
-      const testPoints = [0, 500, 1000, 1500, 2000, 2500, 3000];
-
-      for (const timeMs of testPoints) {
-        await timegroup.seek(timeMs);
-        const frame = jitVideo.unifiedVideoSeekTask.value;
-        expect(frame).toBeDefined();
-        expect(frame?.timestamp).toBeCloseTo(timeMs / 1000, 1);
-      }
-    });
-  });
-
-  describe("boundary seeking", () => {
-    test.skip("segment 2 track range and segment 3 track range have no gap between them", async ({
-      expect,
-      jitVideo,
-      timegroup,
-    }) => {
-      // SKIP: audioSeekTask is not part of the audio rendering pipeline
-      await timegroup.waitForMediaDurations();
-      timegroup.currentTimeMs = 1000;
-      await timegroup.frameTask.taskComplete;
-
-      timegroup.currentTimeMs = 2026.6666666666663;
-      await timegroup.frameTask.taskComplete;
-      const sample = await jitVideo.unifiedVideoSeekTask.taskComplete;
-      expect(sample?.timestamp).toBeCloseTo(2, 1);
-    });
-
-    test("Can seek audio to 4025.0000000000005ms in head-moov-480p.mp4", async ({
-      expect,
-      jitVideo,
-      timegroup,
-    }) => {
-      await timegroup.waitForMediaDurations();
-      timegroup.currentTimeMs = 2026.6666666666663;
-      await expect(
-        jitVideo.audioSeekTask.taskComplete,
-      ).resolves.to.not.toThrowError();
-    });
-
-    test("can seek audio to 4050ms in head-moov-480p.mp4", async ({
-      expect,
-      jitVideo,
-      timegroup,
-    }) => {
-      timegroup.currentTimeMs = 4050;
-      jitVideo.desiredSeekTimeMs = 4050;
-      await expect(
-        jitVideo.audioSeekTask.taskComplete,
-      ).resolves.to.not.toThrowError();
-    });
-
-    // test.only("computes correct audio segment id for 4025.0000000000005ms", async ({ expect, jitVideo, timegroup }) => {
-    // timegroup.currentTimeMs = 4025.0000000000005;
-    // await expect(jitVideo.audioSegmentIdTask.taskComplete).resolves.toBe(2);
-    // });
-  });
-});
-
-describe("Media Engine Selection", () => {
-  const remoteSrc = "http://web:3000/head-moov-480p.mp4";
-  const localSrc = "10s-bars.mp4";
-
-  test("defaults to JitMediaEngine for remote URLs without a configuration element", async ({
-    expect,
-  }) => {
-    const video = document.createElement("ef-video");
-    video.src = remoteSrc;
-    document.body.appendChild(video);
-    await video.mediaEngineTask.run();
-    expect(video.mediaEngineTask.value).toBeInstanceOf(JitMediaEngine);
-    video.remove();
-  });
-
-  test("uses JitMediaEngine for remote URLs when wrapped in a default configuration", async ({
-    configuration,
-    expect,
-  }) => {
-    const video = document.createElement("ef-video");
-    video.src = remoteSrc;
-    configuration.appendChild(video); // Fixture `configuration` is already on the page.
-    await video.mediaEngineTask.run();
-    expect(video.mediaEngineTask.value).toBeInstanceOf(JitMediaEngine);
-    video.remove();
-  });
-
-  test("uses JitMediaEngine for remote URLs when configured with media-engine='cloud'", async ({
-    configuration,
-    expect,
-  }) => {
-    configuration.setAttribute("media-engine", "cloud");
-    const video = document.createElement("ef-video");
-    video.src = remoteSrc;
-    configuration.appendChild(video);
-    await video.mediaEngineTask.run();
-    expect(video.mediaEngineTask.value).toBeInstanceOf(JitMediaEngine);
-    video.remove();
-  });
-
-  // Note: media-engine='local' with remote URLs is not supported
-  // AssetMediaEngine is designed for local files and track fragment indexes only
-
-  test("always uses AssetMediaEngine for local src paths", async ({
-    configuration,
-    expect,
-  }) => {
-    configuration.setAttribute("media-engine", "cloud"); // Explicitly set to cloud
-    const video = document.createElement("ef-video");
-    video.src = localSrc;
-    configuration.appendChild(video);
-    await video.mediaEngineTask.run();
-    expect(video.mediaEngineTask.value).toBeInstanceOf(AssetMediaEngine);
-    video.remove();
-  });
-});
-
-describe("EFMedia", () => {
-  beforeEach(() => {
-    // Clean up DOM
-    while (document.body.children.length) {
-      document.body.children[0]?.remove();
-    }
-  });
-
-  afterEach(() => {
-    // Clean up any remaining elements
-    const elements = document.querySelectorAll("test-media");
-    for (const element of elements) {
-      element.remove();
-    }
-  });
-
-  const test = baseTest.extend<{
-    element: TestMedia;
-  }>({
-    element: async ({}, use) => {
-      const element = document.createElement("test-media");
-      document.body.appendChild(element);
-      await use(element);
-      element.remove();
-    },
-  });
-
-  test("should be defined", ({ element, expect }) => {
-    expect(element.tagName).toBe("TEST-MEDIA");
-  });
-
-  describe("mute", () => {
-    test("defaults to false", ({ element, expect }) => {
-      expect(element.mute).toBe(false);
-    });
-
-    test("reads from js property", ({ element, expect }) => {
-      element.mute = true;
-      expect(element.mute).toBe(true);
-    });
-
-    test("reads from dom attribute", ({ element, expect }) => {
-      element.setAttribute("mute", "true");
-      expect(element.mute).toBe(true);
-    });
-
-    test("handles any attribute value as true (standard boolean behavior)", ({
-      element,
-      expect,
-    }) => {
-      element.setAttribute("mute", "false");
-      expect(element.mute).toBe(true); // Standard boolean attributes: any value = true
-    });
-
-    test("reflects property changes to attribute", async ({
-      element,
-      expect,
-    }) => {
-      element.mute = true;
-      await element.updateComplete; // Wait for Lit to update
-      expect(element.hasAttribute("mute")).toBe(true);
-      expect(element.getAttribute("mute")).toBe(""); // Standard boolean reflection
-
-      element.mute = false;
-      await element.updateComplete; // Wait for Lit to update
-      expect(element.hasAttribute("mute")).toBe(false); // Standard boolean reflection removes attribute
-    });
-
-    describe("audio rendering", () => {
-      // Create a separate test context for audio rendering tests that need configuration
-      const audioTest = baseTest.extend<{
-        timegroup: EFTimegroup;
-        configuration: EFConfiguration;
-      }>({
-        timegroup: async ({}, use) => {
-          const timegroup = document.createElement("ef-timegroup");
-          timegroup.setAttribute("mode", "contain");
-          await use(timegroup);
-        },
-        configuration: async ({ expect }, use) => {
-          const configuration = document.createElement("ef-configuration");
-          configuration.innerHTML = `<h1 style="font: 10px monospace">${expect.getState().currentTestName}</h1>`;
-          // Use integrated proxy server (same host/port as test runner)
-          const apiHost = `${window.location.protocol}//${window.location.host}`;
-          configuration.setAttribute("api-host", apiHost);
-          configuration.apiHost = apiHost;
-          configuration.signingURL = ""; // Disable URL signing for tests
-          document.body.appendChild(configuration);
-          await use(configuration);
-          // configuration.remove();
-        },
-      });
-
-      audioTest(
-        "skips muted elements during audio rendering",
-        async ({ configuration, timegroup, expect }) => {
-          // Create a muted media element
-          const mutedElement = document.createElement("test-media");
-          mutedElement.src = "http://web:3000/head-moov-480p.mp4";
-          mutedElement.mute = true;
-          timegroup.append(mutedElement);
-
-          // Create an unmuted media element
-          const unmutedElement = document.createElement("test-media");
-          unmutedElement.src = "http://web:3000/head-moov-480p.mp4";
-          unmutedElement.mute = false;
-          timegroup.append(unmutedElement);
-
-          configuration.append(timegroup);
-
-          // Wait for media engines to initialize
-          await mutedElement.mediaEngineTask.run();
-          await unmutedElement.mediaEngineTask.run();
-
-          // Spy on fetchAudioSpanningTime to verify muted element is skipped
-          const mutedFetchSpy = vi.spyOn(
-            mutedElement,
-            "fetchAudioSpanningTime",
-          );
-          const unmutedFetchSpy = vi.spyOn(
-            unmutedElement,
-            "fetchAudioSpanningTime",
-          );
-
-          // Render a short audio segment
-          try {
-            await timegroup.renderAudio(0, 1000); // 1 second
-          } catch (error) {
-            // Audio rendering might fail in test environment, but we're testing the mute logic
-            console.log("Audio rendering failed (expected in test):", error);
-          }
-
-          // Verify muted element was skipped (no fetch calls)
-          expect(mutedFetchSpy).not.toHaveBeenCalled();
-
-          // Verify unmuted element was processed (would have fetch calls if audio succeeds)
-          // Note: In test environment, this might still be 0 due to audio context limitations
-          // but the important thing is that muted element definitely wasn't called
-          const mutedCalls = mutedFetchSpy.mock.calls.length;
-          const unmutedCalls = unmutedFetchSpy.mock.calls.length;
-
-          expect(mutedCalls).toBe(0);
-          // Unmuted element should either be called (audio works) or both fail equally
-          // The key test is that muted=0 and muted < unmuted (if audio works)
-          expect(mutedCalls).toBeLessThanOrEqual(unmutedCalls);
-
-          mutedFetchSpy.mockRestore();
-          unmutedFetchSpy.mockRestore();
-        },
-      );
-
-      audioTest(
-        "processes unmuted elements normally",
-        async ({ configuration, timegroup, expect }) => {
-          // Create an unmuted media element
-          const element = document.createElement("test-media");
-          element.src = "http://web:3000/head-moov-480p.mp4";
-          element.mute = false;
-          timegroup.append(element);
-
-          configuration.append(timegroup);
-
-          await element.mediaEngineTask.run();
-
-          const fetchSpy = vi.spyOn(element, "fetchAudioSpanningTime");
-
-          try {
-            await timegroup.renderAudio(0, 1000);
-          } catch (error) {
-            // Audio rendering might fail in test environment
-            console.log("Audio rendering failed (expected in test):", error);
-          }
-
-          // The element should not have been skipped due to mute
-          // (whether it actually gets called depends on test environment audio support)
-          expect(element.mute).toBe(false);
-
-          fetchSpy.mockRestore();
-        },
-      );
-
-      audioTest(
-        "handles dynamic mute changes",
-        async ({ configuration, timegroup, expect }) => {
-          const element = document.createElement("test-media");
-          element.src = "http://web:3000/head-moov-480p.mp4";
-          element.mute = false; // Start unmuted
-          timegroup.append(element);
-
-          configuration.append(timegroup);
-
-          await element.mediaEngineTask.run();
-
-          const fetchSpy = vi.spyOn(element, "fetchAudioSpanningTime");
-
-          // First render - unmuted
-          try {
-            await timegroup.renderAudio(0, 500);
-          } catch (error) {
-            console.log("Audio rendering failed (expected in test):", error);
-          }
-
-          const firstCallCount = fetchSpy.mock.calls.length;
-
-          // Mute the element
-          element.mute = true;
-          await element.updateComplete;
-
-          // Second render - muted (should be skipped)
-          try {
-            await timegroup.renderAudio(500, 1000);
-          } catch (error) {
-            console.log("Audio rendering failed (expected in test):", error);
-          }
-
-          const secondCallCount = fetchSpy.mock.calls.length;
-
-          // Verify no additional calls were made when muted
-          expect(secondCallCount).toBe(firstCallCount);
-
-          fetchSpy.mockRestore();
-        },
-      );
-    });
-  });
-
-  describe("audio analysis", () => {
-    const audioAnalysisTest = baseTest.extend<{
-      timegroup: EFTimegroup;
-      configuration: EFConfiguration;
-    }>({
-      timegroup: async ({}, use) => {
-        const timegroup = document.createElement("ef-timegroup");
-        timegroup.setAttribute("mode", "contain");
-        await use(timegroup);
-      },
-      configuration: async ({ expect }, use) => {
-        const configuration = document.createElement("ef-configuration");
-        configuration.innerHTML = `<h1 style="font: 10px monospace">${expect.getState().currentTestName}</h1>`;
-        // Use integrated proxy server (same host/port as test runner)
-        const apiHost = `${window.location.protocol}//${window.location.host}`;
-        configuration.setAttribute("api-host", apiHost);
-        configuration.apiHost = apiHost;
-        configuration.signingURL = ""; // Disable URL signing for tests
-        document.body.appendChild(configuration);
-        await use(configuration);
-      },
-    });
-
-    audioAnalysisTest(
-      "has time domain analysis task",
-      async ({ configuration, timegroup, expect }) => {
-        const element = document.createElement("test-media");
-        element.src = "http://web:3000/head-moov-480p.mp4";
-        timegroup.append(element);
-        configuration.append(timegroup);
-
-        await element.mediaEngineTask.run();
-
-        expect(element.byteTimeDomainTask).toBeDefined();
-        expect(typeof element.byteTimeDomainTask.taskComplete).toBe("object");
-      },
-    );
-
-    audioAnalysisTest(
-      "has frequency analysis task",
-      async ({ configuration, timegroup, expect }) => {
-        const element = document.createElement("test-media");
-        element.src = "http://web:3000/head-moov-480p.mp4";
-        timegroup.append(element);
-        configuration.append(timegroup);
-
-        await element.mediaEngineTask.run();
-
-        expect(element.frequencyDataTask).toBeDefined();
-        expect(typeof element.frequencyDataTask.taskComplete).toBe("object");
-      },
-    );
-
-    audioAnalysisTest(
-      "respects FFT configuration properties",
-      async ({ configuration, timegroup, expect }) => {
-        const element = document.createElement("test-media");
-        element.src = "http://web:3000/head-moov-480p.mp4";
-        element.fftSize = 256;
-        element.fftDecay = 4;
-        element.fftGain = 2.0;
-        element.interpolateFrequencies = true;
-        timegroup.append(element);
-        configuration.append(timegroup);
-
-        await element.mediaEngineTask.run();
-
-        expect(element.fftSize).toBe(256);
-        expect(element.fftDecay).toBe(4);
-        expect(element.fftGain).toBe(2.0);
-        expect(element.interpolateFrequencies).toBe(true);
-        expect(element.shouldInterpolateFrequencies).toBe(true);
-      },
-    );
-
-    audioAnalysisTest(
-      "generates FREQ_WEIGHTS based on fftSize",
-      async ({ configuration, timegroup, expect }) => {
-        const element = document.createElement("test-media");
-        element.src = "http://web:3000/head-moov-480p.mp4";
-        element.fftSize = 128;
-        timegroup.append(element);
-        configuration.append(timegroup);
-
-        await element.mediaEngineTask.run();
-
-        const weights = element.FREQ_WEIGHTS;
-        expect(weights).toBeInstanceOf(Float32Array);
-        expect(weights.length).toBe(element.fftSize / 2); // 64 for fftSize 128
-
-        // Test frequency weighting - lower frequencies should have lower weights
-        expect(weights.length).toBeGreaterThan(0);
-        const firstWeight = weights[0];
-        const lastWeight = weights[weights.length - 1];
-        expect(firstWeight).toBeDefined();
-        expect(lastWeight).toBeDefined();
-        expect(firstWeight!).toBeLessThan(lastWeight!);
-      },
-    );
-  });
-
-  describe("assetId", () => {
-    test("reads from js property", ({ element, expect }) => {
-      element.assetId = "test-asset-123";
-      expect(element.assetId).toBe("test-asset-123");
-    });
-
-    test("reads from dom attribute", ({ element, expect }) => {
-      element.setAttribute("asset-id", "test-asset-123");
-      expect(element.assetId).toBe("test-asset-123");
-    });
-
-    test("defaults to null", ({ element, expect }) => {
-      expect(element.assetId).toBe(null);
-    });
-
-    test("reflects property changes to attribute", async ({
-      element,
-      expect,
-    }) => {
-      element.assetId = "test-asset-456";
-      await element.updateComplete;
-      expect(element.getAttribute("asset-id")).toBe("test-asset-456");
-
-      element.assetId = null;
-      await element.updateComplete;
-      expect(element.hasAttribute("asset-id")).toBe(false);
-    });
-
-    test("reads assetId from html source", async ({ expect }) => {
-      const container = document.createElement("div");
-      container.innerHTML = `<test-media asset-id="test-asset-789"></test-media>`;
-      const media = container.querySelector("test-media") as TestMedia;
-      expect(media).toBeDefined();
-      expect(media.assetId).toBe("test-asset-789");
-    });
-  });
-
-  describe("fftSize", () => {
-    test("defaults to 128", ({ element, expect }) => {
-      expect(element.fftSize).toBe(128);
-    });
-
-    test("reads from js property", ({ element, expect }) => {
-      element.fftSize = 1024;
-      expect(element.fftSize).toBe(1024);
-    });
-
-    test("reads from dom attribute", ({ element, expect }) => {
-      element.setAttribute("fft-size", "1024");
-      expect(element.fftSize).toBe(1024);
-    });
-
-    test("reflects property changes to attribute", async ({
-      element,
-      expect,
-    }) => {
-      element.fftSize = 512;
-      await element.updateComplete;
-      expect(element.getAttribute("fft-size")).toBe("512");
-    });
-  });
-
-  describe("fftDecay", () => {
-    test("defaults to 8", ({ element, expect }) => {
-      expect(element.fftDecay).toBe(8);
-    });
-
-    test("reads from js property", ({ element, expect }) => {
-      element.fftDecay = 16;
-      expect(element.fftDecay).toBe(16);
-    });
-
-    test("reads from dom attribute", ({ element, expect }) => {
-      element.setAttribute("fft-decay", "16");
-      expect(element.fftDecay).toBe(16);
-    });
-
-    test("reflects property changes to attribute", async ({
-      element,
-      expect,
-    }) => {
-      element.fftDecay = 32;
-      await element.updateComplete;
-      expect(element.getAttribute("fft-decay")).toBe("32");
-    });
-  });
-
-  describe("fftGain", () => {
-    test("defaults to 3.0", ({ element, expect }) => {
-      expect(element.fftGain).toBe(3.0);
-    });
-
-    test("reads from js property", ({ element, expect }) => {
-      element.fftGain = 0.5;
-      expect(element.fftGain).toBe(0.5);
-    });
-
-    test("reads from dom attribute", ({ element, expect }) => {
-      element.setAttribute("fft-gain", "0.5");
-      expect(element.fftGain).toBe(0.5);
-    });
-
-    test("reflects property changes to attribute", async ({
-      element,
-      expect,
-    }) => {
-      element.fftGain = 2.5;
-      await element.updateComplete;
-      expect(element.getAttribute("fft-gain")).toBe("2.5");
-    });
-  });
-
-  describe("interpolateFrequencies", () => {
-    test("defaults to false", ({ element, expect }) => {
-      expect(element.interpolateFrequencies).toBe(false);
-    });
-
-    test("reads from js property", ({ element, expect }) => {
-      element.interpolateFrequencies = true;
-      expect(element.interpolateFrequencies).toBe(true);
-    });
-
-    test("reads from dom attribute", ({ element, expect }) => {
-      element.setAttribute("interpolate-frequencies", "true");
-      expect(element.interpolateFrequencies).toBe(true);
-    });
-
-    test("handles any attribute value as true (standard boolean behavior)", ({
-      element,
-      expect,
-    }) => {
-      element.setAttribute("interpolate-frequencies", "false");
-      expect(element.interpolateFrequencies).toBe(true); // Standard boolean attributes: any value = true
-    });
-
-    test("reflects property changes to attribute", async ({
-      element,
-      expect,
-    }) => {
-      element.interpolateFrequencies = true;
-      await element.updateComplete;
-      expect(element.hasAttribute("interpolate-frequencies")).toBe(true);
-      expect(element.getAttribute("interpolate-frequencies")).toBe(""); // Standard boolean reflection
-
-      element.interpolateFrequencies = false;
-      await element.updateComplete;
-      expect(element.hasAttribute("interpolate-frequencies")).toBe(false); // Standard boolean reflection removes attribute
-    });
-  });
-
-  // describe("mediaEngineTask", () => {
-  // test("is defined", ({ element, expect }) => {
-  // expect(element.mediaEngineTask).toBeDefined();
-  // });
-
-  // test("is a task", ({ element, expect }) => {
-  // expect(element.mediaEngineTask).toBeInstanceOf(Task);
-  // });
-
-  // test("throws if assetId is set", async ({ element, expect }) => {
-  // element.assetId = "test-asset-123";
-  // await element.mediaEngineTask.run();
-  // expect(element.mediaEngineTask.error).toBeInstanceOf(Error);
-  // });
-
-  // test("creates JitMediaEngine for http sources", async ({
-  // elementWithJitManifest,
-  // expect,
-  // worker,
-  // }) => {
-  // await elementWithJitManifest.mediaEngineTask.run();
-  // expect(elementWithJitManifest.mediaEngineTask.value).toBeInstanceOf(
-  // JitMediaEngine,
-  // );
-  // });
-
-  // test("creates AssetMediaEngine for local sources", async ({
-  // elementWithAsset,
-  // expect,
-  // }) => {
-  // await elementWithAsset.mediaEngineTask.run();
-  // expect(elementWithAsset.mediaEngineTask.value).toBeInstanceOf(
-  // AssetMediaEngine,
-  // );
-  // });
-  // });
-
-  // describe("Video Buffering Integration", () => {
-  // test("videoBufferTask is available and configured", ({
-  // element,
-  // expect,
-  // }) => {
-  // expect(element.videoBufferTask).toBeDefined();
-  // expect(element.videoBufferDurationMs).toBe(60000); // 60 seconds default
-  // expect(element.maxVideoBufferFetches).toBe(2); // 2 parallel fetches default
-  // expect(element.enableVideoBuffering).toBe(true); // enabled by default
-  // });
-
-  // test("buffer configuration can be customized", ({ element, expect }) => {
-  // element.videoBufferDurationMs = 45000;
-  // element.maxVideoBufferFetches = 3;
-  // element.enableVideoBuffering = false;
-
-  // expect(element.videoBufferDurationMs).toBe(45000);
-  // expect(element.maxVideoBufferFetches).toBe(3);
-  // expect(element.enableVideoBuffering).toBe(false);
-  // });
-
-  // test("buffer task starts automatically with JIT asset", async ({
-  // elementWithJitManifest,
-  // expect,
-  // }) => {
-  // const element = elementWithJitManifest;
-
-  // // Wait for media engine to initialize
-  // await element.mediaEngineTask.taskComplete;
-
-  // // Buffer task should be available and have started
-  // expect(element.videoBufferTask).toBeDefined();
-  // // Task status should be INITIAL (0) or higher, indicating it's been created
-  // expect(element.videoBufferTask.status).toBeGreaterThanOrEqual(0);
-  // });
-  // });
-  // });
-
-  // // Test to verify buffer tasks use EFMedia properties directly (no hardcoded config duplication)
-  // describe("Buffer Task Property Integration", () => {
-  // test("audio and video buffer tasks use EFMedia properties directly", async ({
-  // element,
-  // expect,
-  // }) => {
-  // // Set custom buffer configuration on the element
-  // element.audioBufferDurationMs = 15000;
-  // element.maxAudioBufferFetches = 3;
-  // element.enableAudioBuffering = false;
-
-  // element.videoBufferDurationMs = 45000;
-  // element.maxVideoBufferFetches = 5;
-  // element.enableVideoBuffering = false;
-
-  // // Verify the tasks are created without requiring hardcoded config
-  // expect(element.audioBufferTask).toBeDefined();
-  // expect(element.videoBufferTask).toBeDefined();
-
-  // // The task configuration should now come directly from element properties
-  // // This test ensures no hardcoded config duplication exists
-  // expect(element.audioBufferDurationMs).toBe(15000);
-  // expect(element.maxAudioBufferFetches).toBe(3);
-  // expect(element.enableAudioBuffering).toBe(false);
-
-  // expect(element.videoBufferDurationMs).toBe(45000);
-  // expect(element.maxVideoBufferFetches).toBe(5);
-  // expect(element.enableVideoBuffering).toBe(false);
-  // });
-  // });
-});