@editframe/elements 0.20.4-beta.0 → 0.21.0-beta.0
This diff shows the differences between two publicly released versions of the package, as published to the supported public registries, and is provided for informational purposes only.
- package/dist/DelayedLoadingState.js +0 -27
- package/dist/EF_FRAMEGEN.d.ts +5 -3
- package/dist/EF_FRAMEGEN.js +50 -11
- package/dist/_virtual/_@oxc-project_runtime@0.93.0/helpers/decorate.js +7 -0
- package/dist/elements/ContextProxiesController.js +2 -22
- package/dist/elements/EFAudio.js +4 -8
- package/dist/elements/EFCaptions.js +59 -84
- package/dist/elements/EFImage.js +5 -6
- package/dist/elements/EFMedia/AssetIdMediaEngine.js +2 -4
- package/dist/elements/EFMedia/AssetMediaEngine.js +35 -30
- package/dist/elements/EFMedia/BaseMediaEngine.js +57 -73
- package/dist/elements/EFMedia/BufferedSeekingInput.js +134 -76
- package/dist/elements/EFMedia/JitMediaEngine.js +9 -19
- package/dist/elements/EFMedia/audioTasks/makeAudioBufferTask.js +3 -6
- package/dist/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.js +1 -1
- package/dist/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.js +1 -1
- package/dist/elements/EFMedia/audioTasks/makeAudioInputTask.js +6 -5
- package/dist/elements/EFMedia/audioTasks/makeAudioSeekTask.js +1 -3
- package/dist/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.js +1 -1
- package/dist/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.js +1 -1
- package/dist/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.js +1 -1
- package/dist/elements/EFMedia/shared/AudioSpanUtils.js +4 -16
- package/dist/elements/EFMedia/shared/BufferUtils.js +2 -15
- package/dist/elements/EFMedia/shared/GlobalInputCache.js +0 -24
- package/dist/elements/EFMedia/shared/PrecisionUtils.js +0 -21
- package/dist/elements/EFMedia/shared/ThumbnailExtractor.js +0 -17
- package/dist/elements/EFMedia/tasks/makeMediaEngineTask.js +1 -10
- package/dist/elements/EFMedia/videoTasks/MainVideoInputCache.d.ts +29 -0
- package/dist/elements/EFMedia/videoTasks/MainVideoInputCache.js +32 -0
- package/dist/elements/EFMedia/videoTasks/ScrubInputCache.js +1 -15
- package/dist/elements/EFMedia/videoTasks/makeScrubVideoBufferTask.js +1 -7
- package/dist/elements/EFMedia/videoTasks/makeScrubVideoInputTask.js +8 -5
- package/dist/elements/EFMedia/videoTasks/makeScrubVideoSeekTask.js +12 -13
- package/dist/elements/EFMedia/videoTasks/makeScrubVideoSegmentIdTask.js +1 -1
- package/dist/elements/EFMedia/videoTasks/makeUnifiedVideoSeekTask.js +134 -70
- package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.js +7 -11
- package/dist/elements/EFMedia.js +26 -24
- package/dist/elements/EFSourceMixin.js +5 -7
- package/dist/elements/EFSurface.js +6 -9
- package/dist/elements/EFTemporal.js +19 -37
- package/dist/elements/EFThumbnailStrip.js +16 -59
- package/dist/elements/EFTimegroup.js +95 -90
- package/dist/elements/EFVideo.d.ts +6 -2
- package/dist/elements/EFVideo.js +142 -107
- package/dist/elements/EFWaveform.js +18 -27
- package/dist/elements/SampleBuffer.js +2 -5
- package/dist/elements/TargetController.js +3 -3
- package/dist/elements/durationConverter.js +4 -4
- package/dist/elements/updateAnimations.js +14 -35
- package/dist/gui/ContextMixin.js +23 -52
- package/dist/gui/EFConfiguration.js +7 -7
- package/dist/gui/EFControls.js +5 -5
- package/dist/gui/EFFilmstrip.js +77 -98
- package/dist/gui/EFFitScale.js +5 -6
- package/dist/gui/EFFocusOverlay.js +4 -4
- package/dist/gui/EFPreview.js +4 -4
- package/dist/gui/EFScrubber.js +9 -9
- package/dist/gui/EFTimeDisplay.js +5 -5
- package/dist/gui/EFToggleLoop.js +4 -4
- package/dist/gui/EFTogglePlay.js +5 -5
- package/dist/gui/EFWorkbench.js +5 -5
- package/dist/gui/TWMixin2.js +1 -1
- package/dist/index.d.ts +1 -0
- package/dist/otel/BridgeSpanExporter.d.ts +13 -0
- package/dist/otel/BridgeSpanExporter.js +87 -0
- package/dist/otel/setupBrowserTracing.d.ts +12 -0
- package/dist/otel/setupBrowserTracing.js +30 -0
- package/dist/otel/tracingHelpers.d.ts +34 -0
- package/dist/otel/tracingHelpers.js +113 -0
- package/dist/transcoding/cache/RequestDeduplicator.js +0 -21
- package/dist/transcoding/cache/URLTokenDeduplicator.js +1 -21
- package/dist/transcoding/utils/UrlGenerator.js +2 -19
- package/dist/utils/LRUCache.js +6 -53
- package/package.json +10 -2
- package/src/elements/EFCaptions.browsertest.ts +2 -0
- package/src/elements/EFMedia/AssetMediaEngine.ts +65 -37
- package/src/elements/EFMedia/BaseMediaEngine.ts +110 -52
- package/src/elements/EFMedia/BufferedSeekingInput.ts +218 -101
- package/src/elements/EFMedia/audioTasks/makeAudioInputTask.ts +7 -3
- package/src/elements/EFMedia/videoTasks/MainVideoInputCache.ts +76 -0
- package/src/elements/EFMedia/videoTasks/makeScrubVideoInputTask.ts +16 -10
- package/src/elements/EFMedia/videoTasks/makeScrubVideoSeekTask.ts +7 -1
- package/src/elements/EFMedia/videoTasks/makeUnifiedVideoSeekTask.ts +222 -116
- package/src/elements/EFMedia.ts +16 -1
- package/src/elements/EFTimegroup.browsertest.ts +10 -8
- package/src/elements/EFTimegroup.ts +164 -76
- package/src/elements/EFVideo.browsertest.ts +19 -27
- package/src/elements/EFVideo.ts +203 -101
- package/src/otel/BridgeSpanExporter.ts +150 -0
- package/src/otel/setupBrowserTracing.ts +68 -0
- package/src/otel/tracingHelpers.ts +251 -0
- package/types.json +1 -1
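Note on the new otel/ modules: tracingHelpers, setupBrowserTracing, and BridgeSpanExporter are added in this release, but only their call sites appear in the hunks below. As an orientation aid, a withSpan helper matching the call shape used below (span name, attributes object, an unused third argument, and a callback that receives the span) could be sketched on top of @opentelemetry/api roughly as follows; this is an assumption for illustration, not the package's actual implementation:

    import { trace, SpanStatusCode, type Attributes, type Span, type SpanOptions } from "@opentelemetry/api";

    // Hypothetical sketch; the real otel/tracingHelpers.ts is not shown in this diff.
    const tracer = trace.getTracer("@editframe/elements");

    export async function withSpan<T>(
      name: string,
      attributes: Attributes,
      options: SpanOptions | undefined, // the call sites below pass void 0 here
      fn: (span: Span) => Promise<T>,
    ): Promise<T> {
      return tracer.startActiveSpan(name, { ...options, attributes }, async (span) => {
        try {
          return await fn(span);
        } catch (error) {
          span.recordException(error as Error);
          span.setStatus({ code: SpanStatusCode.ERROR });
          throw error;
        } finally {
          span.end();
        }
      });
    }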
package/dist/elements/EFMedia/videoTasks/makeUnifiedVideoSeekTask.js
CHANGED

@@ -1,7 +1,12 @@
+import { withSpan } from "../../../otel/tracingHelpers.js";
 import { getLatestMediaEngine } from "../tasks/makeMediaEngineTask.js";
+import { BufferedSeekingInput } from "../BufferedSeekingInput.js";
+import { EFMedia } from "../../EFMedia.js";
 import { ScrubInputCache } from "./ScrubInputCache.js";
+import { MainVideoInputCache } from "./MainVideoInputCache.js";
 import { Task } from "@lit/task";
-
+var scrubInputCache = new ScrubInputCache();
+var mainVideoInputCache = new MainVideoInputCache();
 const makeUnifiedVideoSeekTask = (host) => {
   return new Task(host, {
     autoRun: false,
@@ -12,92 +17,151 @@ const makeUnifiedVideoSeekTask = (host) => {
     onComplete: (_value) => {},
     task: async ([desiredSeekTimeMs], { signal }) => {
       const mediaEngine = await getLatestMediaEngine(host, signal);
-      if (!mediaEngine) return void 0;
+      if (!mediaEngine || signal.aborted) return void 0;
       const mainRendition = mediaEngine.videoRendition;
       if (mainRendition) {
         const mainSegmentId = mediaEngine.computeSegmentId(desiredSeekTimeMs, mainRendition);
-        if (mainSegmentId !== void 0 && mediaEngine.isSegmentCached(mainSegmentId, mainRendition))
+        if (mainSegmentId !== void 0 && mediaEngine.isSegmentCached(mainSegmentId, mainRendition)) {
+          const result$1 = await getMainVideoSample(host, mediaEngine, desiredSeekTimeMs, signal);
+          if (signal.aborted) return;
+          return result$1;
+        }
       }
       const scrubSample = await tryGetScrubSample(mediaEngine, desiredSeekTimeMs, signal);
       if (scrubSample || signal.aborted) {
+        if (signal.aborted) return;
         if (scrubSample) startMainQualityUpgrade(host, mediaEngine, desiredSeekTimeMs, signal).catch(() => {});
         return scrubSample;
       }
-
+      const result = await getMainVideoSample(host, mediaEngine, desiredSeekTimeMs, signal);
+      if (signal.aborted) return;
+      return result;
     }
   });
 };
-/**
- * Try to get scrub sample from cache (instant if available)
- */
 async function tryGetScrubSample(mediaEngine, desiredSeekTimeMs, signal) {
- [16 removed lines not shown]
-  const
- [5 removed lines not shown]
+  return withSpan("video.tryGetScrubSample", {
+    desiredSeekTimeMs,
+    src: mediaEngine.src || "unknown"
+  }, void 0, async (span) => {
+    try {
+      let scrubRendition;
+      if (typeof mediaEngine.getScrubVideoRendition === "function") scrubRendition = mediaEngine.getScrubVideoRendition();
+      else if ("data" in mediaEngine && mediaEngine.data?.videoRenditions) scrubRendition = mediaEngine.data.videoRenditions.find((r) => r.id === "scrub");
+      if (!scrubRendition) {
+        span.setAttribute("result", "no-scrub-rendition");
+        return;
+      }
+      const scrubRenditionWithSrc = {
+        ...scrubRendition,
+        src: mediaEngine.src
+      };
+      const segmentId = mediaEngine.computeSegmentId(desiredSeekTimeMs, scrubRenditionWithSrc);
+      if (segmentId === void 0) {
+        span.setAttribute("result", "no-segment-id");
+        return;
+      }
+      const isCached = mediaEngine.isSegmentCached(segmentId, scrubRenditionWithSrc);
+      span.setAttribute("isCached", isCached);
+      if (!isCached) {
+        span.setAttribute("result", "not-cached");
+        return;
+      }
+      const scrubInput = await scrubInputCache.getOrCreateInput(segmentId, async () => {
+        const [initSegment, mediaSegment] = await Promise.all([mediaEngine.fetchInitSegment(scrubRenditionWithSrc, signal), mediaEngine.fetchMediaSegment(segmentId, scrubRenditionWithSrc)]);
+        if (!initSegment || !mediaSegment || signal.aborted) return void 0;
+        const { BufferedSeekingInput: BufferedSeekingInput$1 } = await import("../BufferedSeekingInput.js");
+        const { EFMedia: EFMedia$1 } = await import("../../EFMedia.js");
+        return new BufferedSeekingInput$1(await new Blob([initSegment, mediaSegment]).arrayBuffer(), {
+          videoBufferSize: EFMedia$1.VIDEO_SAMPLE_BUFFER_SIZE,
+          audioBufferSize: EFMedia$1.AUDIO_SAMPLE_BUFFER_SIZE,
+          startTimeOffsetMs: scrubRendition.startTimeOffsetMs
+        });
       });
- [10 removed lines not shown]
+      if (!scrubInput) {
+        span.setAttribute("result", "no-scrub-input");
+        return;
+      }
+      if (signal.aborted) {
+        span.setAttribute("result", "aborted-after-scrub-input");
+        return;
+      }
+      const videoTrack = await scrubInput.getFirstVideoTrack();
+      if (!videoTrack) {
+        span.setAttribute("result", "no-video-track");
+        return;
+      }
+      if (signal.aborted) {
+        span.setAttribute("result", "aborted-after-scrub-track");
+        return;
+      }
+      const sample = await scrubInput.seek(videoTrack.id, desiredSeekTimeMs);
+      span.setAttribute("result", sample ? "success" : "no-sample");
+      return sample;
+    } catch (_error) {
+      if (signal.aborted) {
+        span.setAttribute("result", "aborted");
+        return;
+      }
+      span.setAttribute("result", "error");
+      return;
+    }
+  });
 }
-/**
- * Get main video sample (slower path with fetching)
- */
 async function getMainVideoSample(_host, mediaEngine, desiredSeekTimeMs, signal) {
- [25 removed lines not shown]
+  return withSpan("video.getMainVideoSample", {
+    desiredSeekTimeMs,
+    src: mediaEngine.src || "unknown"
+  }, void 0, async (span) => {
+    try {
+      const videoRendition = mediaEngine.getVideoRendition();
+      if (!videoRendition) throw new Error("Video rendition unavailable after checking videoRendition exists");
+      const segmentId = mediaEngine.computeSegmentId(desiredSeekTimeMs, videoRendition);
+      if (segmentId === void 0) {
+        span.setAttribute("result", "no-segment-id");
+        return;
+      }
+      span.setAttribute("segmentId", segmentId);
+      const mainInput = await mainVideoInputCache.getOrCreateInput(mediaEngine.src, segmentId, videoRendition.id, async () => {
+        const [initSegment, mediaSegment] = await Promise.all([mediaEngine.fetchInitSegment(videoRendition, signal), mediaEngine.fetchMediaSegment(segmentId, videoRendition, signal)]);
+        if (!initSegment || !mediaSegment) return;
+        signal.throwIfAborted();
+        const startTimeOffsetMs = videoRendition?.startTimeOffsetMs;
+        return new BufferedSeekingInput(await new Blob([initSegment, mediaSegment]).arrayBuffer(), {
+          videoBufferSize: EFMedia.VIDEO_SAMPLE_BUFFER_SIZE,
+          audioBufferSize: EFMedia.AUDIO_SAMPLE_BUFFER_SIZE,
+          startTimeOffsetMs
+        });
+      });
+      if (!mainInput) {
+        span.setAttribute("result", "no-segments");
+        return;
+      }
+      if (signal.aborted) {
+        span.setAttribute("result", "aborted-after-input");
+        return;
+      }
+      const videoTrack = await mainInput.getFirstVideoTrack();
+      if (!videoTrack) {
+        span.setAttribute("result", "no-video-track");
        return;
+      }
+      if (signal.aborted) {
+        span.setAttribute("result", "aborted-after-track");
+        return;
+      }
+      const sample = await mainInput.seek(videoTrack.id, desiredSeekTimeMs);
+      span.setAttribute("result", sample ? "success" : "no-sample");
+      return sample;
+    } catch (error) {
+      if (signal.aborted) {
+        span.setAttribute("result", "aborted");
+        return;
+      }
+      throw error;
+    }
+  });
 }
-/**
- * Start background upgrade to main quality (non-blocking)
- */
 async function startMainQualityUpgrade(host, mediaEngine, targetSeekTimeMs, signal) {
   await new Promise((resolve) => setTimeout(resolve, 50));
   if (signal.aborted || host.desiredSeekTimeMs !== targetSeekTimeMs) return;
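MainVideoInputCache is new in this release (see MainVideoInputCache.d.ts/.js in the file list); the hunk above only shows its getOrCreateInput(src, segmentId, renditionId, factory) call site next to the existing ScrubInputCache. A keyed cache that deduplicates concurrent factory calls, roughly in the shape those calls imply, could look like the sketch below; the class name and internals are illustrative assumptions, not the shipped code:

    // Illustrative sketch only; mirrors the getOrCreateInput call shape used above.
    export class KeyedInputCache<T> {
      private entries = new Map<string, Promise<T | undefined>>();

      getOrCreateInput(
        src: string,
        segmentId: number | string,
        renditionId: string,
        factory: () => Promise<T | undefined>,
      ): Promise<T | undefined> {
        const key = `${src}::${renditionId}::${segmentId}`;
        let pending = this.entries.get(key);
        if (!pending) {
          // Store the in-flight promise so concurrent seeks share one fetch/demux.
          pending = factory().catch((error) => {
            this.entries.delete(key); // do not cache failures
            throw error;
          });
          this.entries.set(key, pending);
        }
        return pending;
      }
    }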
package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.js
CHANGED

@@ -21,8 +21,7 @@ const makeVideoBufferTask = (host) => {
     },
     task: async ([seekTimeMs], { signal }) => {
       if (EF_RENDERING()) return currentState;
-      const
-      const engineConfig = mediaEngine.getBufferConfig();
+      const engineConfig = (await getLatestMediaEngine(host, signal)).getBufferConfig();
       const bufferDurationMs = engineConfig.videoBufferDurationMs;
       const maxParallelFetches = engineConfig.maxVideoBufferFetches;
       const currentConfig = {
@@ -32,21 +31,18 @@ const makeVideoBufferTask = (host) => {
       };
       return manageMediaBuffer(seekTimeMs, currentConfig, currentState, host.intrinsicDurationMs || 1e4, signal, {
         computeSegmentId: async (timeMs, rendition) => {
-
-          return mediaEngine$1.computeSegmentId(timeMs, rendition);
+          return (await getLatestMediaEngine(host, signal)).computeSegmentId(timeMs, rendition);
         },
         prefetchSegment: async (segmentId, rendition) => {
-
-          await mediaEngine$1.fetchMediaSegment(segmentId, rendition);
+          await (await getLatestMediaEngine(host, signal)).fetchMediaSegment(segmentId, rendition);
         },
         isSegmentCached: (segmentId, rendition) => {
-          const mediaEngine
-          if (!mediaEngine
-          return mediaEngine
+          const mediaEngine = host.mediaEngineTask.value;
+          if (!mediaEngine) return false;
+          return mediaEngine.isSegmentCached(segmentId, rendition);
         },
         getRendition: async () => {
-
-          return mediaEngine$1.getVideoRendition();
+          return (await getLatestMediaEngine(host, signal)).getVideoRendition();
         },
         logError: console.error
       });

package/dist/elements/EFMedia.js
CHANGED
@@ -1,4 +1,6 @@
+import { __decorate } from "../_virtual/_@oxc-project_runtime@0.93.0/helpers/decorate.js";
 import { isContextMixin } from "../gui/ContextMixin.js";
+import { withSpan } from "../otel/tracingHelpers.js";
 import { UrlGenerator } from "../transcoding/utils/UrlGenerator.js";
 import { makeMediaEngineTask } from "./EFMedia/tasks/makeMediaEngineTask.js";
 import { makeAudioBufferTask } from "./EFMedia/audioTasks/makeAudioBufferTask.js";
@@ -16,8 +18,7 @@ import { FetchMixin } from "./FetchMixin.js";
 import { EFTargetable } from "./TargetController.js";
 import { LitElement, css } from "lit";
 import { property, state } from "lit/decorators.js";
-
-const freqWeightsCache = /* @__PURE__ */ new Map();
+var freqWeightsCache = /* @__PURE__ */ new Map();
 var IgnorableError = class extends Error {};
 const deepGetMediaElements = (element, medias = []) => {
   for (const child of Array.from(element.children)) if (child instanceof EFMedia) medias.push(child);
@@ -54,9 +55,8 @@ var EFMedia = class extends EFTargetable(EFSourceMixin(EFTemporal(FetchMixin(Lit
     this.AUDIO_SAMPLE_BUFFER_SIZE = 120;
   }
   static get observedAttributes() {
-    const parentAttributes = super.observedAttributes || [];
     return [
-      ...
+      ...super.observedAttributes || [],
       "mute",
       "fft-size",
       "fft-decay",
@@ -108,14 +108,12 @@ var EFMedia = class extends EFTargetable(EFSourceMixin(EFTemporal(FetchMixin(Lit
     const newCurrentSourceTimeMs = this.currentSourceTimeMs;
     if (newCurrentSourceTimeMs !== this.desiredSeekTimeMs) this.executeSeek(newCurrentSourceTimeMs);
     if (changedProperties.has("ownCurrentTimeMs")) this.executeSeek(this.currentSourceTimeMs);
-
+    if ([
       "_trimStartMs",
       "_trimEndMs",
       "_sourceInMs",
       "_sourceOutMs"
-    ]
-    const hasDurationChange = durationAffectingProps.some((prop) => changedProperties.has(prop));
-    if (hasDurationChange) {
+    ].some((prop) => changedProperties.has(prop))) {
       if (this.parentTimegroup) {
         this.parentTimegroup.requestUpdate("durationMs");
         this.parentTimegroup.requestUpdate("currentTime");
@@ -142,56 +140,60 @@ var EFMedia = class extends EFTargetable(EFSourceMixin(EFTemporal(FetchMixin(Lit
   async executeSeek(seekToMs) {
     this.desiredSeekTimeMs = seekToMs;
   }
-  /**
-   * Main integration method for EFTimegroup audio playback
-   * Now powered by clean, testable utility functions
-   * Returns undefined if no audio rendition is available
-   */
   async fetchAudioSpanningTime(fromMs, toMs, signal = new AbortController().signal) {
-    return fetchAudioSpanningTime
+    return withSpan("media.fetchAudioSpanningTime", {
+      elementId: this.id || "unknown",
+      tagName: this.tagName.toLowerCase(),
+      fromMs,
+      toMs,
+      durationMs: toMs - fromMs,
+      src: this.src || "none"
+    }, void 0, async () => {
+      return fetchAudioSpanningTime(this, fromMs, toMs, signal);
+    });
   }
 };
-
+__decorate([property({
   type: Number,
   attribute: "audio-buffer-duration"
 })], EFMedia.prototype, "audioBufferDurationMs", void 0);
-
+__decorate([property({
   type: Number,
   attribute: "max-audio-buffer-fetches"
 })], EFMedia.prototype, "maxAudioBufferFetches", void 0);
-
+__decorate([property({
   type: Boolean,
   attribute: "enable-audio-buffering"
 })], EFMedia.prototype, "enableAudioBuffering", void 0);
-
+__decorate([property({
   type: Boolean,
   attribute: "mute",
   reflect: true
 })], EFMedia.prototype, "mute", void 0);
-
+__decorate([property({
   type: Number,
   attribute: "fft-size",
   reflect: true
 })], EFMedia.prototype, "fftSize", void 0);
-
+__decorate([property({
   type: Number,
   attribute: "fft-decay",
   reflect: true
 })], EFMedia.prototype, "fftDecay", void 0);
-
+__decorate([property({
   type: Number,
   attribute: "fft-gain",
   reflect: true
 })], EFMedia.prototype, "fftGain", void 0);
-
+__decorate([property({
   type: Boolean,
   attribute: "interpolate-frequencies",
   reflect: true
 })], EFMedia.prototype, "interpolateFrequencies", void 0);
-
+__decorate([property({
   type: String,
   attribute: "asset-id",
   reflect: true
 })], EFMedia.prototype, "assetId", void 0);
-
+__decorate([state()], EFMedia.prototype, "_desiredSeekTimeMs", void 0);
 export { EFMedia, IgnorableError, deepGetMediaElements };
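The __decorate(...) calls above are the bundler's emit for Lit decorators, now imported from the vendored @oxc-project/runtime helper rather than the bare package. In the TypeScript source the same declarations presumably read as decorated class fields; a simplified sketch (base class and default values are placeholders, only the property options come from the diff):

    import { LitElement } from "lit";
    import { property, state } from "lit/decorators.js";

    // Simplified sketch: the real EFMedia extends a mixin chain, not LitElement directly.
    export class EFMediaSketch extends LitElement {
      @property({ type: Boolean, attribute: "mute", reflect: true })
      mute = false; // placeholder default

      @property({ type: Number, attribute: "fft-size", reflect: true })
      fftSize = 2048; // placeholder default

      @state()
      private _desiredSeekTimeMs?: number;
    }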
package/dist/elements/EFSourceMixin.js
CHANGED

@@ -1,5 +1,5 @@
+import { __decorate } from "../_virtual/_@oxc-project_runtime@0.93.0/helpers/decorate.js";
 import { Task } from "@lit/task";
-import _decorate from "@oxc-project/runtime/helpers/decorate";
 import { property } from "lit/decorators/property.js";
 function EFSourceMixin(superClass, options) {
   class EFSourceElement extends superClass {
@@ -11,17 +11,15 @@ function EFSourceMixin(superClass, options) {
         args: () => [this.src],
         task: async ([src], { signal }) => {
           const md5Path = `/@ef-asset/${src}`;
-
+          return (await fetch(md5Path, {
             method: "HEAD",
             signal
-          });
-          return response.headers.get("etag") ?? void 0;
+          })).headers.get("etag") ?? void 0;
         }
       });
     }
     get apiHost() {
-
-      return apiHost || "https://editframe.dev";
+      return (this.closest("ef-configuration")?.apiHost ?? this.closest("ef-workbench")?.apiHost ?? this.closest("ef-preview")?.apiHost) || "https://editframe.dev";
     }
     productionSrc() {
       if (!this.md5SumLoader.value) throw new Error(`MD5 sum not available for ${this}. Cannot generate production URL`);
@@ -29,7 +27,7 @@ function EFSourceMixin(superClass, options) {
       return `${this.apiHost}/api/v1/${options.assetType}/${this.md5SumLoader.value}`;
     }
   }
-
+  __decorate([property({ type: String })], EFSourceElement.prototype, "src", void 0);
   return EFSourceElement;
 }
 export { EFSourceMixin };
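The md5SumLoader change above folds the HEAD request and the etag read into one expression, and apiHost is now resolved by walking up to the closest ef-configuration, ef-workbench, or ef-preview element; behavior is otherwise unchanged. A standalone equivalent of the loader body, written out for readability (hypothetical function name):

    // Standalone equivalent of the md5SumLoader task body shown above.
    async function fetchAssetEtag(src: string, signal: AbortSignal): Promise<string | undefined> {
      const md5Path = `/@ef-asset/${src}`;
      const response = await fetch(md5Path, { method: "HEAD", signal });
      return response.headers.get("etag") ?? undefined;
    }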
package/dist/elements/EFSurface.js
CHANGED

@@ -1,10 +1,10 @@
+import { __decorate } from "../_virtual/_@oxc-project_runtime@0.93.0/helpers/decorate.js";
 import { TargetController } from "./TargetController.js";
 import { Task } from "@lit/task";
 import { LitElement, css, html } from "lit";
 import { customElement, property, state } from "lit/decorators.js";
-import _decorate from "@oxc-project/runtime/helpers/decorate";
 import { createRef, ref } from "lit/directives/ref.js";
-
+var EFSurface = class EFSurface$1 extends LitElement {
   constructor(..._args) {
     super(..._args);
     this.canvasRef = createRef();
@@ -70,10 +70,7 @@ let EFSurface = class EFSurface$1 extends LitElement {
     const anyEl = from;
     if ("canvasElement" in anyEl) return anyEl.canvasElement ?? null;
     const sr = from.shadowRoot;
-    if (sr)
-      const c = sr.querySelector("canvas");
-      return c ?? null;
-    }
+    if (sr) return sr.querySelector("canvas") ?? null;
     return null;
   }
   copyFromTarget(target) {
@@ -90,7 +87,7 @@ let EFSurface = class EFSurface$1 extends LitElement {
     ctx.drawImage(src, 0, 0, dst.width, dst.height);
   }
 };
-
-
-EFSurface =
+__decorate([state()], EFSurface.prototype, "targetElement", void 0);
+__decorate([property({ type: String })], EFSurface.prototype, "target", void 0);
+EFSurface = __decorate([customElement("ef-surface")], EFSurface);
 export { EFSurface };
package/dist/elements/EFTemporal.js
CHANGED

@@ -1,12 +1,12 @@
 import { EF_INTERACTIVE } from "../EF_INTERACTIVE.js";
+import { __decorate } from "../_virtual/_@oxc-project_runtime@0.93.0/helpers/decorate.js";
 import { durationConverter } from "./durationConverter.js";
 import { consume, createContext } from "@lit/context";
 import { Task } from "@lit/task";
 import { property, state } from "lit/decorators.js";
-import _decorate from "@oxc-project/runtime/helpers/decorate";
 const timegroupContext = createContext(Symbol("timeGroupContext"));
 const isEFTemporal = (obj) => obj[EF_TEMPORAL];
-
+var EF_TEMPORAL = Symbol("EF_TEMPORAL");
 const deepGetTemporalElements = (element, temporals = []) => {
   for (const child of element.children) {
     if (isEFTemporal(child)) temporals.push(child);
@@ -21,7 +21,7 @@ const deepGetElementsWithFrameTasks = (element, elements = []) => {
   }
   return elements;
 };
-
+var temporalCache;
 const resetTemporalCache = () => {
   temporalCache = /* @__PURE__ */ new Map();
   if (typeof requestAnimationFrame !== "undefined") requestAnimationFrame(resetTemporalCache);
@@ -48,8 +48,8 @@ var OwnCurrentTimeController = class {
     this.host.removeController(this);
   }
 };
-
-
+var startTimeMsCache = /* @__PURE__ */ new WeakMap();
+var resetStartTimeMsCache = () => {
   startTimeMsCache = /* @__PURE__ */ new WeakMap();
   if (typeof requestAnimationFrame !== "undefined") requestAnimationFrame(resetStartTimeMsCache);
 };
@@ -96,28 +96,28 @@ const EFTemporal = (superClass) => {
       else this.removeAttribute("duration");
     }
     get trimStartMs() {
-      if (this._trimStartMs === void 0) return
+      if (this._trimStartMs === void 0) return;
       return Math.min(Math.max(this._trimStartMs, 0), this.intrinsicDurationMs ?? 0);
     }
     set trimStartMs(value) {
       this._trimStartMs = value;
     }
     get trimEndMs() {
-      if (this._trimEndMs === void 0) return
+      if (this._trimEndMs === void 0) return;
       return Math.min(this._trimEndMs, this.intrinsicDurationMs ?? 0);
     }
     set trimEndMs(value) {
       this._trimEndMs = value;
     }
     get sourceInMs() {
-      if (this._sourceInMs === void 0) return
+      if (this._sourceInMs === void 0) return;
       return Math.max(this._sourceInMs, 0);
     }
     set sourceInMs(value) {
       this._sourceInMs = value;
     }
     get sourceOutMs() {
-      if (this._sourceOutMs === void 0) return
+      if (this._sourceOutMs === void 0) return;
       if (this.intrinsicDurationMs && this._sourceOutMs > this.intrinsicDurationMs) return this.intrinsicDurationMs;
       return Math.max(this._sourceOutMs, 0);
     }
@@ -137,14 +137,11 @@ const EFTemporal = (superClass) => {
     }
     get explicitDurationMs() {
       if (this.hasExplicitDuration) return this._durationMs;
-      return void 0;
     }
     get hasOwnDuration() {
       return this.intrinsicDurationMs !== void 0 || this.hasExplicitDuration;
     }
-    get intrinsicDurationMs() {
-      return void 0;
-    }
+    get intrinsicDurationMs() {}
     get durationMs() {
       const baseDurationMs = this.intrinsicDurationMs ?? this._durationMs ?? this.parentTimegroup?.durationMs ?? 0;
       if (baseDurationMs === 0) return 0;
@@ -172,9 +169,6 @@ const EFTemporal = (superClass) => {
      while (parent && !isEFTemporal(parent)) parent = parent.parentElement;
      return parent;
    }
-    /**
-     * The start time of the element within its parent timegroup.
-     */
    get startTimeWithinParentMs() {
      if (!this.parentTemporal) return 0;
      return this.startTimeMs - this.parentTemporal.startTimeMs;
@@ -218,70 +212,58 @@ const EFTemporal = (superClass) => {
    get endTimeMs() {
      return this.startTimeMs + this.durationMs;
    }
-    /**
-     * The current time of the element within itself.
-     * Compare with `currentTimeMs` to see the current time with respect to the root timegroup
-     */
    get ownCurrentTimeMs() {
      if (this.rootTimegroup) return Math.min(Math.max(0, this.rootTimegroup.currentTimeMs - this.startTimeMs), this.durationMs);
      return 0;
    }
-    /**
-     * Element's current time for progress calculation.
-     * Non-timegroup temporal elements use their local time (ownCurrentTimeMs)
-     */
    get currentTimeMs() {
      return this.ownCurrentTimeMs;
    }
-    /**
-     * Used to calculate the internal currentTimeMs of the element. This is useful
-     * for mapping to internal media time codes for audio/video elements.
-     */
    get currentSourceTimeMs() {
      const leadingTrimMs = this.sourceInMs || this.trimStartMs || 0;
      return this.ownCurrentTimeMs + leadingTrimMs;
    }
  }
-
+  __decorate([consume({
    context: timegroupContext,
    subscribe: true
  }), property({ attribute: false })], TemporalMixinClass.prototype, "parentTimegroup", null);
-
+  __decorate([property({
    type: String,
    attribute: "offset",
    converter: durationConverter
  })], TemporalMixinClass.prototype, "_offsetMs", void 0);
-
+  __decorate([property({
    type: Number,
    attribute: "duration",
    converter: durationConverter
  })], TemporalMixinClass.prototype, "_durationMs", void 0);
-
+  __decorate([property({
    type: Number,
    attribute: "trimstart",
    converter: durationConverter
  })], TemporalMixinClass.prototype, "_trimStartMs", void 0);
-
+  __decorate([property({
    type: Number,
    attribute: "trimend",
    converter: durationConverter
  })], TemporalMixinClass.prototype, "_trimEndMs", void 0);
-
+  __decorate([property({
    type: Number,
    attribute: "sourcein",
    converter: durationConverter
  })], TemporalMixinClass.prototype, "_sourceInMs", void 0);
-
+  __decorate([property({
    type: Number,
    attribute: "sourceout",
    converter: durationConverter
  })], TemporalMixinClass.prototype, "_sourceOutMs", void 0);
-
+  __decorate([property({
    type: Number,
    attribute: "startoffset",
    converter: durationConverter
  })], TemporalMixinClass.prototype, "_startOffsetMs", void 0);
-
+  __decorate([state()], TemporalMixinClass.prototype, "rootTimegroup", void 0);
  Object.defineProperty(TemporalMixinClass.prototype, EF_TEMPORAL, { value: true });
  return TemporalMixinClass;
 };
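For reference, the trim/source getters above clamp raw attribute values against the intrinsic duration, and currentSourceTimeMs offsets the element's local time by the leading trim (sourceInMs taking precedence over trimStartMs). A small standalone illustration of that mapping, not the mixin itself:

    // Standalone illustration of the clamping and time mapping in the EFTemporal hunks above.
    function clampTrimStart(trimStartMs: number | undefined, intrinsicDurationMs: number | undefined): number | undefined {
      if (trimStartMs === undefined) return undefined;
      return Math.min(Math.max(trimStartMs, 0), intrinsicDurationMs ?? 0);
    }

    function currentSourceTimeMs(ownCurrentTimeMs: number, sourceInMs?: number, trimStartMs?: number): number {
      const leadingTrimMs = sourceInMs || trimStartMs || 0;
      return ownCurrentTimeMs + leadingTrimMs;
    }

    // An element trimmed to start 2 s into its source and currently 1.5 s into its
    // own timeline maps to 3.5 s of source media.
    console.log(currentSourceTimeMs(1500, undefined, clampTrimStart(2000, 10000))); // 3500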