@editframe/elements 0.16.7-beta.0 → 0.17.6-beta.0

This diff shows the contents of publicly available package versions as published to their respective public registries; it is provided for informational purposes only.
Files changed (101)
  1. package/README.md +30 -0
  2. package/dist/DecoderResetFrequency.test.d.ts +1 -0
  3. package/dist/DecoderResetRecovery.test.d.ts +1 -0
  4. package/dist/DelayedLoadingState.d.ts +48 -0
  5. package/dist/DelayedLoadingState.integration.test.d.ts +1 -0
  6. package/dist/DelayedLoadingState.js +113 -0
  7. package/dist/DelayedLoadingState.test.d.ts +1 -0
  8. package/dist/EF_FRAMEGEN.d.ts +10 -1
  9. package/dist/EF_FRAMEGEN.js +199 -179
  10. package/dist/EF_INTERACTIVE.js +2 -6
  11. package/dist/EF_RENDERING.js +1 -3
  12. package/dist/JitTranscodingClient.browsertest.d.ts +1 -0
  13. package/dist/JitTranscodingClient.d.ts +167 -0
  14. package/dist/JitTranscodingClient.js +373 -0
  15. package/dist/JitTranscodingClient.test.d.ts +1 -0
  16. package/dist/LoadingDebounce.test.d.ts +1 -0
  17. package/dist/LoadingIndicator.browsertest.d.ts +0 -0
  18. package/dist/ManualScrubTest.test.d.ts +1 -0
  19. package/dist/ScrubResolvedFlashing.test.d.ts +1 -0
  20. package/dist/ScrubTrackIntegration.test.d.ts +1 -0
  21. package/dist/ScrubTrackManager.d.ts +96 -0
  22. package/dist/ScrubTrackManager.js +216 -0
  23. package/dist/ScrubTrackManager.test.d.ts +1 -0
  24. package/dist/SegmentSwitchLoading.test.d.ts +1 -0
  25. package/dist/VideoSeekFlashing.browsertest.d.ts +0 -0
  26. package/dist/VideoStuckDiagnostic.test.d.ts +1 -0
  27. package/dist/elements/CrossUpdateController.js +13 -15
  28. package/dist/elements/EFAudio.browsertest.d.ts +0 -0
  29. package/dist/elements/EFAudio.d.ts +1 -1
  30. package/dist/elements/EFAudio.js +30 -43
  31. package/dist/elements/EFCaptions.js +337 -373
  32. package/dist/elements/EFImage.js +64 -90
  33. package/dist/elements/EFMedia.d.ts +98 -33
  34. package/dist/elements/EFMedia.js +1169 -678
  35. package/dist/elements/EFSourceMixin.js +31 -48
  36. package/dist/elements/EFTemporal.d.ts +1 -0
  37. package/dist/elements/EFTemporal.js +266 -360
  38. package/dist/elements/EFTimegroup.d.ts +3 -1
  39. package/dist/elements/EFTimegroup.js +262 -323
  40. package/dist/elements/EFVideo.browsertest.d.ts +0 -0
  41. package/dist/elements/EFVideo.d.ts +90 -2
  42. package/dist/elements/EFVideo.js +408 -111
  43. package/dist/elements/EFWaveform.js +375 -411
  44. package/dist/elements/FetchMixin.js +14 -24
  45. package/dist/elements/MediaController.d.ts +30 -0
  46. package/dist/elements/TargetController.js +130 -156
  47. package/dist/elements/TimegroupController.js +17 -19
  48. package/dist/elements/durationConverter.js +15 -4
  49. package/dist/elements/parseTimeToMs.js +4 -10
  50. package/dist/elements/printTaskStatus.d.ts +2 -0
  51. package/dist/elements/printTaskStatus.js +11 -0
  52. package/dist/elements/updateAnimations.js +39 -59
  53. package/dist/getRenderInfo.js +58 -67
  54. package/dist/gui/ContextMixin.js +203 -288
  55. package/dist/gui/EFConfiguration.js +27 -43
  56. package/dist/gui/EFFilmstrip.js +440 -620
  57. package/dist/gui/EFFitScale.js +112 -135
  58. package/dist/gui/EFFocusOverlay.js +45 -61
  59. package/dist/gui/EFPreview.js +30 -49
  60. package/dist/gui/EFScrubber.js +78 -99
  61. package/dist/gui/EFTimeDisplay.js +49 -70
  62. package/dist/gui/EFToggleLoop.js +17 -34
  63. package/dist/gui/EFTogglePlay.js +37 -58
  64. package/dist/gui/EFWorkbench.js +66 -88
  65. package/dist/gui/TWMixin.js +2 -48
  66. package/dist/gui/TWMixin2.js +31 -0
  67. package/dist/gui/efContext.js +2 -6
  68. package/dist/gui/fetchContext.js +1 -3
  69. package/dist/gui/focusContext.js +1 -3
  70. package/dist/gui/focusedElementContext.js +2 -6
  71. package/dist/gui/playingContext.js +1 -4
  72. package/dist/index.js +5 -30
  73. package/dist/msToTimeCode.js +11 -13
  74. package/dist/style.css +2 -1
  75. package/package.json +3 -3
  76. package/src/elements/EFAudio.browsertest.ts +569 -0
  77. package/src/elements/EFAudio.ts +4 -6
  78. package/src/elements/EFCaptions.browsertest.ts +0 -1
  79. package/src/elements/EFImage.browsertest.ts +0 -1
  80. package/src/elements/EFMedia.browsertest.ts +147 -115
  81. package/src/elements/EFMedia.ts +1339 -307
  82. package/src/elements/EFTemporal.browsertest.ts +0 -1
  83. package/src/elements/EFTemporal.ts +11 -0
  84. package/src/elements/EFTimegroup.ts +73 -10
  85. package/src/elements/EFVideo.browsertest.ts +680 -0
  86. package/src/elements/EFVideo.ts +729 -50
  87. package/src/elements/EFWaveform.ts +4 -4
  88. package/src/elements/MediaController.ts +108 -0
  89. package/src/elements/__screenshots__/EFMedia.browsertest.ts/EFMedia-JIT-audio-playback-audioBufferTask-should-work-in-JIT-mode-without-URL-errors-1.png +0 -0
  90. package/src/elements/printTaskStatus.ts +16 -0
  91. package/src/elements/updateAnimations.ts +6 -0
  92. package/src/gui/TWMixin.ts +10 -3
  93. package/test/EFVideo.frame-tasks.browsertest.ts +524 -0
  94. package/test/EFVideo.framegen.browsertest.ts +118 -0
  95. package/test/createJitTestClips.ts +293 -0
  96. package/test/useAssetMSW.ts +49 -0
  97. package/test/useMSW.ts +31 -0
  98. package/types.json +1 -1
  99. package/dist/gui/TWMixin.css.js +0 -4
  100. /package/dist/elements/{TargetController.test.d.ts → TargetController.browsertest.d.ts} +0 -0
  101. /package/src/elements/{TargetController.test.ts → TargetController.browsertest.ts} +0 -0
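
The headline change in this release is a new just-in-time transcoding path for EFMedia (see the new JitTranscodingClient files above and the EFMedia hunk below). The sketch that follows is illustrative only and not taken from the package docs: it exercises the new mode, prefetch-segments, cache-size, and enable-prefetch attributes that connectedCallback reads in the diff, with their defaults ("auto", 3, 20, true). The "ef-video" tag name and the clip URL are assumptions for illustration.

// Hypothetical usage sketch (TypeScript); tag name and URL are assumptions.
const media = document.querySelector("ef-video");
if (media) {
  media.setAttribute("src", "/clips/example.mp4"); // hypothetical clip
  media.setAttribute("mode", "auto");              // "auto" | "asset" | "jit-transcode"
  media.setAttribute("prefetch-segments", "3");    // default 3
  media.setAttribute("cache-size", "20");          // default 20
  media.setAttribute("enable-prefetch", "true");   // default true
}
// With mode="auto", the new effectiveMode getter resolves to "asset" when an
// asset-id is set (or src is missing) and to "jit-transcode" when
// JitTranscodingClient.isJitTranscodeEligible(src) returns true.
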
@@ -1,697 +1,1188 @@
- import { Task } from "@lit/task";
- import { deepArrayEquals } from "@lit/task/deep-equals.js";
- import debug from "debug";
- import { css, LitElement } from "lit";
- import { property, state } from "lit/decorators.js";
- import { VideoAsset } from "@editframe/assets/EncodedAsset.js";
- import { MP4File } from "@editframe/assets/MP4File.js";
  import { EF_INTERACTIVE } from "../EF_INTERACTIVE.js";
+ import { JitTranscodingClient } from "../JitTranscodingClient.js";
  import { EFSourceMixin } from "./EFSourceMixin.js";
  import { EFTemporal } from "./EFTemporal.js";
  import { FetchMixin } from "./FetchMixin.js";
  import { EFTargetable } from "./TargetController.js";
  import { updateAnimations } from "./updateAnimations.js";
- var __defProp = Object.defineProperty;
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
- var __decorateClass = (decorators, target, key, kind) => {
-   var result = kind > 1 ? void 0 : kind ? __getOwnPropDesc(target, key) : target;
-   for (var i = decorators.length - 1, decorator; i >= 0; i--)
-     if (decorator = decorators[i])
-       result = (kind ? decorator(target, key, result) : decorator(result)) || result;
-   if (kind && result) __defProp(target, key, result);
-   return result;
- };
+ import { Task } from "@lit/task";
+ import debug from "debug";
+ import { LitElement, css } from "lit";
+ import { property, state } from "lit/decorators.js";
+ import _decorate from "@oxc-project/runtime/helpers/decorate";
+ import { VideoAsset } from "@editframe/assets/EncodedAsset.js";
+ import { MP4File } from "@editframe/assets/MP4File.js";
+ import { deepArrayEquals } from "@lit/task/deep-equals.js";
  const log = debug("ef:elements:EFMedia");
  const freqWeightsCache = /* @__PURE__ */ new Map();
- class LRUCache {
-   constructor(maxSize) {
-     this.cache = /* @__PURE__ */ new Map();
-     this.maxSize = maxSize;
-   }
-   get(key) {
-     const value = this.cache.get(key);
-     if (value) {
-       this.cache.delete(key);
-       this.cache.set(key, value);
-     }
-     return value;
-   }
-   set(key, value) {
-     if (this.cache.has(key)) {
-       this.cache.delete(key);
-     } else if (this.cache.size >= this.maxSize) {
-       const firstKey = this.cache.keys().next().value;
-       if (firstKey) {
-         this.cache.delete(firstKey);
-       }
-     }
-     this.cache.set(key, value);
-   }
- }
+ var LRUCache = class {
+   constructor(maxSize) {
+     this.cache = /* @__PURE__ */ new Map();
+     this.maxSize = maxSize;
+   }
+   get(key) {
+     const value = this.cache.get(key);
+     if (value) {
+       this.cache.delete(key);
+       this.cache.set(key, value);
+     }
+     return value;
+   }
+   set(key, value) {
+     if (this.cache.has(key)) this.cache.delete(key);
+     else if (this.cache.size >= this.maxSize) {
+       const firstKey = this.cache.keys().next().value;
+       if (firstKey) this.cache.delete(firstKey);
+     }
+     this.cache.set(key, value);
+   }
+ };
  const deepGetMediaElements = (element, medias = []) => {
-   for (const child of Array.from(element.children)) {
-     if (child instanceof EFMedia) {
-       medias.push(child);
-     } else {
-       deepGetMediaElements(child, medias);
-     }
-   }
-   return medias;
+   for (const child of Array.from(element.children)) if (child instanceof EFMedia) medias.push(child);
+   else deepGetMediaElements(child, medias);
+   return medias;
  };
- const _EFMedia = class _EFMedia2 extends EFTargetable(
-   EFSourceMixin(EFTemporal(FetchMixin(LitElement)), {
-     assetType: "isobmff_files"
-   })
- ) {
-   constructor() {
-     super(...arguments);
-     this.currentTimeMs = 0;
-     this.#assetId = null;
-     this.trackFragmentIndexLoader = new Task(this, {
-       args: () => [this.fragmentIndexPath(), this.fetch],
-       task: async ([fragmentIndexPath, fetch], { signal }) => {
-         try {
-           const response = await fetch(fragmentIndexPath, { signal });
-           return await response.json();
-         } catch (error) {
-           log("Failed to load track fragment index", error);
-           return void 0;
-         }
-       },
-       onComplete: () => {
-         this.requestUpdate("intrinsicDurationMs");
-         this.requestUpdate("ownCurrentTimeMs");
-         this.rootTimegroup?.requestUpdate("ownCurrentTimeMs");
-         this.rootTimegroup?.requestUpdate("durationMs");
-       }
-     });
-     this.initSegmentsLoader = new Task(this, {
-       autoRun: EF_INTERACTIVE,
-       args: () => [this.trackFragmentIndexLoader.value, this.src, this.fetch],
-       task: async ([fragmentIndex, _src, fetch], { signal }) => {
-         if (!fragmentIndex) {
-           return;
-         }
-         return await Promise.all(
-           Object.entries(fragmentIndex).map(async ([trackId, track]) => {
-             const start = track.initSegment.offset;
-             const end = track.initSegment.offset + track.initSegment.size;
-             const response = await fetch(this.fragmentTrackPath(trackId), {
-               signal,
-               headers: { Range: `bytes=${start}-${end - 1}` }
-             });
-             const buffer = await response.arrayBuffer();
-             buffer.fileStart = 0;
-             const mp4File = new MP4File();
-             mp4File.appendBuffer(buffer, true);
-             mp4File.flush();
-             await mp4File.readyPromise;
-             return { trackId, buffer, mp4File };
-           })
-         );
-       }
-     });
-     this.seekTask = new Task(this, {
-       autoRun: EF_INTERACTIVE,
-       args: () => [
-         this.desiredSeekTimeMs,
-         this.trackFragmentIndexLoader.value,
-         this.initSegmentsLoader.value
-       ],
-       task: async ([seekToMs, fragmentIndex, initSegments], { signal: _signal }) => {
-         if (fragmentIndex === void 0) {
-           return;
-         }
-         if (initSegments === void 0) {
-           return;
-         }
-         const result = {};
-         for (const index of Object.values(fragmentIndex)) {
-           const track = initSegments.find((segment2) => segment2.trackId === String(index.track))?.mp4File.getInfo().tracks[0];
-           if (!track) {
-             throw new Error("Could not finding matching track");
-           }
-           const segment = index.segments.toReversed().find((segment2) => {
-             return segment2.dts / track.timescale * 1e3 <= seekToMs;
-           });
-           const nextSegment = index.segments.find((segment2) => {
-             return segment2.dts / track.timescale * 1e3 > seekToMs;
-           });
-           if (!segment) {
-             return;
-           }
-           result[index.track] = { segment, track, nextSegment };
-         }
-         return result;
-       }
-     });
-     this.fetchSeekTask = new Task(this, {
-       autoRun: EF_INTERACTIVE,
-       argsEqual: deepArrayEquals,
-       args: () => [this.initSegmentsLoader.value, this.seekTask.value, this.fetch],
-       task: async ([initSegments, seekResult, fetch], { signal }) => {
-         if (!initSegments) {
-           return;
-         }
-         if (!seekResult) {
-           return;
-         }
-         const files = {};
-         for (const [trackId, { segment, track, nextSegment }] of Object.entries(
-           seekResult
-         )) {
-           const start = segment.offset;
-           const end = segment.offset + segment.size;
-           const response = await fetch(this.fragmentTrackPath(trackId), {
-             signal,
-             headers: { Range: `bytes=${start}-${end - 1}` }
-           });
-           if (nextSegment) {
-             const nextStart = nextSegment.offset;
-             const nextEnd = nextSegment.offset + nextSegment.size;
-             fetch(this.fragmentTrackPath(trackId), {
-               signal,
-               headers: { Range: `bytes=${nextStart}-${nextEnd - 1}` }
-             }).then(() => {
-               log("Prefetched next segment");
-             }).catch((error) => {
-               log("Failed to prefetch next segment", error);
-             });
-           }
-           const initSegment = Object.values(initSegments).find(
-             (initSegment2) => initSegment2.trackId === String(track.id)
-           );
-           if (!initSegment) {
-             throw new Error("Could not find matching init segment");
-           }
-           const initBuffer = initSegment.buffer;
-           const mediaBuffer = await response.arrayBuffer();
-           files[trackId] = new File([initBuffer, mediaBuffer], "video.mp4", {
-             type: "video/mp4"
-           });
-         }
-         return files;
-       }
-     });
-     this.videoAssetTask = new Task(this, {
-       autoRun: EF_INTERACTIVE,
-       args: () => [this.fetchSeekTask.value],
-       task: async ([files], { signal: _signal }) => {
-         if (!files) {
-           return;
-         }
-         if (!this.defaultVideoTrackId) {
-           return;
-         }
-         const videoFile = files[this.defaultVideoTrackId];
-         if (!videoFile) {
-           return;
-         }
-         for (const frame of this.videoAssetTask.value?.decodedFrames || []) {
-           frame.close();
-         }
-         this.videoAssetTask.value?.videoDecoder?.close();
-         return await VideoAsset.createFromReadableStream(
-           "video.mp4",
-           videoFile.stream(),
-           videoFile
-         );
-       }
-     });
-     this.desiredSeekTimeMs = 0;
-     this.#audioContext = new OfflineAudioContext(2, 48e3 / 30, 48e3);
-     this.audioBufferTask = new Task(this, {
-       autoRun: EF_INTERACTIVE,
-       args: () => [this.fetchSeekTask.value, this.seekTask.value],
-       task: async ([files, segments], { signal: _signal }) => {
-         if (!files) {
-           return;
-         }
-         if (!segments) {
-           return;
-         }
-         if (!this.defaultAudioTrackId) {
-           return;
-         }
-         const segment = segments[this.defaultAudioTrackId];
-         if (!segment) {
-           return;
-         }
-         const audioFile = files[this.defaultAudioTrackId];
-         if (!audioFile) {
-           return;
-         }
-         return {
-           buffer: await this.#audioContext.decodeAudioData(
-             await audioFile.arrayBuffer()
-           ),
-           startOffsetMs: segment.segment.cts / segment.track.timescale * 1e3
-         };
-       }
-     });
-     this.#byteTimeDomainCache = new LRUCache(100);
-     this.byteTimeDomainTask = new Task(this, {
-       autoRun: EF_INTERACTIVE,
-       args: () => [
-         this.audioBufferTask.status,
-         this.currentSourceTimeMs,
-         this.fftSize,
-         this.fftDecay,
-         this.fftGain,
-         this.shouldInterpolateFrequencies
-       ],
-       task: async () => {
-         await this.audioBufferTask.taskComplete;
-         if (!this.audioBufferTask.value) return null;
-         if (this.currentSourceTimeMs <= 0) return null;
-         const currentTimeMs = this.currentSourceTimeMs;
-         const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
-         const audioBuffer = this.audioBufferTask.value.buffer;
-         const smoothedKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftDecay}:${this.fftGain}:${startOffsetMs}:${currentTimeMs}`;
-         const cachedData = this.#byteTimeDomainCache.get(smoothedKey);
-         if (cachedData) return cachedData;
-         const framesData = await Promise.all(
-           Array.from({ length: this.fftDecay }, async (_, frameIndex) => {
-             const frameOffset = frameIndex * (1e3 / 30);
-             const startTime = Math.max(
-               0,
-               (currentTimeMs - frameOffset - startOffsetMs) / 1e3
-             );
-             const cacheKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftGain}:${startOffsetMs}:${startTime}`;
-             const cachedFrame = this.#byteTimeDomainCache.get(cacheKey);
-             if (cachedFrame) return cachedFrame;
-             const audioContext = new OfflineAudioContext(
-               2,
-               48e3 * (1 / 30),
-               48e3
-             );
-             const source = audioContext.createBufferSource();
-             source.buffer = audioBuffer;
-             const analyser = audioContext.createAnalyser();
-             analyser.fftSize = this.fftSize;
-             analyser.minDecibels = -90;
-             analyser.maxDecibels = -20;
-             const gainNode = audioContext.createGain();
-             gainNode.gain.value = this.fftGain;
-             source.connect(gainNode);
-             gainNode.connect(analyser);
-             analyser.connect(audioContext.destination);
-             source.start(0, startTime, 1 / 30);
-             const dataLength = analyser.fftSize / 2;
-             try {
-               await audioContext.startRendering();
-               const frameData = new Uint8Array(dataLength);
-               analyser.getByteTimeDomainData(frameData);
-               const points = new Uint8Array(dataLength);
-               for (let i = 0; i < dataLength; i++) {
-                 const pointSamples = frameData.slice(
-                   i * (frameData.length / dataLength),
-                   (i + 1) * (frameData.length / dataLength)
-                 );
-                 const rms = Math.sqrt(
-                   pointSamples.reduce((sum, sample) => {
-                     const normalized = (sample - 128) / 128;
-                     return sum + normalized * normalized;
-                   }, 0) / pointSamples.length
-                 );
-                 const avgSign = Math.sign(
-                   pointSamples.reduce((sum, sample) => sum + (sample - 128), 0)
-                 );
-                 points[i] = Math.min(255, Math.round(128 + avgSign * rms * 128));
-               }
-               this.#byteTimeDomainCache.set(cacheKey, points);
-               return points;
-             } finally {
-               source.disconnect();
-               analyser.disconnect();
-             }
-           })
-         );
-         const frameLength = framesData[0]?.length ?? 0;
-         const smoothedData = new Uint8Array(frameLength);
-         for (let i = 0; i < frameLength; i++) {
-           let weightedSum = 0;
-           let weightSum = 0;
-           framesData.forEach((frame, frameIndex) => {
-             const decayWeight = _EFMedia2.DECAY_WEIGHT ** frameIndex;
-             weightedSum += (frame[i] ?? 0) * decayWeight;
-             weightSum += decayWeight;
-           });
-           smoothedData[i] = Math.min(255, Math.round(weightedSum / weightSum));
-         }
-         this.#byteTimeDomainCache.set(smoothedKey, smoothedData);
-         return smoothedData;
-       }
-     });
-     this.#frequencyDataCache = new LRUCache(100);
-     this.frequencyDataTask = new Task(this, {
-       autoRun: EF_INTERACTIVE,
-       args: () => [
-         this.audioBufferTask.status,
-         this.currentSourceTimeMs,
-         this.fftSize,
-         this.fftDecay,
-         this.fftGain,
-         this.shouldInterpolateFrequencies
-       ],
-       task: async () => {
-         await this.audioBufferTask.taskComplete;
-         if (!this.audioBufferTask.value) return null;
-         if (this.currentSourceTimeMs <= 0) return null;
-         const currentTimeMs = this.currentSourceTimeMs;
-         const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
-         const audioBuffer = this.audioBufferTask.value.buffer;
-         const smoothedKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftDecay}:${this.fftGain}:${startOffsetMs}:${currentTimeMs}`;
-         const cachedSmoothedData = this.#frequencyDataCache.get(smoothedKey);
-         if (cachedSmoothedData) {
-           return cachedSmoothedData;
-         }
-         const framesData = await Promise.all(
-           Array.from({ length: this.fftDecay }, async (_, i) => {
-             const frameOffset = i * (1e3 / 30);
-             const startTime = Math.max(
-               0,
-               (currentTimeMs - frameOffset - startOffsetMs) / 1e3
-             );
-             const cacheKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftGain}:${startOffsetMs}:${startTime}`;
-             const cachedFrame = this.#frequencyDataCache.get(cacheKey);
-             if (cachedFrame) {
-               return cachedFrame;
-             }
-             const audioContext = new OfflineAudioContext(
-               2,
-               48e3 * (1 / 30),
-               48e3
-             );
-             const analyser = audioContext.createAnalyser();
-             analyser.fftSize = this.fftSize;
-             analyser.minDecibels = -90;
-             analyser.maxDecibels = -10;
-             const gainNode = audioContext.createGain();
-             gainNode.gain.value = this.fftGain;
-             const filter = audioContext.createBiquadFilter();
-             filter.type = "bandpass";
-             filter.frequency.value = 15e3;
-             filter.Q.value = 0.05;
-             const audioBufferSource = audioContext.createBufferSource();
-             audioBufferSource.buffer = audioBuffer;
-             audioBufferSource.connect(filter);
-             filter.connect(gainNode);
-             gainNode.connect(analyser);
-             analyser.connect(audioContext.destination);
-             audioBufferSource.start(0, startTime, 1 / 30);
-             try {
-               await audioContext.startRendering();
-               const frameData = new Uint8Array(this.fftSize / 2);
-               analyser.getByteFrequencyData(frameData);
-               this.#frequencyDataCache.set(cacheKey, frameData);
-               return frameData;
-             } finally {
-               audioBufferSource.disconnect();
-               analyser.disconnect();
-             }
-           })
-         );
-         const frameLength = framesData[0]?.length ?? 0;
-         const smoothedData = new Uint8Array(frameLength);
-         for (let i = 0; i < frameLength; i++) {
-           let weightedSum = 0;
-           let weightSum = 0;
-           framesData.forEach((frame, frameIndex) => {
-             const decayWeight = _EFMedia2.DECAY_WEIGHT ** frameIndex;
-             weightedSum += frame[i] * decayWeight;
-             weightSum += decayWeight;
-           });
-           smoothedData[i] = Math.min(255, Math.round(weightedSum / weightSum));
-         }
-         smoothedData.forEach((value, i) => {
-           const freqWeight = this.FREQ_WEIGHTS[i];
-           smoothedData[i] = Math.min(255, Math.round(value * freqWeight));
-         });
-         const slicedData = smoothedData.slice(
-           0,
-           Math.floor(smoothedData.length / 2)
-         );
-         const processedData = this.shouldInterpolateFrequencies ? processFFTData(slicedData) : slicedData;
-         this.#frequencyDataCache.set(smoothedKey, processedData);
-         return processedData;
-       }
-     });
-   }
-   static {
-     this.styles = [
-       css`
+ var EFMedia = class EFMedia extends EFTargetable(EFSourceMixin(EFTemporal(FetchMixin(LitElement)), { assetType: "isobmff_files" })) {
+   constructor(..._args) {
+     super(..._args);
+     this.currentTimeMs = 0;
+     this._mode = "auto";
+     this.prefetchSegments = 3;
+     this.cacheSize = 20;
+     this.enablePrefetch = true;
+     this.jitLoadingState = "idle";
+     this.jitErrorMessage = null;
+     this.jitCacheStats = null;
+     this.jitClientTask = new Task(this, {
+       autoRun: EF_INTERACTIVE,
+       onError: (error) => {
+         console.error("jitClientTask error", error);
+       },
+       args: () => [
+         this.apiHost,
+         this.cacheSize,
+         this.enablePrefetch,
+         this.prefetchSegments
+       ],
+       task: ([apiHost, cacheSize, enablePrefetch, prefetchSegments]) => {
+         const baseUrl = apiHost && apiHost !== "https://editframe.dev" ? apiHost : "http://localhost:3000";
+         return new JitTranscodingClient({
+           baseUrl,
+           segmentCacheSize: cacheSize,
+           enableNetworkAdaptation: enablePrefetch,
+           enablePrefetch,
+           prefetchSegments
+         });
+       }
+     });
+     this.jitMetadataLoader = new Task(this, {
+       autoRun: EF_INTERACTIVE,
+       onError: (error) => {
+         console.error("jitMetadataLoader error", error);
+       },
+       args: () => [this.src, this.jitClientTask.value],
+       task: async ([src, _jitClient], { signal: _signal }) => {
+         if (this.effectiveMode !== "jit-transcode") return null;
+         await this.jitClientTask.taskComplete;
+         const jitClient = this.jitClientTask.value;
+         if (!src || !jitClient) return null;
+         try {
+           this.jitLoadingState = "metadata";
+           this.jitErrorMessage = null;
+           const metadata = await jitClient.loadVideoMetadata(src);
+           this.jitLoadingState = "idle";
+           return metadata;
+         } catch (error) {
+           this.jitLoadingState = "error";
+           this.jitErrorMessage = error instanceof Error ? error.message : "Failed to load video metadata";
+           log("Failed to load JIT metadata:", error);
+           return null;
+         }
+       },
+       onComplete: () => {
+         if (this.jitLoadingState === "metadata") this.jitLoadingState = "idle";
+         this.requestUpdate("intrinsicDurationMs");
+         this.requestUpdate("ownCurrentTimeMs");
+         this.rootTimegroup?.requestUpdate("ownCurrentTimeMs");
+         this.rootTimegroup?.requestUpdate("durationMs");
+       }
+     });
+     this.audioBufferTask = new Task(this, {
+       autoRun: EF_INTERACTIVE,
+       onError: (error) => {
+         console.error("audioBufferTask error", error);
+       },
+       args: () => [this.mediaSegmentsTask.value, this.seekTask.value],
+       task: async ([files, segments], { signal: _signal }) => {
+         if (!files || !segments) return;
+         if (!this.defaultAudioTrackId) return;
+         const segment = segments[this.defaultAudioTrackId];
+         if (!segment) return;
+         const audioFile = files[this.defaultAudioTrackId];
+         if (!audioFile) return;
+         return {
+           buffer: await this.#audioContext.decodeAudioData(await audioFile.arrayBuffer()),
+           startOffsetMs: segment.segment.cts / segment.track.timescale * 1e3
+         };
+       }
+     });
+     this.byteTimeDomainTask = new Task(this, {
+       autoRun: EF_INTERACTIVE,
+       onError: (error) => {
+         console.error("byteTimeDomainTask error", error);
+       },
+       args: () => [
+         this.audioBufferTask.status,
+         this.currentSourceTimeMs,
+         this.fftSize,
+         this.fftDecay,
+         this.fftGain,
+         this.shouldInterpolateFrequencies
+       ],
+       task: async () => {
+         await this.audioBufferTask.taskComplete;
+         if (!this.audioBufferTask.value) return null;
+         if (this.currentSourceTimeMs < 0) return null;
+         const currentTimeMs = this.currentSourceTimeMs;
+         const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
+         const audioBuffer = this.audioBufferTask.value.buffer;
+         const smoothedKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftDecay}:${this.fftGain}:${startOffsetMs}:${currentTimeMs}`;
+         const cachedData = this.#byteTimeDomainCache.get(smoothedKey);
+         if (cachedData) return cachedData;
+         const framesData = await Promise.all(Array.from({ length: this.fftDecay }, async (_, frameIndex) => {
+           const frameOffset = frameIndex * (1e3 / 30);
+           const startTime = Math.max(0, (currentTimeMs - frameOffset - startOffsetMs) / 1e3);
+           const cacheKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftGain}:${startOffsetMs}:${startTime}`;
+           const cachedFrame = this.#byteTimeDomainCache.get(cacheKey);
+           if (cachedFrame) return cachedFrame;
+           let audioContext;
+           try {
+             audioContext = new OfflineAudioContext(2, 48e3 * (1 / 30), 48e3);
+           } catch (error) {
+             throw new Error(`[EFMedia.byteTimeDomainTask] Failed to create OfflineAudioContext(2, ${48e3 * (1 / 30)}, 48000) for frame ${frameIndex} at time ${startTime}s: ${error instanceof Error ? error.message : String(error)}. This is for audio time domain analysis.`);
+           }
+           const source = audioContext.createBufferSource();
+           source.buffer = audioBuffer;
+           const analyser = audioContext.createAnalyser();
+           analyser.fftSize = this.fftSize;
+           analyser.minDecibels = -90;
+           analyser.maxDecibels = -20;
+           const gainNode = audioContext.createGain();
+           gainNode.gain.value = this.fftGain;
+           source.connect(gainNode);
+           gainNode.connect(analyser);
+           analyser.connect(audioContext.destination);
+           source.start(0, startTime, 1 / 30);
+           const dataLength = analyser.fftSize / 2;
+           try {
+             await audioContext.startRendering();
+             const frameData = new Uint8Array(dataLength);
+             analyser.getByteTimeDomainData(frameData);
+             const points = new Uint8Array(dataLength);
+             for (let i = 0; i < dataLength; i++) {
+               const pointSamples = frameData.slice(i * (frameData.length / dataLength), (i + 1) * (frameData.length / dataLength));
+               const rms = Math.sqrt(pointSamples.reduce((sum, sample) => {
+                 const normalized = (sample - 128) / 128;
+                 return sum + normalized * normalized;
+               }, 0) / pointSamples.length);
+               const avgSign = Math.sign(pointSamples.reduce((sum, sample) => sum + (sample - 128), 0));
+               points[i] = Math.min(255, Math.round(128 + avgSign * rms * 128));
+             }
+             this.#byteTimeDomainCache.set(cacheKey, points);
+             return points;
+           } finally {
+             source.disconnect();
+             analyser.disconnect();
+           }
+         }));
+         const frameLength = framesData[0]?.length ?? 0;
+         const smoothedData = new Uint8Array(frameLength);
+         for (let i = 0; i < frameLength; i++) {
+           let weightedSum = 0;
+           let weightSum = 0;
+           framesData.forEach((frame, frameIndex) => {
+             const decayWeight = EFMedia.DECAY_WEIGHT ** frameIndex;
+             weightedSum += (frame[i] ?? 0) * decayWeight;
+             weightSum += decayWeight;
+           });
+           smoothedData[i] = Math.min(255, Math.round(weightedSum / weightSum));
+         }
+         this.#byteTimeDomainCache.set(smoothedKey, smoothedData);
+         return smoothedData;
+       }
+     });
+     this.frequencyDataTask = new Task(this, {
+       autoRun: EF_INTERACTIVE,
+       onError: (error) => {
+         console.error("frequencyDataTask error", error);
+       },
+       args: () => [
+         this.audioBufferTask.status,
+         this.currentSourceTimeMs,
+         this.fftSize,
+         this.fftDecay,
+         this.fftGain,
+         this.shouldInterpolateFrequencies
+       ],
+       task: async () => {
+         await this.audioBufferTask.taskComplete;
+         if (!this.audioBufferTask.value) return null;
+         if (this.currentSourceTimeMs < 0) return null;
+         const currentTimeMs = this.currentSourceTimeMs;
+         const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
+         const audioBuffer = this.audioBufferTask.value.buffer;
+         const smoothedKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftDecay}:${this.fftGain}:${startOffsetMs}:${currentTimeMs}`;
+         const cachedSmoothedData = this.#frequencyDataCache.get(smoothedKey);
+         if (cachedSmoothedData) return cachedSmoothedData;
+         const framesData = await Promise.all(Array.from({ length: this.fftDecay }, async (_, i) => {
+           const frameOffset = i * (1e3 / 30);
+           const startTime = Math.max(0, (currentTimeMs - frameOffset - startOffsetMs) / 1e3);
+           const cacheKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftGain}:${startOffsetMs}:${startTime}`;
+           const cachedFrame = this.#frequencyDataCache.get(cacheKey);
+           if (cachedFrame) return cachedFrame;
+           const SIZE = 48e3 / 30;
+           let audioContext;
+           try {
+             audioContext = new OfflineAudioContext(2, SIZE, 48e3);
+           } catch (error) {
+             throw new Error(`[EFMedia.frequencyDataTask] Failed to create OfflineAudioContext(2, ${SIZE}, 48000) for frame ${i} at time ${startTime}s: ${error instanceof Error ? error.message : String(error)}. This is for audio frequency analysis.`);
+           }
+           const analyser = audioContext.createAnalyser();
+           analyser.fftSize = this.fftSize;
+           analyser.minDecibels = -90;
+           analyser.maxDecibels = -10;
+           const gainNode = audioContext.createGain();
+           gainNode.gain.value = this.fftGain;
+           const filter = audioContext.createBiquadFilter();
+           filter.type = "bandpass";
+           filter.frequency.value = 15e3;
+           filter.Q.value = .05;
+           const audioBufferSource = audioContext.createBufferSource();
+           audioBufferSource.buffer = audioBuffer;
+           audioBufferSource.connect(filter);
+           filter.connect(gainNode);
+           gainNode.connect(analyser);
+           analyser.connect(audioContext.destination);
+           audioBufferSource.start(0, startTime, 1 / 30);
+           try {
+             await audioContext.startRendering();
+             const frameData = new Uint8Array(this.fftSize / 2);
+             analyser.getByteFrequencyData(frameData);
+             this.#frequencyDataCache.set(cacheKey, frameData);
+             return frameData;
+           } finally {
+             audioBufferSource.disconnect();
+             analyser.disconnect();
+           }
+         }));
+         const frameLength = framesData[0]?.length ?? 0;
+         const smoothedData = new Uint8Array(frameLength);
+         for (let i = 0; i < frameLength; i++) {
+           let weightedSum = 0;
+           let weightSum = 0;
+           framesData.forEach((frame, frameIndex) => {
+             const decayWeight = EFMedia.DECAY_WEIGHT ** frameIndex;
+             weightedSum += (frame[i] ?? 0) * decayWeight;
+             weightSum += decayWeight;
+           });
+           smoothedData[i] = Math.min(255, Math.round(weightedSum / weightSum));
+         }
+         smoothedData.forEach((value, i) => {
+           const freqWeight = this.FREQ_WEIGHTS[i] ?? 0;
+           smoothedData[i] = Math.min(255, Math.round(value * freqWeight));
+         });
+         const slicedData = smoothedData.slice(0, Math.floor(smoothedData.length / 2));
+         const processedData = this.shouldInterpolateFrequencies ? processFFTData(slicedData) : slicedData;
+         this.#frequencyDataCache.set(smoothedKey, processedData);
+         return processedData;
+       }
+     });
+     this.videoAssetTask = new Task(this, {
+       autoRun: EF_INTERACTIVE,
+       onError: (error) => {
+         console.error("videoAssetTask error", error);
+       },
+       args: () => [this.effectiveMode, this.mediaSegmentsTask.value],
+       task: async ([mode, files], { signal: _signal }) => {
+         if (!files) return;
+         const fragmentIndex = this.fragmentIndexTask.value;
+         const computedVideoTrackId = Object.values(fragmentIndex ?? {}).find((track) => track.type === "video")?.track;
+         if (computedVideoTrackId === void 0) return;
+         const videoFile = files[computedVideoTrackId];
+         if (!videoFile) return;
+         const existingAsset = this.videoAssetTask.value;
+         if (existingAsset) {
+           for (const frame of existingAsset?.decodedFrames || []) frame.close();
+           const maybeDecoder = existingAsset?.videoDecoder;
+           if (maybeDecoder?.state !== "closed") maybeDecoder.close();
+         }
+         if (mode === "jit-transcode") return await VideoAsset.createFromCompleteMP4(`jit-segment-${computedVideoTrackId}`, videoFile);
+         return await VideoAsset.createFromReadableStream("video.mp4", videoFile.stream(), videoFile);
+       }
+     });
+     this._desiredSeekTimeMs = -1;
+     this.assetIndexLoader = new Task(this, {
+       autoRun: EF_INTERACTIVE,
+       onError: (error) => {
+         console.error("assetIndexLoader error", error);
+       },
+       args: () => [this.effectiveMode === "asset" ? this.fragmentIndexPath() : null, this.fetch],
+       task: async ([path, fetch], { signal }) => {
+         if (!path) return null;
+         try {
+           const response = await fetch(path, { signal });
+           return await response.json();
+         } catch (error) {
+           console.error("Failed to load asset fragment index", error);
+           return null;
+         }
+       },
+       onComplete: () => {
+         this.requestUpdate("intrinsicDurationMs");
+         this.requestUpdate("ownCurrentTimeMs");
+         this.rootTimegroup?.requestUpdate("ownCurrentTimeMs");
+         this.rootTimegroup?.requestUpdate("durationMs");
+       }
+     });
+     this.assetSegmentKeysTask = new Task(this, {
+       autoRun: EF_INTERACTIVE,
+       onError: (error) => {
+         console.error("assetSegmentKeysTask error", error);
+       },
+       args: () => [this.effectiveMode === "asset" ? this.assetIndexLoader.value : null, this.desiredSeekTimeMs],
+       task: async ([fragmentIndex, seekMs]) => {
+         if (this.effectiveMode === "asset") {
+           await this.assetIndexLoader.taskComplete;
+           fragmentIndex = this.assetIndexLoader.value;
+         }
+         if (!fragmentIndex || seekMs == null) return null;
+         return this.calculateAssetSegmentKeys(fragmentIndex, seekMs);
+       }
+     });
+     this.assetInitSegmentsTask = new Task(this, {
+       autoRun: EF_INTERACTIVE,
+       onError: (error) => {
+         console.error("assetInitSegmentsTask error", error);
+       },
+       args: () => [this.effectiveMode === "asset" ? this.assetIndexLoader.value : null, this.fetch],
+       task: async ([fragmentIndex, fetch], { signal }) => {
+         if (this.effectiveMode === "asset") {
+           await this.assetIndexLoader.taskComplete;
+           fragmentIndex = this.assetIndexLoader.value;
+         }
+         if (!fragmentIndex) return null;
+         return await Promise.all(Object.entries(fragmentIndex).map(async ([trackId, track]) => {
+           const start = track.initSegment.offset;
+           const end = track.initSegment.offset + track.initSegment.size;
+           const response = await fetch(this.fragmentTrackPath(trackId), {
+             signal,
+             headers: { Range: `bytes=${start}-${end - 1}` }
+           });
+           const buffer = await response.arrayBuffer();
+           buffer.fileStart = 0;
+           const mp4File = new MP4File();
+           mp4File.appendBuffer(buffer, true);
+           mp4File.flush();
+           await mp4File.readyPromise;
+           return {
+             trackId,
+             buffer,
+             mp4File
+           };
+         }));
+       }
+     });
+     this.assetSegmentLoader = new Task(this, {
+       autoRun: EF_INTERACTIVE,
+       onError: (error) => {
+         console.error("assetSegmentLoader error", error);
+       },
+       argsEqual: deepArrayEquals,
+       args: () => [
+         this.assetIndexLoader.value,
+         this.assetSegmentKeysTask.value,
+         this.assetInitSegmentsTask.value,
+         this.fetch
+       ],
+       task: async ([fragmentIndex, segmentKeys, initSegments, fetch], { signal }) => {
+         if (this.effectiveMode === "asset") {
+           await this.assetIndexLoader.taskComplete;
+           fragmentIndex = this.assetIndexLoader.value;
+           await this.assetSegmentKeysTask.taskComplete;
+           segmentKeys = this.assetSegmentKeysTask.value;
+           await this.assetInitSegmentsTask.taskComplete;
+           initSegments = this.assetInitSegmentsTask.value;
+         }
+         if (!fragmentIndex || !segmentKeys || !initSegments) return null;
+         const seekMs = this.desiredSeekTimeMs;
+         if (seekMs == null) return null;
+         const files = {};
+         const seekResult = this.calculateAssetSeekResult(fragmentIndex, initSegments, seekMs);
+         if (!seekResult) return null;
+         for (const [trackId, { segment, track, nextSegment }] of Object.entries(seekResult)) {
+           const start = segment.offset;
+           const end = segment.offset + segment.size;
+           const response = await fetch(this.fragmentTrackPath(trackId), {
+             signal,
+             headers: { Range: `bytes=${start}-${end - 1}` }
+           });
+           if (nextSegment) {
+             const nextStart = nextSegment.offset;
+             const nextEnd = nextSegment.offset + nextSegment.size;
+             fetch(this.fragmentTrackPath(trackId), {
+               signal,
+               headers: { Range: `bytes=${nextStart}-${nextEnd - 1}` }
+             }).catch(() => {});
+           }
+           const initSegment = initSegments.find((seg) => seg.trackId === String(track.id));
+           if (!initSegment) continue;
+           const mediaBuffer = await response.arrayBuffer();
+           files[trackId] = new File([initSegment.buffer, mediaBuffer], "video.mp4", { type: "video/mp4" });
+         }
+         return files;
+       }
+     });
+     this.jitSegmentKeysTask = new Task(this, {
+       autoRun: EF_INTERACTIVE,
+       onError: (error) => {
+         console.error("jitSegmentKeysTask error", error);
+       },
+       args: () => [this.effectiveMode === "jit-transcode" ? this.jitMetadataLoader.value : null, this.desiredSeekTimeMs],
+       task: ([metadata, seekMs]) => {
+         if (!metadata || seekMs == null) return null;
+         return this.calculateJitSegmentKeys(metadata, seekMs);
+       }
+     });
+     this.jitSegmentLoader = new Task(this, {
+       autoRun: EF_INTERACTIVE,
+       onError: (error) => {
+         console.error("jitSegmentLoader error", error);
+       },
+       argsEqual: deepArrayEquals,
+       args: () => [
+         this.src,
+         this.jitSegmentKeysTask.value,
+         this.jitMetadataLoader.value
+       ],
+       task: async ([src, segmentKeys, metadata], { signal: _signal }) => {
+         await this.jitSegmentKeysTask.taskComplete;
+         await this.jitMetadataLoader.taskComplete;
+         if (!src || !segmentKeys || !metadata || !this.jitClientTask.value) return null;
+         const seekMs = this.desiredSeekTimeMs;
+         if (seekMs == null) return null;
+         try {
+           this.jitLoadingState = "segments";
+           this.jitErrorMessage = null;
+           const files = {};
+           const quality = await this.jitClientTask.value.getAdaptiveQuality();
+           const fragmentIndex = this.synthesizeFragmentIndex(metadata);
+           const seekResult = this.calculateJitSeekResult(fragmentIndex, seekMs);
+           for (const [trackId, { segment, track, nextSegment }] of Object.entries(seekResult)) {
+             const startTimeMs = segment.dts / track.timescale * 1e3;
+             const segmentBuffer = await this.jitClientTask.value.fetchSegment(src, startTimeMs, quality);
+             files[trackId] = new File([segmentBuffer], "segment.mp4", { type: "video/mp4" });
+             if (nextSegment && this.enablePrefetch) {
+               const nextStartTimeMs = nextSegment.dts / track.timescale * 1e3;
+               this.jitClientTask.value.fetchSegment(src, nextStartTimeMs, quality).catch(() => {});
+             }
+           }
+           this.jitCacheStats = this.jitClientTask.value.getCacheStats();
+           this.jitLoadingState = "idle";
+           return files;
+         } catch (error) {
+           this.jitLoadingState = "error";
+           this.jitErrorMessage = error instanceof Error ? error.message : "Failed to load video segments";
+           throw error;
+         }
+       }
+     });
+     this.fragmentIndexTask = new Task(this, {
+       autoRun: EF_INTERACTIVE,
+       onError: (error) => {
+         console.error("fragmentIndexTask error", error);
+       },
+       args: () => [this.assetIndexLoader.value, this.jitMetadataLoader.value],
+       task: async ([assetIndex, jitMetadata]) => {
+         await this.assetIndexLoader.taskComplete;
+         await this.jitMetadataLoader.taskComplete;
+         if (assetIndex) return assetIndex;
+         if (jitMetadata) return this.synthesizeFragmentIndex(jitMetadata);
+         return null;
+       }
+     });
+     this.mediaSegmentsTask = new Task(this, {
+       autoRun: EF_INTERACTIVE,
+       onError: (error) => {
+         console.error("mediaSegmentsTask error", error);
+       },
+       args: () => [this.assetSegmentLoader.value, this.jitSegmentLoader.value],
+       task: async ([_assetFiles, _jitFiles], { signal }) => {
+         log("🔍 SIGNAL: mediaSegmentsTask starting", { signalAborted: signal.aborted });
+         await this.assetSegmentLoader.taskComplete;
+         if (signal.aborted) {
+           log("🔍 SIGNAL: mediaSegmentsTask aborted after assetSegmentLoader.taskComplete");
+           return null;
+         }
+         await this.jitSegmentLoader.taskComplete;
+         if (signal.aborted) {
+           log("🔍 SIGNAL: mediaSegmentsTask aborted after jitSegmentLoader.taskComplete");
+           return null;
+         }
+         const assetFiles = this.assetSegmentLoader.value;
+         const jitFiles = this.jitSegmentLoader.value;
+         log("🔍 SIGNAL: mediaSegmentsTask using fresh values", {
+           hasAssetFiles: !!assetFiles,
+           hasJitFiles: !!jitFiles,
+           signalAborted: signal.aborted
+         });
+         const result = assetFiles || jitFiles || null;
+         log("🔍 SIGNAL: mediaSegmentsTask resolved", {
+           hasResult: !!result,
+           signalAborted: signal.aborted
+         });
+         return result;
+       }
+     });
+     this.seekTask = new Task(this, {
+       autoRun: EF_INTERACTIVE,
+       onError: (error) => {
+         console.error("seekTask error", error);
+       },
+       args: () => [
+         this.fragmentIndexTask.value,
+         this.mediaSegmentsTask.value,
+         this.desiredSeekTimeMs
+       ],
+       task: async ([_fragmentIndex, _files, seekMs], { signal }) => {
+         log("🔍 SIGNAL: seekTask starting", {
+           seekMs,
+           signalAborted: signal.aborted
+         });
+         await this.fragmentIndexTask.taskComplete;
+         if (signal.aborted) {
+           log("🔍 SIGNAL: seekTask aborted after fragmentIndexTask.taskComplete");
+           return null;
+         }
+         await this.mediaSegmentsTask.taskComplete;
+         if (signal.aborted) {
+           log("🔍 SIGNAL: seekTask aborted after mediaSegmentsTask.taskComplete");
+           return null;
+         }
+         const fragmentIndex = this.fragmentIndexTask.value;
+         const files = this.mediaSegmentsTask.value;
+         log("🔍 SIGNAL: seekTask using fresh values", {
+           hasFragmentIndex: !!fragmentIndex,
+           hasFiles: !!files,
+           seekMs,
+           signalAborted: signal.aborted
+         });
+         const typedFragmentIndex = fragmentIndex;
+         if (!typedFragmentIndex || !files) {
+           log("🔍 SIGNAL: seekTask calculation aborted - missing required data");
+           return null;
+         }
+         const result = {};
+         for (const index of Object.values(typedFragmentIndex)) {
+           const track = this.createTrackInfo(index);
+           log("trace: processing track", {
+             trackId: index.track,
+             type: index.type
+           });
+           const segment = index.segments.toReversed().find((segment$1) => {
+             const segmentStartMs = segment$1.dts / track.timescale * 1e3;
+             return segmentStartMs <= seekMs;
+           });
+           const nextSegment = index.segments.find((segment$1) => {
+             const segmentStartMs = segment$1.dts / track.timescale * 1e3;
+             return segmentStartMs > seekMs;
+           });
+           if (segment) {
+             result[index.track] = {
+               segment,
+               track,
+               nextSegment
+             };
+             log("trace: found segment for track", {
+               trackId: index.track,
+               segmentDts: segment.dts,
+               hasNextSegment: !!nextSegment
+             });
+           }
+         }
+         log("🔍 SIGNAL: seekTask calculation complete", {
+           trackCount: Object.keys(result).length,
+           signalAborted: signal.aborted
+         });
+         return result;
+       }
+     });
+   }
+   static {
+     this.styles = [css`
  :host {
    display: block;
    position: relative;
    overflow: hidden;
  }
-       `
-     ];
-   }
-   #assetId;
-   set assetId(value) {
-     this.#assetId = value;
-   }
-   get assetId() {
-     return this.#assetId || this.getAttribute("asset-id");
-   }
-   fragmentIndexPath() {
-     if (this.assetId) {
-       return `${this.apiHost}/api/v1/isobmff_files/${this.assetId}/index`;
-     }
-     return `/@ef-track-fragment-index/${this.src ?? ""}`;
-   }
-   fragmentTrackPath(trackId) {
-     if (this.assetId) {
-       return `${this.apiHost}/api/v1/isobmff_tracks/${this.assetId}/${trackId}`;
-     }
-     return `/@ef-track/${this.src ?? ""}?trackId=${trackId}`;
-   }
-   get defaultVideoTrackId() {
-     return Object.values(this.trackFragmentIndexLoader.value ?? {}).find(
-       (track) => track.type === "video"
-     )?.track;
-   }
-   get defaultAudioTrackId() {
-     return Object.values(this.trackFragmentIndexLoader.value ?? {}).find(
-       (track) => track.type === "audio"
-     )?.track;
-   }
-   async executeSeek(seekToMs) {
-     this.desiredSeekTimeMs = seekToMs;
-   }
-   updated(changedProperties) {
-     if (changedProperties.has("ownCurrentTimeMs")) {
-       this.executeSeek(this.currentSourceTimeMs);
-     }
-     if (changedProperties.has("currentTime") || changedProperties.has("ownCurrentTimeMs")) {
-       updateAnimations(this);
-     }
-   }
-   get hasOwnDuration() {
-     return true;
-   }
-   get intrinsicDurationMs() {
-     if (!this.trackFragmentIndexLoader.value) {
-       return 0;
-     }
-     const durations = Object.values(this.trackFragmentIndexLoader.value).map(
-       (track) => {
-         return track.duration / track.timescale * 1e3;
-       }
-     );
-     if (durations.length === 0) {
-       return 0;
-     }
-     return Math.max(...durations);
-   }
-   #audioContext;
-   async fetchAudioSpanningTime(fromMs, toMs) {
-     if (this.sourceInMs) {
-       fromMs -= this.startTimeMs - (this.trimStartMs ?? 0) - (this.sourceInMs ?? 0);
-     }
-     if (this.sourceOutMs) {
-       toMs -= this.startTimeMs - (this.trimStartMs ?? 0) - (this.sourceOutMs ?? 0);
-     }
-     fromMs -= this.startTimeMs - (this.trimStartMs ?? 0);
-     toMs -= this.startTimeMs - (this.trimStartMs ?? 0);
-     await this.trackFragmentIndexLoader.taskComplete;
-     const audioTrackId = this.defaultAudioTrackId;
-     if (!audioTrackId) {
-       log("No audio track found");
-       return;
-     }
-     const audioTrackIndex = this.trackFragmentIndexLoader.value?.[audioTrackId];
-     if (!audioTrackIndex) {
-       log("No audio track found");
-       return;
-     }
-     const start = audioTrackIndex.initSegment.offset;
-     const end = audioTrackIndex.initSegment.offset + audioTrackIndex.initSegment.size;
-     const audioInitFragmentRequest = this.fetch(
-       this.fragmentTrackPath(String(audioTrackId)),
-       {
-         headers: { Range: `bytes=${start}-${end - 1}` }
-       }
-     );
-     const fragments = Object.values(audioTrackIndex.segments).filter(
-       (segment) => {
-         const segmentStartsBeforeEnd = segment.dts <= toMs * audioTrackIndex.timescale / 1e3;
-         const segmentEndsAfterStart = segment.dts + segment.duration >= fromMs * audioTrackIndex.timescale / 1e3;
-         return segmentStartsBeforeEnd && segmentEndsAfterStart;
-       }
-     );
-     const firstFragment = fragments[0];
-     if (!firstFragment) {
-       log("No audio fragments found");
-       return;
-     }
-     const lastFragment = fragments[fragments.length - 1];
-     if (!lastFragment) {
-       log("No audio fragments found");
-       return;
-     }
-     const fragmentStart = firstFragment.offset;
-     const fragmentEnd = lastFragment.offset + lastFragment.size;
-     const audioFragmentRequest = this.fetch(
-       this.fragmentTrackPath(String(audioTrackId)),
-       {
-         headers: { Range: `bytes=${fragmentStart}-${fragmentEnd - 1}` }
-       }
-     );
-     const initResponse = await audioInitFragmentRequest;
-     const dataResponse = await audioFragmentRequest;
-     const initBuffer = await initResponse.arrayBuffer();
-     const dataBuffer = await dataResponse.arrayBuffer();
-     const audioBlob = new Blob([initBuffer, dataBuffer], {
-       type: "audio/mp4"
-     });
-     return {
-       blob: audioBlob,
-       startMs: firstFragment.dts / audioTrackIndex.timescale * 1e3 - (this.trimStartMs ?? 0),
-       endMs: lastFragment.dts / audioTrackIndex.timescale * 1e3 + lastFragment.duration / audioTrackIndex.timescale * 1e3 - (this.trimEndMs ?? 0)
-     };
-   }
-   set fftSize(value) {
-     const oldValue = this.fftSize;
-     this.setAttribute("fft-size", String(value));
-     this.requestUpdate("fft-size", oldValue);
-   }
-   set fftDecay(value) {
-     const oldValue = this.fftDecay;
-     this.setAttribute("fft-decay", String(value));
-     this.requestUpdate("fft-decay", oldValue);
-   }
-   get fftSize() {
-     return Number.parseInt(this.getAttribute("fft-size") ?? "128", 10);
-   }
-   get fftDecay() {
-     return Number.parseInt(this.getAttribute("fft-decay") ?? "8", 10);
-   }
-   set interpolateFrequencies(value) {
-     const oldValue = this.interpolateFrequencies;
-     this.setAttribute("interpolate-frequencies", String(value));
-     this.requestUpdate("interpolate-frequencies", oldValue);
-   }
-   get interpolateFrequencies() {
-     return this.getAttribute("interpolate-frequencies") !== "false";
-   }
-   get shouldInterpolateFrequencies() {
-     if (this.hasAttribute("interpolate-frequencies")) {
-       return this.getAttribute("interpolate-frequencies") !== "false";
-     }
-     return false;
-   }
-   static {
-     this.DECAY_WEIGHT = 0.7;
-   }
-   // Update FREQ_WEIGHTS to use the instance fftSize instead of a static value
-   get FREQ_WEIGHTS() {
-     if (freqWeightsCache.has(this.fftSize)) {
-       return freqWeightsCache.get(this.fftSize);
-     }
-     const weights = new Float32Array(this.fftSize / 2).map((_, i) => {
-       const frequency = i * 48e3 / this.fftSize;
-       if (frequency < 60) return 0.3;
-       if (frequency < 250) return 0.4;
-       if (frequency < 500) return 0.6;
-       if (frequency < 2e3) return 0.8;
-       if (frequency < 4e3) return 1.2;
-       if (frequency < 8e3) return 1.6;
-       return 2;
-     });
-     freqWeightsCache.set(this.fftSize, weights);
-     return weights;
-   }
-   #byteTimeDomainCache;
-   #frequencyDataCache;
-   set fftGain(value) {
-     const oldValue = this.fftGain;
-     this.setAttribute("fft-gain", String(value));
-     this.requestUpdate("fft-gain", oldValue);
-   }
-   get fftGain() {
-     return Number.parseFloat(this.getAttribute("fft-gain") ?? "3.0");
-   }
+     `];
+   }
629
+ /**
630
+ * Get the mode, prioritizing attribute values over property values
631
+ */
632
+ get mode() {
633
+ const attr = this.getAttribute("mode");
634
+ return attr || this._mode || "auto";
635
+ }
636
+ set mode(value) {
637
+ const oldValue = this.mode;
638
+ this._mode = value;
639
+ this.setAttribute("mode", value);
640
+ this.requestUpdate("mode", oldValue);
641
+ }
642
+ connectedCallback() {
643
+ super.connectedCallback();
644
+ const modeAttr = this.getAttribute("mode");
645
+ if (modeAttr && modeAttr !== this._mode) {
646
+ this._mode = modeAttr;
647
+ this.requestUpdate("mode");
648
+ }
649
+ const prefetchSegmentsAttr = this.getAttribute("prefetch-segments");
650
+ if (prefetchSegmentsAttr !== null) this.prefetchSegments = Number.parseInt(prefetchSegmentsAttr, 10) || 3;
651
+ const cacheSizeAttr = this.getAttribute("cache-size");
652
+ if (cacheSizeAttr !== null) this.cacheSize = Number.parseInt(cacheSizeAttr, 10) || 20;
653
+ const enablePrefetchAttr = this.getAttribute("enable-prefetch");
654
+ if (enablePrefetchAttr !== null) this.enablePrefetch = enablePrefetchAttr === "true";
655
+ }
656
+ /**
657
+ * Detected loading mode based on URL patterns and manual override
658
+ */
659
+ get effectiveMode() {
660
+ const actualMode = this.mode;
661
+ if (actualMode === "asset" || actualMode === "jit-transcode") return actualMode;
662
+ if (this.assetId) return "asset";
663
+ if (!this.src) return "asset";
664
+ if (JitTranscodingClient.isJitTranscodeEligible(this.src)) return "jit-transcode";
665
+ return "asset";
666
+ }
667
+ #assetId = null;
668
+ /**
669
+ * The unique identifier for the media asset.
670
+ * This property can be set programmatically or via the "asset-id" attribute.
671
+ * @domAttribute "asset-id"
672
+ */
673
+ set assetId(value) {
674
+ this.#assetId = value;
675
+ }
676
+ get assetId() {
677
+ return this.#assetId || this.getAttribute("asset-id");
678
+ }
679
+ fragmentIndexPath() {
680
+ if (this.assetId) return `${this.apiHost}/api/v1/isobmff_files/${this.assetId}/index`;
681
+ const src = this.src ?? "";
682
+ if (!src) return "/@ef-track-fragment-index/no-src-available";
683
+ const normalizedSrc = src.startsWith("/") ? src.slice(1) : src;
684
+ if (normalizedSrc.startsWith("@ef-")) return `/@ef-track-fragment-index/${normalizedSrc}`;
685
+ return `/@ef-track-fragment-index/${normalizedSrc}`;
686
+ }
687
+ fragmentTrackPath(trackId) {
688
+ if (this.assetId) return `${this.apiHost}/api/v1/isobmff_tracks/${this.assetId}/${trackId}`;
689
+ const src = this.src ?? "";
690
+ if (!src) return `/@ef-track/no-src-available?trackId=${trackId}`;
691
+ const normalizedSrc = src.startsWith("/") ? src.slice(1) : src;
692
+ if (normalizedSrc.startsWith("@ef-")) return `/@ef-track/${normalizedSrc}?trackId=${trackId}`;
693
+ return `/@ef-track/${normalizedSrc}?trackId=${trackId}`;
694
+ }
695
+ get mediaDurationTask() {
696
+ return this.fragmentIndexTask;
697
+ }
698
+ get defaultVideoTrackId() {
699
+ const fragmentIndex = this.fragmentIndexTask.value;
700
+ return Object.values(fragmentIndex ?? {}).find((track) => track.type === "video")?.track;
701
+ }
702
+ get defaultAudioTrackId() {
703
+ const fragmentIndex = this.fragmentIndexTask.value;
704
+ return Object.values(fragmentIndex ?? {}).find((track) => track.type === "audio")?.track;
705
+ }
706
+ get intrinsicDurationMs() {
707
+ const fragmentIndex = this.fragmentIndexTask.value;
708
+ if (!fragmentIndex) return 0;
709
+ const durations = Object.values(fragmentIndex).map((track) => track.duration / track.timescale * 1e3);
710
+ if (durations.length === 0) return 0;
711
+ return Math.max(...durations);
712
+ }
+ // 48e3 / 30 = 1600 frames, i.e. one 30 fps video frame of 48 kHz stereo audio.
+ #audioContext = (() => {
+ try {
+ return new OfflineAudioContext(2, 48e3 / 30, 48e3);
+ } catch (error) {
+ throw new Error(`[EFMedia.audioBufferTask] Failed to create OfflineAudioContext(2, ${48e3 / 30}, 48000): ${error instanceof Error ? error.message : String(error)}. This is the class field audioContext for audio buffer task processing.`);
+ }
+ })();
+ async fetchAudioSpanningTime(fromMs, toMs) {
+ toMs = Math.min(toMs, this.durationMs);
+ if (this.sourceInMs) fromMs -= this.startTimeMs - (this.trimStartMs ?? 0) - (this.sourceInMs ?? 0);
+ if (this.sourceOutMs) toMs -= this.startTimeMs - (this.trimStartMs ?? 0) - (this.sourceOutMs ?? 0);
+ fromMs -= this.startTimeMs - (this.trimStartMs ?? 0);
+ toMs -= this.startTimeMs - (this.trimStartMs ?? 0);
+ await this.fragmentIndexTask.taskComplete;
+ const fragmentIndex = this.fragmentIndexTask.value;
+ const audioTrackId = this.defaultAudioTrackId;
+ if (!audioTrackId) return void 0;
+ const audioTrackIndex = fragmentIndex?.[audioTrackId];
+ if (!audioTrackIndex) return void 0;
+ if (this.effectiveMode === "jit-transcode" && this.src) {
+ const jitClient = this.jitClientTask.value;
+ if (!jitClient) return void 0;
+ try {
+ const segmentDuration = 2e3;
+ const startSegmentIndex = Math.floor(fromMs / segmentDuration);
+ const maxSegmentIndex = Math.floor(this.durationMs / segmentDuration) - 1;
+ const endSegmentIndex = Math.min(Math.floor(toMs / segmentDuration), maxSegmentIndex);
+ const quality = await jitClient.getAdaptiveQuality();
+ const segmentPromises = [];
+ for (let i = startSegmentIndex; i <= endSegmentIndex; i++) {
+ const segmentStartMs = i * segmentDuration;
+ const segmentEndMs = (i + 1) * segmentDuration;
+ segmentPromises.push(jitClient.fetchSegment(this.src, segmentStartMs, quality).then((buffer) => ({
+ buffer,
+ startMs: segmentStartMs,
+ endMs: segmentEndMs
+ })));
+ }
+ const segments = await Promise.all(segmentPromises);
+ const audioBuffers = [];
+ for (const segment of segments) try {
+ let tempContext;
+ try {
+ tempContext = new OfflineAudioContext(2, 48e3, 48e3);
+ } catch (error) {
+ throw new Error(`[EFMedia.fetchAudioSpanningTime JIT] Failed to create temp OfflineAudioContext(2, 48000, 48000) for segment ${segment.startMs}-${segment.endMs}ms: ${error instanceof Error ? error.message : String(error)}. This is for decoding audio from JIT video segments.`);
+ }
+ const clonedBuffer = segment.buffer.slice(0);
+ const audioBuffer = await tempContext.decodeAudioData(clonedBuffer);
+ audioBuffers.push({
+ buffer: audioBuffer,
+ startMs: segment.startMs,
+ endMs: segment.endMs
+ });
+ } catch (error) {
+ log(`Failed to decode audio from segment ${segment.startMs}-${segment.endMs}ms:`, error);
+ throw error;
+ }
+ if (audioBuffers.length === 0) return void 0;
+ const firstAudioBuffer = audioBuffers[0];
+ const lastAudioBuffer = audioBuffers[audioBuffers.length - 1];
+ if (!firstAudioBuffer || !lastAudioBuffer) return void 0;
+ const sampleRate = firstAudioBuffer.buffer.sampleRate;
+ const numberOfChannels = firstAudioBuffer.buffer.numberOfChannels;
+ const actualStartMs = Math.max(fromMs, firstAudioBuffer.startMs);
+ const actualEndMs = Math.min(toMs, lastAudioBuffer.endMs);
+ const totalDurationMs = actualEndMs - actualStartMs;
+ const totalSamples = Math.floor(totalDurationMs / 1e3 * sampleRate);
+ if (totalSamples <= 0) return void 0;
+ let finalContext;
+ try {
+ finalContext = new OfflineAudioContext(numberOfChannels, totalSamples, sampleRate);
+ } catch (error) {
+ throw new Error(`[EFMedia.fetchAudioSpanningTime final] Failed to create final OfflineAudioContext(${numberOfChannels}, ${totalSamples}, ${sampleRate}) for time range ${actualStartMs}-${actualEndMs}ms: ${error instanceof Error ? error.message : String(error)}. This is for creating the final concatenated audio buffer.`);
+ }
+ const finalBuffer = finalContext.createBuffer(numberOfChannels, totalSamples, sampleRate);
+ let outputOffset = 0;
+ for (const { buffer: audioBuffer, startMs: segmentStartMs, endMs: segmentEndMs } of audioBuffers) {
+ const segmentNeedStart = Math.max(actualStartMs, segmentStartMs);
+ const segmentNeedEnd = Math.min(actualEndMs, segmentEndMs);
+ if (segmentNeedStart >= segmentNeedEnd) continue;
+ const segmentStartSample = Math.floor((segmentNeedStart - segmentStartMs) / 1e3 * sampleRate);
+ const segmentDurationSamples = Math.floor((segmentNeedEnd - segmentNeedStart) / 1e3 * sampleRate);
+ const actualSamples = Math.min(segmentDurationSamples, audioBuffer.length - segmentStartSample, totalSamples - outputOffset);
+ if (actualSamples <= 0) continue;
+ for (let channel = 0; channel < numberOfChannels; channel++) {
+ const sourceData = audioBuffer.getChannelData(channel);
+ const targetData = finalBuffer.getChannelData(channel);
+ for (let i = 0; i < actualSamples; i++) {
+ const sourceIndex = segmentStartSample + i;
+ const targetIndex = outputOffset + i;
+ if (sourceIndex < sourceData.length && targetIndex < targetData.length) {
+ const sample = sourceData[sourceIndex];
+ if (sample !== void 0) targetData[targetIndex] = sample;
+ }
+ }
+ }
+ outputOffset += actualSamples;
+ }
+ const wavBlob = this.encodeWAVBuffer(finalBuffer);
+ const result = {
+ blob: wavBlob,
+ startMs: actualStartMs - (this.trimStartMs ?? 0),
+ endMs: actualEndMs - (this.trimEndMs ?? 0)
+ };
+ return result;
+ } catch (error) {
+ log("Failed to extract and concatenate audio from JIT video segments:", error);
+ return void 0;
+ }
+ }
+ const start = audioTrackIndex.initSegment.offset;
+ const end = audioTrackIndex.initSegment.offset + audioTrackIndex.initSegment.size;
+ const audioInitFragmentRequest = this.fetch(this.fragmentTrackPath(String(audioTrackId)), { headers: { Range: `bytes=${start}-${end - 1}` } });
+ const fragments = Object.values(audioTrackIndex.segments).filter((segment) => {
+ const segmentStartsBeforeEnd = segment.dts <= toMs * audioTrackIndex.timescale / 1e3;
+ const segmentEndsAfterStart = segment.dts + segment.duration >= fromMs * audioTrackIndex.timescale / 1e3;
+ return segmentStartsBeforeEnd && segmentEndsAfterStart;
+ });
+ const firstFragment = fragments[0];
+ if (!firstFragment) return void 0;
+ const lastFragment = fragments[fragments.length - 1];
+ if (!lastFragment) return void 0;
+ const fragmentStart = firstFragment.offset;
+ const fragmentEnd = lastFragment.offset + lastFragment.size;
+ const audioFragmentRequest = this.fetch(this.fragmentTrackPath(String(audioTrackId)), { headers: { Range: `bytes=${fragmentStart}-${fragmentEnd - 1}` } });
+ const initResponse = await audioInitFragmentRequest;
+ const dataResponse = await audioFragmentRequest;
+ const initBuffer = await initResponse.arrayBuffer();
+ const dataBuffer = await dataResponse.arrayBuffer();
+ const audioBlob = new Blob([initBuffer, dataBuffer], { type: "audio/mp4" });
+ return {
+ blob: audioBlob,
+ startMs: firstFragment.dts / audioTrackIndex.timescale * 1e3 - (this.trimStartMs ?? 0),
+ endMs: lastFragment.dts / audioTrackIndex.timescale * 1e3 + lastFragment.duration / audioTrackIndex.timescale * 1e3 - (this.trimEndMs ?? 0)
+ };
+ }
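Both requests above use HTTP Range headers against the track path, one for the init segment and one for the contiguous run of media fragments. The pattern, reduced to a sketch with a plain fetch (the real code goes through this.fetch):

```ts
// Minimal sketch of the byte-range pattern; an HTTP Range is inclusive on both
// ends, which is why the code above requests bytes=${start}-${end - 1}.
async function fetchByteRange(url: string, start: number, end: number): Promise<ArrayBuffer> {
  const response = await fetch(url, { headers: { Range: `bytes=${start}-${end - 1}` } });
  if (!response.ok) throw new Error(`Range request failed: ${response.status}`); // 206 Partial Content counts as ok
  return response.arrayBuffer();
}
```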
+ /**
+ * Encode an AudioBuffer to a WAV blob
+ */
+ encodeWAVBuffer(audioBuffer) {
+ const numberOfChannels = audioBuffer.numberOfChannels;
+ const sampleRate = audioBuffer.sampleRate;
+ const length = audioBuffer.length;
+ const bytesPerSample = 2;
+ const blockAlign = numberOfChannels * bytesPerSample;
+ const byteRate = sampleRate * blockAlign;
+ const dataSize = length * blockAlign;
+ const fileSize = 36 + dataSize;
+ const buffer = new ArrayBuffer(44 + dataSize);
+ const view = new DataView(buffer);
+ let offset = 0;
+ view.setUint32(offset, 1380533830, false); // "RIFF" chunk id
+ offset += 4;
+ view.setUint32(offset, fileSize, true);
+ offset += 4;
+ view.setUint32(offset, 1463899717, false); // "WAVE" format tag
+ offset += 4;
+ view.setUint32(offset, 1718449184, false); // "fmt " sub-chunk id
+ offset += 4;
+ view.setUint32(offset, 16, true); // fmt chunk size for PCM
+ offset += 4;
+ view.setUint16(offset, 1, true); // audio format 1 = linear PCM
+ offset += 2;
+ view.setUint16(offset, numberOfChannels, true);
+ offset += 2;
+ view.setUint32(offset, sampleRate, true);
+ offset += 4;
+ view.setUint32(offset, byteRate, true);
+ offset += 4;
+ view.setUint16(offset, blockAlign, true);
+ offset += 2;
+ view.setUint16(offset, 16, true); // bits per sample
+ offset += 2;
+ view.setUint32(offset, 1684108385, false); // "data" sub-chunk id
+ offset += 4;
+ view.setUint32(offset, dataSize, true);
+ offset += 4;
+ for (let i = 0; i < length; i++) for (let channel = 0; channel < numberOfChannels; channel++) {
+ const sample = audioBuffer.getChannelData(channel)[i] || 0;
+ const pcmSample = Math.max(-32768, Math.min(32767, Math.floor(sample * 32767)));
+ view.setInt16(offset, pcmSample, true);
+ offset += 2;
+ }
+ return new Blob([buffer], { type: "audio/wav" });
+ }
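The encoder writes a fixed 44-byte RIFF header followed by interleaved 16-bit PCM, so the output size is fully determined by the buffer shape:

```ts
// Sanity check for encodeWAVBuffer's output size.
function expectedWavBytes(length: number, numberOfChannels: number): number {
  return 44 + length * numberOfChannels * 2; // header + 2 bytes per sample per channel
}
// One second of stereo 48 kHz audio: 44 + 48000 * 2 * 2 = 192044 bytes.
```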
+ set fftSize(value) {
+ const oldValue = this.fftSize;
+ this.setAttribute("fft-size", String(value));
+ this.requestUpdate("fft-size", oldValue);
+ }
+ set fftDecay(value) {
+ const oldValue = this.fftDecay;
+ this.setAttribute("fft-decay", String(value));
+ this.requestUpdate("fft-decay", oldValue);
+ }
+ get fftSize() {
+ return Number.parseInt(this.getAttribute("fft-size") ?? "128", 10);
+ }
+ get fftDecay() {
+ return Number.parseInt(this.getAttribute("fft-decay") ?? "8", 10);
+ }
+ set interpolateFrequencies(value) {
+ const oldValue = this.interpolateFrequencies;
+ this.setAttribute("interpolate-frequencies", String(value));
+ this.requestUpdate("interpolate-frequencies", oldValue);
+ }
+ get interpolateFrequencies() {
+ return this.getAttribute("interpolate-frequencies") !== "false";
+ }
+ get shouldInterpolateFrequencies() {
+ if (this.hasAttribute("interpolate-frequencies")) return this.getAttribute("interpolate-frequencies") !== "false";
+ return false;
+ }
+ static {
+ this.DECAY_WEIGHT = .7;
+ }
+ get FREQ_WEIGHTS() {
+ if (freqWeightsCache.has(this.fftSize)) return freqWeightsCache.get(this.fftSize);
+ const weights = new Float32Array(this.fftSize / 2).map((_, i) => {
+ const frequency = i * 48e3 / this.fftSize;
+ if (frequency < 60) return .3;
+ if (frequency < 250) return .4;
+ if (frequency < 500) return .6;
+ if (frequency < 2e3) return .8;
+ if (frequency < 4e3) return 1.2;
+ if (frequency < 8e3) return 1.6;
+ return 2;
+ });
+ freqWeightsCache.set(this.fftSize, weights);
+ return weights;
+ }
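The weights approximate a loudness curve over linear FFT bins, where bin i covers roughly i * 48000 / fftSize Hz (the 48 kHz rate is baked into the getter). For the default fftSize of 128, the 64 weight bins are 375 Hz apart:

```ts
// bin 0  ->     0 Hz -> 0.3  (sub-bass, de-emphasized)
// bin 1  ->   375 Hz -> 0.6
// bin 4  ->  1500 Hz -> 0.8
// bin 16 ->  6000 Hz -> 1.6  (presence range, boosted)
// bin 32 -> 12000 Hz -> 2    (everything >= 8 kHz gets the maximum weight)
```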
+ #byteTimeDomainCache = new LRUCache(100);
+ #frequencyDataCache = new LRUCache(100);
+ set fftGain(value) {
+ const oldValue = this.fftGain;
+ this.setAttribute("fft-gain", String(value));
+ this.requestUpdate("fft-gain", oldValue);
+ }
+ get fftGain() {
+ return Number.parseFloat(this.getAttribute("fft-gain") ?? "3.0");
+ }
+ synthesizeFragmentIndex(jitMetadata) {
+ const segmentDuration = jitMetadata.segmentDuration || 2e3;
+ const numSegments = Math.ceil(jitMetadata.durationMs / segmentDuration);
+ const fragmentIndex = {};
+ const videoStream = jitMetadata.streams.find((s) => s.type === "video");
+ if (videoStream) {
+ const segments = [];
+ for (let i = 0; i < numSegments; i++) {
+ const startMs = i * segmentDuration;
+ const endMs = Math.min(startMs + segmentDuration, jitMetadata.durationMs);
+ segments.push({
+ dts: Math.floor(startMs * 90),
+ cts: Math.floor(startMs * 90),
+ duration: Math.floor((endMs - startMs) * 90),
+ offset: 0,
+ size: 0
+ });
+ }
+ fragmentIndex[videoStream.index] = {
+ track: videoStream.index,
+ type: "video",
+ timescale: 9e4,
+ duration: Math.floor(jitMetadata.durationMs * 90),
+ width: videoStream.width || 1920,
+ height: videoStream.height || 1080,
+ sample_count: numSegments * 50,
+ codec: videoStream.codecName || "h264",
+ segments,
+ initSegment: {
+ offset: 0,
+ size: 0
+ }
+ };
+ }
+ const audioStream = jitMetadata.streams.find((s) => s.type === "audio");
+ if (audioStream) {
+ const segments = [];
+ const audioTimescale = audioStream.sampleRate || 48e3;
+ for (let i = 0; i < numSegments; i++) {
+ const startMs = i * segmentDuration;
+ const endMs = Math.min(startMs + segmentDuration, jitMetadata.durationMs);
+ segments.push({
+ dts: Math.floor(startMs * audioTimescale / 1e3),
+ cts: Math.floor(startMs * audioTimescale / 1e3),
+ duration: Math.floor((endMs - startMs) * audioTimescale / 1e3),
+ offset: 0,
+ size: 0
+ });
+ }
+ fragmentIndex[audioStream.index] = {
+ track: audioStream.index,
+ type: "audio",
+ timescale: audioTimescale,
+ duration: Math.floor(jitMetadata.durationMs * audioTimescale / 1e3),
+ channel_count: audioStream.channels || 2,
+ sample_rate: audioStream.sampleRate || 48e3,
+ sample_size: 16,
+ sample_count: Math.floor(jitMetadata.durationMs * (audioStream.sampleRate || 48e3) / 1e3),
+ codec: audioStream.codecName || "aac",
+ segments,
+ initSegment: {
+ offset: 0,
+ size: 0
+ }
+ };
+ }
+ return fragmentIndex;
+ }
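The synthesized index leans on two timescale conventions, which is what the dts arithmetic above encodes: video uses the MPEG-standard 90 kHz clock, and audio uses its sample rate.

```ts
// Video: 90000 ticks per second = 90 ticks per millisecond, hence dts = startMs * 90.
// Audio: dts = startMs * sampleRate / 1000, e.g. 2000 ms at 48 kHz -> 96000 ticks.
```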
+ calculateAssetSegmentKeys(fragmentIndex, seekMs) {
+ const segmentKeys = {};
+ for (const [trackId, index] of Object.entries(fragmentIndex)) {
+ const segment = index.segments.toReversed().find((segment$1) => {
+ const segmentStartMs = segment$1.dts / index.timescale * 1e3;
+ return segmentStartMs <= seekMs;
+ });
+ if (segment) {
+ const startTimeMs = segment.dts / index.timescale * 1e3;
+ segmentKeys[trackId] = {
+ startTimeMs,
+ trackId
+ };
+ }
+ }
+ return segmentKeys;
+ }
+ calculateJitSegmentKeys(metadata, seekMs) {
+ const segmentKeys = {};
+ const segmentDuration = metadata.segmentDuration || 2e3;
+ for (const stream of metadata.streams) {
+ const segmentIndex = Math.floor(seekMs / segmentDuration);
+ const startTimeMs = segmentIndex * segmentDuration;
+ segmentKeys[stream.index] = {
+ startTimeMs,
+ trackId: String(stream.index)
+ };
+ }
+ return segmentKeys;
+ }
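Worked example for the JIT variant: with the default 2000 ms segments, every stream maps to the same fixed-duration grid, so a single seek resolves to one index for all tracks:

```ts
// calculateJitSegmentKeys(metadata, 5300) with segmentDuration 2000:
//   segmentIndex = Math.floor(5300 / 2000) = 2
//   startTimeMs  = 2 * 2000 = 4000, for each stream in metadata.streams
```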
+ calculateAssetSeekResult(fragmentIndex, initSegments, seekMs) {
+ const result = {};
+ for (const index of Object.values(fragmentIndex)) {
+ const initTrack = initSegments.find((segment$1) => segment$1.trackId === String(index.track))?.mp4File.getInfo().tracks[0];
+ if (!initTrack) continue;
+ const segment = index.segments.toReversed().find((segment$1) => {
+ const segmentStartMs = segment$1.dts / initTrack.timescale * 1e3;
+ return segmentStartMs <= seekMs;
+ });
+ const nextSegment = index.segments.find((segment$1) => {
+ return segment$1.dts / initTrack.timescale * 1e3 > seekMs;
+ });
+ if (segment) result[index.track] = {
+ segment,
+ track: initTrack,
+ nextSegment
+ };
+ }
+ return result;
+ }
+ calculateJitSeekResult(fragmentIndex, seekMs) {
+ const result = {};
+ for (const index of Object.values(fragmentIndex)) {
+ const track = this.createTrackInfo(index);
+ const segment = index.segments.toReversed().find((segment$1) => {
+ const segmentStartMs = segment$1.dts / track.timescale * 1e3;
+ return segmentStartMs <= seekMs;
+ });
+ const nextSegment = index.segments.find((segment$1) => {
+ return segment$1.dts / track.timescale * 1e3 > seekMs;
+ });
+ if (segment) result[index.track] = {
+ segment,
+ track,
+ nextSegment
+ };
+ }
+ return result;
+ }
+ createTrackInfo(index) {
+ return {
+ id: index.track,
+ name: index.type,
+ type: index.type,
+ timescale: index.timescale,
+ duration: index.duration,
+ bitrate: index.type === "video" ? 1e6 : 128e3,
+ created: /* @__PURE__ */ new Date(),
+ modified: /* @__PURE__ */ new Date(),
+ movie_duration: index.duration,
+ movie_timescale: index.timescale,
+ layer: 0,
+ alternate_group: 0,
+ volume: index.type === "audio" ? 1 : 0,
+ track_width: index.type === "video" ? index.width || 0 : 0,
+ track_height: index.type === "video" ? index.height || 0 : 0,
+ samples_duration: index.duration,
+ codec: index.codec || "unknown",
+ language: "und",
+ nb_samples: index.sample_count || 0
+ };
+ }
+ updated(changedProperties) {
+ super.updated(changedProperties);
+ if (changedProperties.has("ownCurrentTimeMs")) this.executeSeek(this.currentSourceTimeMs);
+ if (changedProperties.has("currentTime") || changedProperties.has("ownCurrentTimeMs")) updateAnimations(this);
+ }
+ get hasOwnDuration() {
+ return true;
+ }
+ get desiredSeekTimeMs() {
+ return this._desiredSeekTimeMs;
+ }
+ set desiredSeekTimeMs(value) {
+ if (this._desiredSeekTimeMs !== value) this._desiredSeekTimeMs = value;
+ }
+ async executeSeek(seekToMs) {
+ this.desiredSeekTimeMs = seekToMs;
+ }
  };
- __decorateClass([
- property({ type: Number })
- ], _EFMedia.prototype, "currentTimeMs", 2);
- __decorateClass([
- property({ type: String, attribute: "asset-id", reflect: true })
- ], _EFMedia.prototype, "assetId", 1);
- __decorateClass([
- state()
- ], _EFMedia.prototype, "desiredSeekTimeMs", 2);
- let EFMedia = _EFMedia;
- function processFFTData(fftData, zeroThresholdPercent = 0.1) {
- const totalBins = fftData.length;
- const zeroThresholdCount = Math.floor(totalBins * zeroThresholdPercent);
- let zeroCount = 0;
- let cutoffIndex = totalBins;
- for (let i = totalBins - 1; i >= 0; i--) {
- if (fftData[i] < 10) {
- zeroCount++;
- } else {
- if (zeroCount >= zeroThresholdCount) {
- cutoffIndex = i + 1;
- break;
- }
- }
- }
- if (cutoffIndex < zeroThresholdCount) {
- return fftData;
- }
- const goodData = fftData.slice(0, cutoffIndex);
- const resampledData = interpolateData(goodData, fftData.length);
- const attenuationStartIndex = Math.floor(totalBins * 0.9);
- for (let i = attenuationStartIndex; i < totalBins; i++) {
- const attenuationProgress = (i - attenuationStartIndex) / (totalBins - attenuationStartIndex) + 0.2;
- const attenuationFactor = Math.max(0, 1 - attenuationProgress);
- resampledData[i] = Math.floor(resampledData[i] * attenuationFactor);
- }
- return resampledData;
+ _decorate([property({ type: Number })], EFMedia.prototype, "currentTimeMs", void 0);
+ _decorate([property({
+ type: Number,
+ attribute: "prefetch-segments"
+ })], EFMedia.prototype, "prefetchSegments", void 0);
+ _decorate([property({
+ type: Number,
+ attribute: "cache-size"
+ })], EFMedia.prototype, "cacheSize", void 0);
+ _decorate([property({
+ type: Boolean,
+ attribute: "enable-prefetch"
+ })], EFMedia.prototype, "enablePrefetch", void 0);
+ _decorate([state()], EFMedia.prototype, "jitLoadingState", void 0);
+ _decorate([state()], EFMedia.prototype, "jitErrorMessage", void 0);
+ _decorate([state()], EFMedia.prototype, "jitCacheStats", void 0);
+ _decorate([property({
+ type: String,
+ attribute: "asset-id",
+ reflect: true
+ })], EFMedia.prototype, "assetId", null);
+ _decorate([state()], EFMedia.prototype, "_desiredSeekTimeMs", void 0);
+ function processFFTData(fftData, zeroThresholdPercent = .1) {
+ const totalBins = fftData.length;
+ const zeroThresholdCount = Math.floor(totalBins * zeroThresholdPercent);
+ let zeroCount = 0;
+ let cutoffIndex = totalBins;
+ // Count trailing near-zero (< 10) bins from the top, as in the previous implementation.
+ for (let i = totalBins - 1; i >= 0; i--) if ((fftData[i] ?? 0) < 10) zeroCount++;
+ else if (zeroCount >= zeroThresholdCount) {
+ cutoffIndex = i + 1;
+ break;
+ }
+ if (cutoffIndex < zeroThresholdCount) return fftData;
+ const goodData = fftData.slice(0, cutoffIndex);
+ const resampledData = interpolateData(goodData, fftData.length);
+ const attenuationStartIndex = Math.floor(totalBins * .9);
+ for (let i = attenuationStartIndex; i < totalBins; i++) {
+ const attenuationProgress = (i - attenuationStartIndex) / (totalBins - attenuationStartIndex) + .2;
+ const attenuationFactor = Math.max(0, 1 - attenuationProgress);
+ resampledData[i] = Math.floor((resampledData[i] ?? 0) * attenuationFactor);
+ }
+ return resampledData;
  }
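processFFTData trims a trailing run of near-zero bins (the comparison against 10 mirrors the pre-rewrite implementation), stretches the remaining bins back to full width with interpolateData, then fades out the top 10% so the stretched tail does not end on a hard edge. A small illustration, assuming the corrected comparison above:

```ts
// 64-bin spectrum where only the first 8 bins carry energy.
const raw = new Uint8Array(64);
raw.set([200, 180, 150, 120, 90, 60, 30, 12], 0); // bins 8..63 stay below 10
const shaped = processFFTData(raw); // still 64 bins; the 8 live bins now span the full width
```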
  function interpolateData(data, targetSize) {
- const resampled = new Uint8Array(targetSize);
- const dataLength = data.length;
- for (let i = 0; i < targetSize; i++) {
- const ratio = i / (targetSize - 1) * (dataLength - 1);
- const index = Math.floor(ratio);
- const fraction = ratio - index;
- if (index >= dataLength - 1) {
- resampled[i] = data[dataLength - 1];
- } else {
- resampled[i] = Math.round(
- // biome-ignore lint/style/noNonNullAssertion: Manual bounds check
- data[index] * (1 - fraction) + data[index + 1] * fraction
- );
- }
- }
- return resampled;
+ const resampled = new Uint8Array(targetSize);
+ const dataLength = data.length;
+ for (let i = 0; i < targetSize; i++) {
+ const ratio = i / (targetSize - 1) * (dataLength - 1);
+ const index = Math.floor(ratio);
+ const fraction = ratio - index;
+ if (index >= dataLength - 1) resampled[i] = data[dataLength - 1] ?? 0;
+ else resampled[i] = Math.round((data[index] ?? 0) * (1 - fraction) + (data[index + 1] ?? 0) * fraction);
+ }
+ return resampled;
  }
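interpolateData is plain linear resampling: output position i maps to the fractional source index i / (targetSize - 1) * (dataLength - 1) and blends the two neighbouring samples:

```ts
// Stretching two samples to four:
// interpolateData(new Uint8Array([0, 100]), 4) -> Uint8Array [0, 33, 67, 100]
```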
- export {
- EFMedia,
- deepGetMediaElements
- };
+ export { EFMedia, deepGetMediaElements };