@editframe/elements 0.16.8-beta.0 → 0.17.6-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. package/README.md +30 -0
  2. package/dist/DecoderResetFrequency.test.d.ts +1 -0
  3. package/dist/DecoderResetRecovery.test.d.ts +1 -0
  4. package/dist/DelayedLoadingState.d.ts +48 -0
  5. package/dist/DelayedLoadingState.integration.test.d.ts +1 -0
  6. package/dist/DelayedLoadingState.js +113 -0
  7. package/dist/DelayedLoadingState.test.d.ts +1 -0
  8. package/dist/EF_FRAMEGEN.d.ts +10 -1
  9. package/dist/EF_FRAMEGEN.js +199 -179
  10. package/dist/EF_INTERACTIVE.js +2 -6
  11. package/dist/EF_RENDERING.js +1 -3
  12. package/dist/JitTranscodingClient.browsertest.d.ts +1 -0
  13. package/dist/JitTranscodingClient.d.ts +167 -0
  14. package/dist/JitTranscodingClient.js +373 -0
  15. package/dist/JitTranscodingClient.test.d.ts +1 -0
  16. package/dist/LoadingDebounce.test.d.ts +1 -0
  17. package/dist/LoadingIndicator.browsertest.d.ts +0 -0
  18. package/dist/ManualScrubTest.test.d.ts +1 -0
  19. package/dist/ScrubResolvedFlashing.test.d.ts +1 -0
  20. package/dist/ScrubTrackIntegration.test.d.ts +1 -0
  21. package/dist/ScrubTrackManager.d.ts +96 -0
  22. package/dist/ScrubTrackManager.js +216 -0
  23. package/dist/ScrubTrackManager.test.d.ts +1 -0
  24. package/dist/SegmentSwitchLoading.test.d.ts +1 -0
  25. package/dist/VideoSeekFlashing.browsertest.d.ts +0 -0
  26. package/dist/VideoStuckDiagnostic.test.d.ts +1 -0
  27. package/dist/elements/CrossUpdateController.js +13 -15
  28. package/dist/elements/EFAudio.browsertest.d.ts +0 -0
  29. package/dist/elements/EFAudio.d.ts +1 -1
  30. package/dist/elements/EFAudio.js +30 -43
  31. package/dist/elements/EFCaptions.js +337 -373
  32. package/dist/elements/EFImage.js +64 -90
  33. package/dist/elements/EFMedia.d.ts +98 -33
  34. package/dist/elements/EFMedia.js +1169 -678
  35. package/dist/elements/EFSourceMixin.js +31 -48
  36. package/dist/elements/EFTemporal.d.ts +1 -0
  37. package/dist/elements/EFTemporal.js +266 -360
  38. package/dist/elements/EFTimegroup.d.ts +3 -1
  39. package/dist/elements/EFTimegroup.js +262 -323
  40. package/dist/elements/EFVideo.browsertest.d.ts +0 -0
  41. package/dist/elements/EFVideo.d.ts +90 -2
  42. package/dist/elements/EFVideo.js +408 -111
  43. package/dist/elements/EFWaveform.js +375 -411
  44. package/dist/elements/FetchMixin.js +14 -24
  45. package/dist/elements/MediaController.d.ts +30 -0
  46. package/dist/elements/TargetController.js +130 -156
  47. package/dist/elements/TimegroupController.js +17 -19
  48. package/dist/elements/durationConverter.js +15 -4
  49. package/dist/elements/parseTimeToMs.js +4 -10
  50. package/dist/elements/printTaskStatus.d.ts +2 -0
  51. package/dist/elements/printTaskStatus.js +11 -0
  52. package/dist/elements/updateAnimations.js +39 -59
  53. package/dist/getRenderInfo.js +58 -67
  54. package/dist/gui/ContextMixin.js +203 -288
  55. package/dist/gui/EFConfiguration.js +27 -43
  56. package/dist/gui/EFFilmstrip.js +440 -620
  57. package/dist/gui/EFFitScale.js +112 -135
  58. package/dist/gui/EFFocusOverlay.js +45 -61
  59. package/dist/gui/EFPreview.js +30 -49
  60. package/dist/gui/EFScrubber.js +78 -99
  61. package/dist/gui/EFTimeDisplay.js +49 -70
  62. package/dist/gui/EFToggleLoop.js +17 -34
  63. package/dist/gui/EFTogglePlay.js +37 -58
  64. package/dist/gui/EFWorkbench.js +66 -88
  65. package/dist/gui/TWMixin.js +2 -48
  66. package/dist/gui/TWMixin2.js +31 -0
  67. package/dist/gui/efContext.js +2 -6
  68. package/dist/gui/fetchContext.js +1 -3
  69. package/dist/gui/focusContext.js +1 -3
  70. package/dist/gui/focusedElementContext.js +2 -6
  71. package/dist/gui/playingContext.js +1 -4
  72. package/dist/index.js +5 -30
  73. package/dist/msToTimeCode.js +11 -13
  74. package/dist/style.css +2 -1
  75. package/package.json +3 -3
  76. package/src/elements/EFAudio.browsertest.ts +569 -0
  77. package/src/elements/EFAudio.ts +4 -6
  78. package/src/elements/EFCaptions.browsertest.ts +0 -1
  79. package/src/elements/EFImage.browsertest.ts +0 -1
  80. package/src/elements/EFMedia.browsertest.ts +147 -115
  81. package/src/elements/EFMedia.ts +1339 -307
  82. package/src/elements/EFTemporal.browsertest.ts +0 -1
  83. package/src/elements/EFTemporal.ts +11 -0
  84. package/src/elements/EFTimegroup.ts +73 -10
  85. package/src/elements/EFVideo.browsertest.ts +680 -0
  86. package/src/elements/EFVideo.ts +729 -50
  87. package/src/elements/EFWaveform.ts +4 -4
  88. package/src/elements/MediaController.ts +108 -0
  89. package/src/elements/__screenshots__/EFMedia.browsertest.ts/EFMedia-JIT-audio-playback-audioBufferTask-should-work-in-JIT-mode-without-URL-errors-1.png +0 -0
  90. package/src/elements/printTaskStatus.ts +16 -0
  91. package/src/elements/updateAnimations.ts +6 -0
  92. package/src/gui/TWMixin.ts +10 -3
  93. package/test/EFVideo.frame-tasks.browsertest.ts +524 -0
  94. package/test/EFVideo.framegen.browsertest.ts +118 -0
  95. package/test/createJitTestClips.ts +293 -0
  96. package/test/useAssetMSW.ts +49 -0
  97. package/test/useMSW.ts +31 -0
  98. package/types.json +1 -1
  99. package/dist/gui/TWMixin.css.js +0 -4
  100. /package/dist/elements/{TargetController.test.d.ts → TargetController.browsertest.d.ts} +0 -0
  101. /package/src/elements/{TargetController.test.ts → TargetController.browsertest.ts} +0 -0
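The headline change in this release is a just-in-time (JIT) transcoding path for EFMedia/EFVideo, selected through a new mode property ("asset" | "jit-transcode" | "auto") and tuned via prefetch-segments, cache-size, and enable-prefetch attributes. A minimal usage sketch follows; the "ef-video" tag name and the URL are assumptions for illustration, since the diff below (from EFMedia.ts) only shows the class internals:

    // Hypothetical usage — the tag name and URL are illustrative, not from this diff.
    const video = document.createElement("ef-video") as HTMLElement & {
      mode: "asset" | "jit-transcode" | "auto";
    };
    video.setAttribute("src", "https://cdn.example.com/clip.mp4");
    video.setAttribute("prefetch-segments", "3");
    video.mode = "jit-transcode"; // or leave "auto" and let effectiveMode decide
    document.body.append(video);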
@@ -1,21 +1,25 @@
+ import type { TrackFragmentIndex, TrackSegment } from "@editframe/assets";
+ import { VideoAsset } from "@editframe/assets/EncodedAsset.js";
+ import { MP4File } from "@editframe/assets/MP4File.js";
  import { Task } from "@lit/task";
  import { deepArrayEquals } from "@lit/task/deep-equals.js";
  import debug from "debug";
- import { LitElement, type PropertyValueMap, css } from "lit";
+ import { css, LitElement, type PropertyValueMap } from "lit";
  import { property, state } from "lit/decorators.js";
  import type * as MP4Box from "mp4box";
-
- import type { TrackFragmentIndex, TrackSegment } from "@editframe/assets";
-
- import { VideoAsset } from "@editframe/assets/EncodedAsset.js";
- import { MP4File } from "@editframe/assets/MP4File.js";
  import { EF_INTERACTIVE } from "../EF_INTERACTIVE.js";
+ import { JitTranscodingClient } from "../JitTranscodingClient.js";
  import { EFSourceMixin } from "./EFSourceMixin.js";
  import { EFTemporal } from "./EFTemporal.js";
  import { FetchMixin } from "./FetchMixin.js";
  import { EFTargetable } from "./TargetController.ts";
  import { updateAnimations } from "./updateAnimations.ts";

+ // EF_FRAMEGEN is a global instance created in EF_FRAMEGEN.ts
+ declare global {
+ var EF_FRAMEGEN: import("../EF_FRAMEGEN.js").EFFramegen;
+ }
+
  const log = debug("ef:elements:EFMedia");

  const freqWeightsCache = new Map<number, Float32Array>();
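The import block is consolidated into one sorted group, and an ambient declaration is added for the EF_FRAMEGEN singleton so any module can reference the global with full typing and no import. A standalone sketch of the declare-global pattern, with made-up names standing in for EF_FRAMEGEN:

    // Pattern sketch — MY_SINGLETON is an illustrative stand-in.
    declare global {
      // must be `var` (not let/const) for properties of globalThis
      var MY_SINGLETON: { frameCount: number };
    }

    globalThis.MY_SINGLETON = { frameCount: 0 };

    export function tick(): number {
      // typed access, no import needed wherever the declaration is visible
      return ++MY_SINGLETON.frameCount;
    }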
@@ -84,312 +88,310 @@ export class EFMedia extends EFTargetable(
  @property({ type: Number })
  currentTimeMs = 0;

- #assetId: string | null = null;
+ /**
+ * Media loading mode - determines how content is loaded and processed
+ * - "asset": Use existing asset-based loading (assetId or fragment-based URLs)
+ * - "jit-transcode": Use JIT transcoding for remote URLs
+ * - "auto": Automatically detect based on URL patterns (default)
+ */
+ private _mode: "asset" | "jit-transcode" | "auto" = "auto";

  /**
- * The unique identifier for the media asset.
- * This property can be set programmatically or via the "asset-id" attribute.
- * @domAttribute "asset-id"
+ * Get the mode, prioritizing attribute values over property values
  */
- @property({ type: String, attribute: "asset-id", reflect: true })
- set assetId(value: string | null) {
- this.#assetId = value;
+ get mode(): "asset" | "jit-transcode" | "auto" {
+ const attr = this.getAttribute("mode") as
+ | ("asset" | "jit-transcode" | "auto")
+ | null;
+ return attr || this._mode || "auto";
  }

- get assetId() {
- return this.#assetId || this.getAttribute("asset-id");
+ set mode(value: "asset" | "jit-transcode" | "auto") {
+ const oldValue = this.mode;
+ this._mode = value;
+ this.setAttribute("mode", value);
+ this.requestUpdate("mode", oldValue);
  }

- fragmentIndexPath() {
- if (this.assetId) {
- return `${this.apiHost}/api/v1/isobmff_files/${this.assetId}/index`;
+ connectedCallback(): void {
+ super.connectedCallback();
+
+ // Initialize mode from attribute if present
+ const modeAttr = this.getAttribute("mode") as
+ | ("asset" | "jit-transcode" | "auto")
+ | null;
+ if (modeAttr && modeAttr !== this._mode) {
+ this._mode = modeAttr;
+ this.requestUpdate("mode");
  }
- return `/@ef-track-fragment-index/${this.src ?? ""}`;
- }

- fragmentTrackPath(trackId: string) {
- if (this.assetId) {
- return `${this.apiHost}/api/v1/isobmff_tracks/${this.assetId}/${trackId}`;
+ // Manually sync attributes to properties for better control
+ const prefetchSegmentsAttr = this.getAttribute("prefetch-segments");
+ if (prefetchSegmentsAttr !== null) {
+ this.prefetchSegments = Number.parseInt(prefetchSegmentsAttr, 10) || 3;
  }
- // trackId is only specified as a query in the @ef-track url shape
- // this is because that system doesn't have a full url matching system.
- // This is an annoying incosistency that should be fixed.
- return `/@ef-track/${this.src ?? ""}?trackId=${trackId}`;
- }

- public trackFragmentIndexLoader = new Task(this, {
- args: () => [this.fragmentIndexPath(), this.fetch] as const,
- task: async ([fragmentIndexPath, fetch], { signal }) => {
- try {
- const response = await fetch(fragmentIndexPath, { signal });
+ const cacheSizeAttr = this.getAttribute("cache-size");
+ if (cacheSizeAttr !== null) {
+ this.cacheSize = Number.parseInt(cacheSizeAttr, 10) || 20;
+ }

- return (await response.json()) as Record<number, TrackFragmentIndex>;
- } catch (error) {
- log("Failed to load track fragment index", error);
- return undefined;
- }
- },
- onComplete: () => {
- this.requestUpdate("intrinsicDurationMs");
- this.requestUpdate("ownCurrentTimeMs");
- this.rootTimegroup?.requestUpdate("ownCurrentTimeMs");
- this.rootTimegroup?.requestUpdate("durationMs");
- },
- });
+ const enablePrefetchAttr = this.getAttribute("enable-prefetch");
+ if (enablePrefetchAttr !== null) {
+ this.enablePrefetch = enablePrefetchAttr === "true";
+ }
+ }
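The mode getter reads the live attribute first and only falls back to the private field, so markup always wins over a property assignment; connectedCallback then hand-syncs the numeric and boolean tuning attributes. Note that the `|| 3` and `|| 20` fallbacks also coerce an explicit "0" back to the default, since 0 is falsy. The resulting precedence, sketched against an instance el of this class:

    el.mode = "asset";                        // sets _mode, reflects mode="asset"
    el.setAttribute("mode", "jit-transcode");
    el.mode;                                  // "jit-transcode" — the attribute wins
    el.removeAttribute("mode");
    el.mode;                                  // "asset" — falls back to _mode, then "auto"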
 
- public initSegmentsLoader = new Task(this, {
- autoRun: EF_INTERACTIVE,
- args: () =>
- [this.trackFragmentIndexLoader.value, this.src, this.fetch] as const,
- task: async ([fragmentIndex, _src, fetch], { signal }) => {
- if (!fragmentIndex) {
- return;
- }
- return await Promise.all(
- Object.entries(fragmentIndex).map(async ([trackId, track]) => {
- const start = track.initSegment.offset;
- const end = track.initSegment.offset + track.initSegment.size;
- const response = await fetch(this.fragmentTrackPath(trackId), {
- signal,
- headers: { Range: `bytes=${start}-${end - 1}` },
- });
- const buffer =
- (await response.arrayBuffer()) as MP4Box.MP4ArrayBuffer;
- buffer.fileStart = 0;
- const mp4File = new MP4File();
- mp4File.appendBuffer(buffer, true);
- mp4File.flush();
- await mp4File.readyPromise;
+ /**
+ * Configuration for JIT transcoding performance optimizations
+ */
+ @property({ type: Number, attribute: "prefetch-segments" })
+ prefetchSegments = 3;

- return { trackId, buffer, mp4File };
- }),
- );
- },
- });
+ @property({ type: Number, attribute: "cache-size" })
+ cacheSize = 20;

- get defaultVideoTrackId() {
- return Object.values(this.trackFragmentIndexLoader.value ?? {}).find(
- (track) => track.type === "video",
- )?.track;
- }
+ @property({ type: Boolean, attribute: "enable-prefetch" })
+ enablePrefetch = true;

- get defaultAudioTrackId() {
- return Object.values(this.trackFragmentIndexLoader.value ?? {}).find(
- (track) => track.type === "audio",
- )?.track;
- }
+ /**
+ * Loading states for JIT transcoding
+ */
+ @state()
+ jitLoadingState: "idle" | "metadata" | "segments" | "error" = "idle";

- seekTask = new Task(this, {
- autoRun: EF_INTERACTIVE,
- args: () =>
- [
- this.desiredSeekTimeMs,
- this.trackFragmentIndexLoader.value,
- this.initSegmentsLoader.value,
- ] as const,
- task: async (
- [seekToMs, fragmentIndex, initSegments],
- { signal: _signal },
- ) => {
- if (fragmentIndex === undefined) {
- return;
- }
- if (initSegments === undefined) {
- return;
- }
+ @state()
+ jitErrorMessage: string | null = null;

- const result: Record<
- string,
- {
- segment: TrackSegment;
- track: MP4Box.TrackInfo;
- nextSegment?: TrackSegment;
- }
- > = {};
+ @state()
+ jitCacheStats: { size: number; hitRate: number; efficiency: number } | null =
+ null;

- for (const index of Object.values(fragmentIndex)) {
- const track = initSegments
- .find((segment) => segment.trackId === String(index.track))
- ?.mp4File.getInfo().tracks[0];
+ /**
+ * Detected loading mode based on URL patterns and manual override
+ */
+ get effectiveMode(): "asset" | "jit-transcode" {
+ // First check for explicit manual overrides
+ const actualMode = this.mode;

- if (!track) {
- throw new Error("Could not finding matching track");
- }
+ if (actualMode === "asset" || actualMode === "jit-transcode") {
+ return actualMode;
+ }

- const segment = index.segments.toReversed().find((segment) => {
- return (segment.dts / track.timescale) * 1000 <= seekToMs;
- });
+ // Auto-detection logic only runs when mode is "auto" or not set
+ if (this.assetId) {
+ return "asset"; // Always use asset mode if assetId is specified
+ }

- const nextSegment = index.segments.find((segment) => {
- return (segment.dts / track.timescale) * 1000 > seekToMs;
- });
+ if (!this.src) {
+ return "asset"; // Default to asset mode if no src
+ }

- if (!segment) {
- return;
- }
+ if (JitTranscodingClient.isJitTranscodeEligible(this.src)) {
+ return "jit-transcode";
+ }

- result[index.track] = { segment, track, nextSegment };
- }
+ return "asset"; // Default to asset mode for everything else
+ }
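effectiveMode collapses the tri-state mode into the two real pipelines. A standalone restatement of the branch order (an illustrative helper, not the class code):

    function resolveMode(
      mode: "asset" | "jit-transcode" | "auto",
      assetId: string | null,
      src: string | null,
      eligible: (src: string) => boolean, // stands in for JitTranscodingClient.isJitTranscodeEligible
    ): "asset" | "jit-transcode" {
      if (mode !== "auto") return mode; // 1. explicit override wins
      if (assetId) return "asset";      // 2. an asset id always forces asset mode
      if (!src) return "asset";         // 3. nothing to transcode
      return eligible(src) ? "jit-transcode" : "asset"; // 4. URL eligibility decides
    }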
 
- return result;
+ jitClientTask = new Task(this, {
+ autoRun: EF_INTERACTIVE,
+ onError: (error) => {
+ console.error("jitClientTask error", error);
+ },
+ args: () =>
+ [
+ this.apiHost,
+ this.cacheSize,
+ this.enablePrefetch,
+ this.prefetchSegments,
+ ] as const,
+ task: ([apiHost, cacheSize, enablePrefetch, prefetchSegments]) => {
+ const baseUrl =
+ apiHost && apiHost !== "https://editframe.dev"
+ ? apiHost
+ : "http://localhost:3000";
+
+ return new JitTranscodingClient({
+ baseUrl,
+ segmentCacheSize: cacheSize,
+ enableNetworkAdaptation: enablePrefetch,
+ enablePrefetch: enablePrefetch,
+ prefetchSegments: prefetchSegments,
+ });
  },
  });

- fetchSeekTask = new Task(this, {
- autoRun: EF_INTERACTIVE,
- argsEqual: deepArrayEquals,
- args: () =>
- [this.initSegmentsLoader.value, this.seekTask.value, this.fetch] as const,
- task: async ([initSegments, seekResult, fetch], { signal }) => {
- if (!initSegments) {
- return;
+ /**
+ * JIT transcoding metadata loader
+ * Loads video metadata for JIT transcoded content
+ */
+ jitMetadataLoader = new Task(this, {
+ autoRun: EF_INTERACTIVE, // Always run since this is critical for frame rendering
+ onError: (error) => {
+ console.error("jitMetadataLoader error", error);
+ },
+ args: () => [this.src, this.jitClientTask.value] as const,
+ task: async ([src, _jitClient], { signal: _signal }) => {
+ if (this.effectiveMode !== "jit-transcode") {
+ return null;
  }
- if (!seekResult) {
- return;
+ await this.jitClientTask.taskComplete;
+ const jitClient = this.jitClientTask.value;
+ if (!src || !jitClient) {
+ return null;
  }

- const files: Record<string, File> = {};
-
- for (const [trackId, { segment, track, nextSegment }] of Object.entries(
- seekResult,
- )) {
- const start = segment.offset;
- const end = segment.offset + segment.size;
-
- const response = await fetch(this.fragmentTrackPath(trackId), {
- signal,
- headers: { Range: `bytes=${start}-${end - 1}` },
- });
-
- if (nextSegment) {
- const nextStart = nextSegment.offset;
- const nextEnd = nextSegment.offset + nextSegment.size;
- fetch(this.fragmentTrackPath(trackId), {
- signal,
- headers: { Range: `bytes=${nextStart}-${nextEnd - 1}` },
- })
- .then(() => {
- log("Prefetched next segment");
- })
- .catch((error) => {
- log("Failed to prefetch next segment", error);
- });
- }
-
- const initSegment = Object.values(initSegments).find(
- (initSegment) => initSegment.trackId === String(track.id),
- );
- if (!initSegment) {
- throw new Error("Could not find matching init segment");
- }
- const initBuffer = initSegment.buffer;
+ try {
+ this.jitLoadingState = "metadata";
+ this.jitErrorMessage = null;

- const mediaBuffer =
- (await response.arrayBuffer()) as unknown as MP4Box.MP4ArrayBuffer;
+ const metadata = await jitClient.loadVideoMetadata(src);

- files[trackId] = new File([initBuffer, mediaBuffer], "video.mp4", {
- type: "video/mp4",
- });
+ this.jitLoadingState = "idle";
+ return metadata;
+ } catch (error) {
+ this.jitLoadingState = "error";
+ this.jitErrorMessage =
+ error instanceof Error
+ ? error.message
+ : "Failed to load video metadata";
+ log("Failed to load JIT metadata:", error);
+ return null;
  }
-
- return files;
  },
- });
-
- videoAssetTask = new Task(this, {
- autoRun: EF_INTERACTIVE,
- args: () => [this.fetchSeekTask.value] as const,
- task: async ([files], { signal: _signal }) => {
- if (!files) {
- return;
- }
- if (!this.defaultVideoTrackId) {
- return;
- }
- const videoFile = files[this.defaultVideoTrackId];
- if (!videoFile) {
- return;
- }
- // TODO: Extract to general cleanup function
- for (const frame of this.videoAssetTask.value?.decodedFrames || []) {
- frame.close();
+ onComplete: () => {
+ if (this.jitLoadingState === "metadata") {
+ this.jitLoadingState = "idle";
  }
- this.videoAssetTask.value?.videoDecoder?.close();
- return await VideoAsset.createFromReadableStream(
- "video.mp4",
- videoFile.stream(),
- videoFile,
- );
+ this.requestUpdate("intrinsicDurationMs");
+ this.requestUpdate("ownCurrentTimeMs");
+ this.rootTimegroup?.requestUpdate("ownCurrentTimeMs");
+ this.rootTimegroup?.requestUpdate("durationMs");
  },
  });
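jitLoadingState and jitErrorMessage are reactive @state() fields, so templates re-render as the metadata load progresses. EFMedia's render() is not shown in this diff; a hypothetical consumer might look like:

    import { html } from "lit";

    // hypothetical — sketches how the new state fields could drive UI
    function jitStatusTemplate(el: {
      jitLoadingState: "idle" | "metadata" | "segments" | "error";
      jitErrorMessage: string | null;
    }) {
      if (el.jitLoadingState === "error") {
        return html`<p class="error">${el.jitErrorMessage ?? "Unknown error"}</p>`;
      }
      return el.jitLoadingState === "idle" ? null : html`<p>Loading…</p>`;
    }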

- @state()
- desiredSeekTimeMs = 0;
+ #assetId: string | null = null;

- protected async executeSeek(seekToMs: number) {
- this.desiredSeekTimeMs = seekToMs;
+ /**
+ * The unique identifier for the media asset.
+ * This property can be set programmatically or via the "asset-id" attribute.
+ * @domAttribute "asset-id"
+ */
+ @property({ type: String, attribute: "asset-id", reflect: true })
+ set assetId(value: string | null) {
+ this.#assetId = value;
  }

- protected updated(
- changedProperties: PropertyValueMap<any> | Map<PropertyKey, unknown>,
- ): void {
- if (changedProperties.has("ownCurrentTimeMs")) {
- this.executeSeek(this.currentSourceTimeMs);
+ get assetId() {
+ return this.#assetId || this.getAttribute("asset-id");
+ }
+
+ fragmentIndexPath() {
+ if (this.assetId) {
+ return `${this.apiHost}/api/v1/isobmff_files/${this.assetId}/index`;
  }
- // TODO: this is copied straight from EFTimegroup.ts
- // and should be refactored to be shared/reduce bad duplication of
- // critical logic.
- if (
- changedProperties.has("currentTime") ||
- changedProperties.has("ownCurrentTimeMs")
- ) {
- updateAnimations(this);
+ const src = this.src ?? "";
+ if (!src) {
+ // Return a safe path that will fail gracefully in tests - allows tasks to run without null errors
+ return "/@ef-track-fragment-index/no-src-available";
+ }
+ // Normalize path to avoid double slashes and handle @ef- prefixed paths
+ const normalizedSrc = src.startsWith("/") ? src.slice(1) : src;
+ // If src is an @ef- style path, it's likely already a path fragment, not a full URL
+ if (normalizedSrc.startsWith("@ef-")) {
+ // For @ef- paths, we may need different handling - they might be asset IDs
+ return `/@ef-track-fragment-index/${normalizedSrc}`;
  }
+ return `/@ef-track-fragment-index/${normalizedSrc}`;
  }

- get hasOwnDuration() {
- return true;
+ fragmentTrackPath(trackId: string) {
+ if (this.assetId) {
+ return `${this.apiHost}/api/v1/isobmff_tracks/${this.assetId}/${trackId}`;
+ }
+ // trackId is only specified as a query in the @ef-track url shape
+ // this is because that system doesn't have a full url matching system.
+ // This is an annoying incosistency that should be fixed.
+ const src = this.src ?? "";
+ if (!src) {
+ // Return a safe path that will fail gracefully in tests - allows tasks to run without null errors
+ return `/@ef-track/no-src-available?trackId=${trackId}`;
+ }
+ // Normalize path to avoid double slashes and handle @ef- prefixed paths
+ const normalizedSrc = src.startsWith("/") ? src.slice(1) : src;
+ // If src is an @ef- style path, it's likely already a path fragment, not a full URL
+ if (normalizedSrc.startsWith("@ef-")) {
+ return `/@ef-track/${normalizedSrc}?trackId=${trackId}`;
+ }
+ return `/@ef-track/${normalizedSrc}?trackId=${trackId}`;
  }

- get intrinsicDurationMs() {
- if (!this.trackFragmentIndexLoader.value) {
- return 0;
- }
+ get mediaDurationTask() {
+ return this.fragmentIndexTask;
+ }

- const durations = Object.values(this.trackFragmentIndexLoader.value).map(
- (track) => {
- return (track.duration / track.timescale) * 1000;
- },
+ get defaultVideoTrackId() {
+ const fragmentIndex = this.fragmentIndexTask.value as Record<
+ number,
+ TrackFragmentIndex
+ > | null;
+ return Object.values(fragmentIndex ?? {}).find(
+ (track) => track.type === "video",
+ )?.track;
+ }
+
+ get defaultAudioTrackId() {
+ const fragmentIndex = this.fragmentIndexTask.value as Record<
+ number,
+ TrackFragmentIndex
+ > | null;
+ return Object.values(fragmentIndex ?? {}).find(
+ (track) => track.type === "audio",
+ )?.track;
+ }
+
+ get intrinsicDurationMs() {
+ const fragmentIndex = this.fragmentIndexTask.value as Record<
+ number,
+ TrackFragmentIndex
+ > | null;
+ if (!fragmentIndex) return 0;
+
+ const durations = Object.values(fragmentIndex).map(
+ (track) => (track.duration / track.timescale) * 1000,
  );
- if (durations.length === 0) {
- return 0;
- }
+ if (durations.length === 0) return 0;
  return Math.max(...durations);
  }

- #audioContext = new OfflineAudioContext(2, 48000 / 30, 48000);
+ #audioContext = (() => {
+ try {
+ return new OfflineAudioContext(2, 48000 / 30, 48000);
+ } catch (error) {
+ throw new Error(
+ `[EFMedia.audioBufferTask] Failed to create OfflineAudioContext(2, ${48000 / 30}, 48000): ${error instanceof Error ? error.message : String(error)}. This is the class field audioContext for audio buffer task processing.`,
+ );
+ }
+ })();

  audioBufferTask = new Task(this, {
  autoRun: EF_INTERACTIVE,
- args: () => [this.fetchSeekTask.value, this.seekTask.value] as const,
+ onError: (error) => {
+ console.error("audioBufferTask error", error);
+ },
+ args: () => [this.mediaSegmentsTask.value, this.seekTask.value] as const,
  task: async ([files, segments], { signal: _signal }) => {
- if (!files) {
- return;
- }
- if (!segments) {
- return;
- }
- if (!this.defaultAudioTrackId) {
- return;
- }
+ if (!files || !segments) return;
+
+ if (!this.defaultAudioTrackId) return;
+
  const segment = segments[this.defaultAudioTrackId];
- if (!segment) {
- return;
- }
+ if (!segment) return;
+
  const audioFile = files[this.defaultAudioTrackId];
- if (!audioFile) {
- return;
- }
+ if (!audioFile) return;
+
  return {
  buffer: await this.#audioContext.decodeAudioData(
  await audioFile.arrayBuffer(),
@@ -400,6 +402,8 @@ export class EFMedia extends EFTargetable(
  });

  async fetchAudioSpanningTime(fromMs: number, toMs: number) {
+ // Clamp toMs to the duration of the media
+ toMs = Math.min(toMs, this.durationMs);
  // Adjust range for track's own time
  if (this.sourceInMs) {
  fromMs -=
@@ -412,49 +416,253 @@ export class EFMedia extends EFTargetable(
  fromMs -= this.startTimeMs - (this.trimStartMs ?? 0);
  toMs -= this.startTimeMs - (this.trimStartMs ?? 0);

- await this.trackFragmentIndexLoader.taskComplete;
+ await this.fragmentIndexTask.taskComplete;
+
+ const fragmentIndex = this.fragmentIndexTask.value as Record<
+ number,
+ TrackFragmentIndex
+ > | null;
  const audioTrackId = this.defaultAudioTrackId;
  if (!audioTrackId) {
- log("No audio track found");
- return;
+ return undefined;
  }

- const audioTrackIndex = this.trackFragmentIndexLoader.value?.[audioTrackId];
+ const audioTrackIndex = fragmentIndex?.[audioTrackId];
  if (!audioTrackIndex) {
- log("No audio track found");
- return;
+ return undefined;
  }

- const start = audioTrackIndex.initSegment.offset;
- const end =
- audioTrackIndex.initSegment.offset + audioTrackIndex.initSegment.size;
- const audioInitFragmentRequest = this.fetch(
- this.fragmentTrackPath(String(audioTrackId)),
- {
- headers: { Range: `bytes=${start}-${end - 1}` },
- },
- );
+ // Branch based on effective mode: JIT vs Asset
+ if (this.effectiveMode === "jit-transcode" && this.src) {
+ // JIT mode: fetch segments and extract audio directly
+ const jitClient = this.jitClientTask.value;
+ if (!jitClient) {
+ return undefined;
+ }
+
+ try {
+ // Calculate which JIT segments we need
+ const segmentDuration = 2000; // 2s segments
+ const startSegmentIndex = Math.floor(fromMs / segmentDuration);
+ // Clamp to the last segment index, otherwise this will fetch audio past the end of the media, which is a 500 error in our server
+ const maxSegmentIndex =
+ Math.floor(this.durationMs / segmentDuration) - 1;
+ const endSegmentIndex = Math.min(
+ Math.floor(toMs / segmentDuration),
+ maxSegmentIndex,
+ );
+
+ // Fetch all needed JIT segments (they contain both video and audio)
+ const quality = await jitClient.getAdaptiveQuality();
+ const segmentPromises: Promise<{
+ buffer: ArrayBuffer;
+ startMs: number;
+ endMs: number;
+ }>[] = [];
+
+ for (let i = startSegmentIndex; i <= endSegmentIndex; i++) {
+ const segmentStartMs = i * segmentDuration;
+ const segmentEndMs = (i + 1) * segmentDuration;
+
+ segmentPromises.push(
+ jitClient
+ .fetchSegment(this.src, segmentStartMs, quality)
+ .then((buffer) => ({
+ buffer,
+ startMs: segmentStartMs,
+ endMs: segmentEndMs,
+ })),
+ );
+ }
+
+ const segments = await Promise.all(segmentPromises);
+
+ // Decode each segment individually to extract audio
+ const audioBuffers: {
+ buffer: AudioBuffer;
+ startMs: number;
+ endMs: number;
+ }[] = [];
+
+ for (const segment of segments) {
+ try {
+ // Use a temporary audio context to decode audio from the video file
+ let tempContext: OfflineAudioContext;
+ try {
+ tempContext = new OfflineAudioContext(2, 48000, 48000);
+ } catch (error) {
+ throw new Error(
+ `[EFMedia.fetchAudioSpanningTime JIT] Failed to create temp OfflineAudioContext(2, 48000, 48000) for segment ${segment.startMs}-${segment.endMs}ms: ${error instanceof Error ? error.message : String(error)}. This is for decoding audio from JIT video segments.`,
+ );
+ }
+ // Clone the ArrayBuffer to avoid detaching issues when reusing cached segments
+ const clonedBuffer = segment.buffer.slice(0);
+ const audioBuffer = await tempContext.decodeAudioData(clonedBuffer);
+ audioBuffers.push({
+ buffer: audioBuffer,
+ startMs: segment.startMs,
+ endMs: segment.endMs,
+ });
+ } catch (error) {
+ log(
+ `Failed to decode audio from segment ${segment.startMs}-${segment.endMs}ms:`,
+ error,
+ );
+ throw error;
+ }
+ }
+
+ if (audioBuffers.length === 0) {
+ return undefined;
+ }
+
+ // Calculate total duration and samples needed
+ const firstAudioBuffer = audioBuffers[0];
+ const lastAudioBuffer = audioBuffers[audioBuffers.length - 1];
+
+ if (!firstAudioBuffer || !lastAudioBuffer) {
+ return undefined;
+ }
+
+ const sampleRate = firstAudioBuffer.buffer.sampleRate;
+ const numberOfChannels = firstAudioBuffer.buffer.numberOfChannels;
+
+ // Calculate the exact time range we need
+ const actualStartMs = Math.max(fromMs, firstAudioBuffer.startMs);
+ const actualEndMs = Math.min(toMs, lastAudioBuffer.endMs);
+ const totalDurationMs = actualEndMs - actualStartMs;
+ const totalSamples = Math.floor((totalDurationMs / 1000) * sampleRate);
+ if (totalSamples <= 0) {
+ return undefined;
+ }
+
+ // Create a new audio context for the final buffer
+ let finalContext: OfflineAudioContext;
+ try {
+ finalContext = new OfflineAudioContext(
+ numberOfChannels,
+ totalSamples,
+ sampleRate,
+ );
+ } catch (error) {
+ throw new Error(
+ `[EFMedia.fetchAudioSpanningTime final] Failed to create final OfflineAudioContext(${numberOfChannels}, ${totalSamples}, ${sampleRate}) for time range ${actualStartMs}-${actualEndMs}ms: ${error instanceof Error ? error.message : String(error)}. This is for creating the final concatenated audio buffer.`,
+ );
+ }
+ const finalBuffer = finalContext.createBuffer(
+ numberOfChannels,
+ totalSamples,
+ sampleRate,
+ );
+
+ // Copy audio data from each decoded segment to the final buffer
+ let outputOffset = 0;
+
+ for (const {
+ buffer: audioBuffer,
+ startMs: segmentStartMs,
+ endMs: segmentEndMs,
+ } of audioBuffers) {
+ // Calculate which part of this segment we need
+ const segmentNeedStart = Math.max(actualStartMs, segmentStartMs);
+ const segmentNeedEnd = Math.min(actualEndMs, segmentEndMs);
+
+ if (segmentNeedStart >= segmentNeedEnd) {
+ continue; // Skip segments outside our range
+ }
+
+ // Calculate sample offsets within this segment
+ const segmentStartSample = Math.floor(
+ ((segmentNeedStart - segmentStartMs) / 1000) * sampleRate,
+ );
+ const segmentDurationSamples = Math.floor(
+ ((segmentNeedEnd - segmentNeedStart) / 1000) * sampleRate,
+ );
+
+ // Ensure we don't exceed buffer boundaries
+ const actualSamples = Math.min(
+ segmentDurationSamples,
+ audioBuffer.length - segmentStartSample,
+ totalSamples - outputOffset,
+ );
+
+ if (actualSamples <= 0) {
+ continue;
+ }
+
+ // Copy each channel
+ for (let channel = 0; channel < numberOfChannels; channel++) {
+ const sourceData = audioBuffer.getChannelData(channel);
+ const targetData = finalBuffer.getChannelData(channel);
+
+ for (let i = 0; i < actualSamples; i++) {
+ const sourceIndex = segmentStartSample + i;
+ const targetIndex = outputOffset + i;
+
+ if (
+ sourceIndex < sourceData.length &&
+ targetIndex < targetData.length
+ ) {
+ const sample = sourceData[sourceIndex];
+ if (sample !== undefined) {
+ targetData[targetIndex] = sample;
+ }
+ }
+ }
+ }
+
+ outputOffset += actualSamples;
+ }
+
+ // Encode the final buffer back to a blob
+ // We'll create a simple WAV file since that's more reliable than trying to create MP4
+ const wavBlob = this.encodeWAVBuffer(finalBuffer);

- const fragments = Object.values(audioTrackIndex.segments).filter(
- (segment) => {
- const segmentStartsBeforeEnd =
- segment.dts <= (toMs * audioTrackIndex.timescale) / 1000;
- const segmentEndsAfterStart =
- segment.dts + segment.duration >=
- (fromMs * audioTrackIndex.timescale) / 1000;
- return segmentStartsBeforeEnd && segmentEndsAfterStart;
+ const result = {
+ blob: wavBlob,
+ startMs: actualStartMs - (this.trimStartMs ?? 0),
+ endMs: actualEndMs - (this.trimEndMs ?? 0),
+ };
+
+ return result;
+ } catch (error) {
+ log(
+ "Failed to extract and concatenate audio from JIT video segments:",
+ error,
+ );
+ return undefined;
+ }
+ }
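The JIT branch maps the requested millisecond range onto fixed two-second segments with floor division, then clamps to the last whole segment so it never requests audio past the end of the media (the 500-error case the comment warns about). Worked through with concrete numbers:

    // segmentDuration = 2000 ms, durationMs = 9500, request 4200–8700 ms:
    //   maxSegmentIndex   = floor(9500 / 2000) - 1     = 3   (segments 0..3)
    //   startSegmentIndex = floor(4200 / 2000)         = 2
    //   endSegmentIndex   = min(floor(8700 / 2000), 3) = min(4, 3) = 3
    // Segments 2 and 3 (4000–8000 ms) are fetched; the final window is then
    //   actualEndMs = min(8700, 8000) = 8000.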
+
+ // Asset mode: use original fragmented MP4 approach
+ const start = audioTrackIndex.initSegment.offset;
+ const end =
+ audioTrackIndex.initSegment.offset + audioTrackIndex.initSegment.size;
+ const audioInitFragmentRequest = this.fetch(
+ this.fragmentTrackPath(String(audioTrackId)),
+ {
+ headers: { Range: `bytes=${start}-${end - 1}` },
  },
  );

+ const fragments = Object.values(
+ audioTrackIndex.segments as TrackSegment[],
+ ).filter((segment: TrackSegment) => {
+ const segmentStartsBeforeEnd =
+ segment.dts <= (toMs * audioTrackIndex.timescale) / 1000;
+ const segmentEndsAfterStart =
+ segment.dts + segment.duration >=
+ (fromMs * audioTrackIndex.timescale) / 1000;
+ return segmentStartsBeforeEnd && segmentEndsAfterStart;
+ });
+
  const firstFragment = fragments[0];
  if (!firstFragment) {
- log("No audio fragments found");
- return;
+ return undefined;
  }
  const lastFragment = fragments[fragments.length - 1];
  if (!lastFragment) {
- log("No audio fragments found");
- return;
+ return undefined;
  }
  const fragmentStart = firstFragment.offset;
  const fragmentEnd = lastFragment.offset + lastFragment.size;
@@ -488,6 +696,77 @@ export class EFMedia extends EFTargetable(
  };
  }

+ /**
+ * Encode an AudioBuffer to a WAV blob
+ */
+ private encodeWAVBuffer(audioBuffer: AudioBuffer): Blob {
+ const numberOfChannels = audioBuffer.numberOfChannels;
+ const sampleRate = audioBuffer.sampleRate;
+ const length = audioBuffer.length;
+
+ // Calculate buffer sizes
+ const bytesPerSample = 2; // 16-bit
+ const blockAlign = numberOfChannels * bytesPerSample;
+ const byteRate = sampleRate * blockAlign;
+ const dataSize = length * blockAlign;
+ const fileSize = 36 + dataSize;
+
+ // Create WAV file buffer
+ const buffer = new ArrayBuffer(44 + dataSize);
+ const view = new DataView(buffer);
+
+ // Write WAV header
+ let offset = 0;
+
+ // RIFF chunk descriptor
+ view.setUint32(offset, 0x52494646, false); // "RIFF"
+ offset += 4;
+ view.setUint32(offset, fileSize, true); // File size
+ offset += 4;
+ view.setUint32(offset, 0x57415645, false); // "WAVE"
+ offset += 4;
+
+ // fmt sub-chunk
+ view.setUint32(offset, 0x666d7420, false); // "fmt "
+ offset += 4;
+ view.setUint32(offset, 16, true); // Subchunk1Size (16 for PCM)
+ offset += 4;
+ view.setUint16(offset, 1, true); // AudioFormat (1 for PCM)
+ offset += 2;
+ view.setUint16(offset, numberOfChannels, true); // NumChannels
+ offset += 2;
+ view.setUint32(offset, sampleRate, true); // SampleRate
+ offset += 4;
+ view.setUint32(offset, byteRate, true); // ByteRate
+ offset += 4;
+ view.setUint16(offset, blockAlign, true); // BlockAlign
+ offset += 2;
+ view.setUint16(offset, 16, true); // BitsPerSample
+ offset += 2;
+
+ // data sub-chunk
+ view.setUint32(offset, 0x64617461, false); // "data"
+ offset += 4;
+ view.setUint32(offset, dataSize, true); // Subchunk2Size
+ offset += 4;
+
+ // Write audio data
+ for (let i = 0; i < length; i++) {
+ for (let channel = 0; channel < numberOfChannels; channel++) {
+ const sample = audioBuffer.getChannelData(channel)[i] || 0;
+ // Convert float (-1 to 1) to 16-bit PCM
+ const pcmSample = Math.max(
+ -32768,
+ Math.min(32767, Math.floor(sample * 32767)),
+ );
+ view.setInt16(offset, pcmSample, true);
+ offset += 2;
+ }
+ }
+
+ return new Blob([buffer], { type: "audio/wav" });
+ }
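encodeWAVBuffer writes the canonical 44-byte PCM WAV header followed by interleaved little-endian 16-bit samples. A size check with concrete numbers:

    // 2 channels, 48 kHz, 16-bit, length = 1600 frames:
    //   blockAlign = 2 * 2     = 4 bytes per frame
    //   byteRate   = 48000 * 4 = 192000 bytes/s
    //   dataSize   = 1600 * 4  = 6400 bytes
    //   fileSize   = 36 + 6400 = 6436  (the RIFF size field: total bytes minus 8)
    //   blob size  = 44 + 6400 = 6444 bytes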
+
  set fftSize(value: number) {
  const oldValue = this.fftSize;
  this.setAttribute("fft-size", String(value));
@@ -530,7 +809,7 @@ export class EFMedia extends EFTargetable(
  // Update FREQ_WEIGHTS to use the instance fftSize instead of a static value
  get FREQ_WEIGHTS() {
  if (freqWeightsCache.has(this.fftSize)) {
- // biome-ignore lint/style/noNonNullAssertion: Will exist due to prior has check
+ // biome-ignore lint/style/noNonNullAssertion: We know the value is set due to the guard above
  return freqWeightsCache.get(this.fftSize)!;
  }

@@ -553,6 +832,9 @@ export class EFMedia extends EFTargetable(

  byteTimeDomainTask = new Task(this, {
  autoRun: EF_INTERACTIVE,
+ onError: (error) => {
+ console.error("byteTimeDomainTask error", error);
+ },
  args: () =>
  [
  this.audioBufferTask.status,
@@ -565,7 +847,7 @@ export class EFMedia extends EFTargetable(
  task: async () => {
  await this.audioBufferTask.taskComplete;
  if (!this.audioBufferTask.value) return null;
- if (this.currentSourceTimeMs <= 0) return null;
+ if (this.currentSourceTimeMs < 0) return null;

  const currentTimeMs = this.currentSourceTimeMs;
  const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
@@ -588,11 +870,14 @@ export class EFMedia extends EFTargetable(
  const cachedFrame = this.#byteTimeDomainCache.get(cacheKey);
  if (cachedFrame) return cachedFrame;

- const audioContext = new OfflineAudioContext(
- 2,
- 48000 * (1 / 30),
- 48000,
- );
+ let audioContext: OfflineAudioContext;
+ try {
+ audioContext = new OfflineAudioContext(2, 48000 * (1 / 30), 48000);
+ } catch (error) {
+ throw new Error(
+ `[EFMedia.byteTimeDomainTask] Failed to create OfflineAudioContext(2, ${48000 * (1 / 30)}, 48000) for frame ${frameIndex} at time ${startTime}s: ${error instanceof Error ? error.message : String(error)}. This is for audio time domain analysis.`,
+ );
+ }

  const source = audioContext.createBufferSource();
  source.buffer = audioBuffer;
@@ -679,6 +964,9 @@ export class EFMedia extends EFTargetable(

  frequencyDataTask = new Task(this, {
  autoRun: EF_INTERACTIVE,
+ onError: (error) => {
+ console.error("frequencyDataTask error", error);
+ },
  args: () =>
  [
  this.audioBufferTask.status,
@@ -691,7 +979,7 @@ export class EFMedia extends EFTargetable(
  task: async () => {
  await this.audioBufferTask.taskComplete;
  if (!this.audioBufferTask.value) return null;
- if (this.currentSourceTimeMs <= 0) return null;
+ if (this.currentSourceTimeMs < 0) return null;

  const currentTimeMs = this.currentSourceTimeMs;
  const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
@@ -720,11 +1008,17 @@ export class EFMedia extends EFTargetable(
  return cachedFrame;
  }

- const audioContext = new OfflineAudioContext(
- 2,
- 48000 * (1 / 30),
- 48000,
- );
+ // Running 48000 * (1 / 30) = 1600 broke something terrible, it came out as 0,
+ // I'm assuming weird floating point nonsense to do with running on rosetta
+ const SIZE = 48000 / 30;
+ let audioContext: OfflineAudioContext;
+ try {
+ audioContext = new OfflineAudioContext(2, SIZE, 48000);
+ } catch (error) {
+ throw new Error(
+ `[EFMedia.frequencyDataTask] Failed to create OfflineAudioContext(2, ${SIZE}, 48000) for frame ${i} at time ${startTime}s: ${error instanceof Error ? error.message : String(error)}. This is for audio frequency analysis.`,
+ );
+ }
  const analyser = audioContext.createAnalyser();
  analyser.fftSize = this.fftSize;
  analyser.minDecibels = -90;
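The length argument of OfflineAudioContext is a frame count, and 48000 / 30 = 1600 is exactly one 30 fps video frame of audio at 48 kHz; the constructor coerces the value to an unsigned integer, so an expression that is exactly an integer is the safer form regardless of what produced 0 on the Rosetta setup mentioned in the comment:

    // one video frame of audio at 30 fps and 48 kHz:
    //   48000 samples/s ÷ 30 frames/s = 1600 samples
    const ctx = new OfflineAudioContext(2, 48000 / 30, 48000); // 1600-frame buffer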
@@ -773,8 +1067,7 @@ export class EFMedia extends EFTargetable(

  framesData.forEach((frame, frameIndex) => {
  const decayWeight = EFMedia.DECAY_WEIGHT ** frameIndex;
- // biome-ignore lint/style/noNonNullAssertion: Manual bounds check
- weightedSum += frame[i]! * decayWeight;
+ weightedSum += (frame[i] ?? 0) * decayWeight;
  weightSum += decayWeight;
  });

@@ -783,8 +1076,7 @@ export class EFMedia extends EFTargetable(

  // Apply frequency weights using instance FREQ_WEIGHTS
  smoothedData.forEach((value, i) => {
- // biome-ignore lint/style/noNonNullAssertion: Manual bounds check
- const freqWeight = this.FREQ_WEIGHTS[i]!;
+ const freqWeight = this.FREQ_WEIGHTS[i] ?? 0;
  smoothedData[i] = Math.min(255, Math.round(value * freqWeight));
  });

@@ -811,9 +1103,753 @@ export class EFMedia extends EFTargetable(
  get fftGain() {
  return Number.parseFloat(this.getAttribute("fft-gain") ?? "3.0");
  }
+
+ // Add helper methods for the new architecture
+ private synthesizeFragmentIndex(
+ jitMetadata: any,
+ ): Record<number, TrackFragmentIndex> {
+ const segmentDuration = jitMetadata.segmentDuration || 2000;
+ const numSegments = Math.ceil(jitMetadata.durationMs / segmentDuration);
+ const fragmentIndex: Record<number, TrackFragmentIndex> = {};
+
+ // Create video track fragment index
+ const videoStream = jitMetadata.streams.find(
+ (s: any) => s.type === "video",
+ );
+ if (videoStream) {
+ const segments: TrackSegment[] = [];
+ for (let i = 0; i < numSegments; i++) {
+ const startMs = i * segmentDuration;
+ const endMs = Math.min(
+ startMs + segmentDuration,
+ jitMetadata.durationMs,
+ );
+ segments.push({
+ dts: Math.floor(startMs * 90), // Convert to video timescale
+ cts: Math.floor(startMs * 90),
+ duration: Math.floor((endMs - startMs) * 90),
+ offset: 0, // Not used for JIT segments
+ size: 0, // Not used for JIT segments
+ });
+ }
+
+ fragmentIndex[videoStream.index] = {
+ track: videoStream.index,
+ type: "video",
+ timescale: 90000, // Standard video timescale
+ duration: Math.floor(jitMetadata.durationMs * 90),
+ width: videoStream.width || 1920,
+ height: videoStream.height || 1080,
+ sample_count: numSegments * 50, // Estimate ~50 frames per 2s segment
+ codec: videoStream.codecName || "h264",
+ segments,
+ initSegment: { offset: 0, size: 0 }, // Not used for JIT
+ };
+ }
+
+ // Create audio track fragment index
+ const audioStream = jitMetadata.streams.find(
+ (s: any) => s.type === "audio",
+ );
+ if (audioStream) {
+ const segments: TrackSegment[] = [];
+ const audioTimescale = audioStream.sampleRate || 48000;
+ for (let i = 0; i < numSegments; i++) {
+ const startMs = i * segmentDuration;
+ const endMs = Math.min(
+ startMs + segmentDuration,
+ jitMetadata.durationMs,
+ );
+ segments.push({
+ dts: Math.floor((startMs * audioTimescale) / 1000),
+ cts: Math.floor((startMs * audioTimescale) / 1000),
+ duration: Math.floor(((endMs - startMs) * audioTimescale) / 1000),
+ offset: 0, // Not used for JIT segments
+ size: 0, // Not used for JIT segments
+ });
+ }
+
+ fragmentIndex[audioStream.index] = {
+ track: audioStream.index,
+ type: "audio",
+ timescale: audioTimescale,
+ duration: Math.floor((jitMetadata.durationMs * audioTimescale) / 1000),
+ channel_count: audioStream.channels || 2,
+ sample_rate: audioStream.sampleRate || 48000,
+ sample_size: 16, // Standard sample size
+ sample_count: Math.floor(
+ (jitMetadata.durationMs * (audioStream.sampleRate || 48000)) / 1000,
+ ),
+ codec: audioStream.codecName || "aac",
+ segments,
+ initSegment: { offset: 0, size: 0 }, // Not used for JIT
+ };
+ }
+
+ return fragmentIndex;
+ }
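synthesizeFragmentIndex fabricates an MP4-style fragment index from JIT metadata so the downstream seek code can stay format-agnostic. The unit conversions: a video timescale of 90000 ticks/s is 90 ticks per millisecond, and the audio timescale is the sample rate. For a 2000 ms segment starting at 4000 ms:

    // video (timescale 90000): dts      = 4000 * 90           = 360000 ticks
    //                          duration = 2000 * 90           = 180000 ticks
    // audio (timescale 48000): dts      = 4000 * 48000 / 1000 = 192000 samples
    //                          duration = 2000 * 48000 / 1000 = 96000 samples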
+
+ private calculateAssetSegmentKeys(
+ fragmentIndex: Record<number, TrackFragmentIndex>,
+ seekMs: number,
+ ) {
+ const segmentKeys: Record<
+ string,
+ { startTimeMs: number; trackId: string }
+ > = {};
+
+ for (const [trackId, index] of Object.entries(fragmentIndex)) {
+ const segment = index.segments.toReversed().find((segment) => {
+ const segmentStartMs = (segment.dts / index.timescale) * 1000;
+ return segmentStartMs <= seekMs;
+ });
+
+ if (segment) {
+ const startTimeMs = (segment.dts / index.timescale) * 1000;
+ segmentKeys[trackId] = { startTimeMs, trackId };
+ }
+ }
+
+ return segmentKeys;
+ }
+
+ private calculateJitSegmentKeys(metadata: any, seekMs: number) {
+ const segmentKeys: Record<
+ string,
+ { startTimeMs: number; trackId: string }
+ > = {};
+ const segmentDuration = metadata.segmentDuration || 2000;
+
+ for (const stream of metadata.streams) {
+ const segmentIndex = Math.floor(seekMs / segmentDuration);
+ const startTimeMs = segmentIndex * segmentDuration;
+ segmentKeys[stream.index] = {
+ startTimeMs,
+ trackId: String(stream.index),
+ };
+ }
+
+ return segmentKeys;
+ }
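Because JIT segments have a fixed length, a segment key is pure arithmetic, in contrast to calculateAssetSegmentKeys above, which has to search a real index in reverse. For example:

    // seekMs = 5300, segmentDuration = 2000:
    //   segmentIndex = floor(5300 / 2000) = 2
    //   startTimeMs  = 2 * 2000           = 4000
    // every stream therefore maps to the segment starting at 4000 ms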
+
+ private calculateAssetSeekResult(
+ fragmentIndex: Record<number, TrackFragmentIndex>,
+ initSegments: any[],
+ seekMs: number,
+ ) {
+ const result: Record<
+ string,
+ {
+ segment: TrackSegment;
+ track: MP4Box.TrackInfo;
+ nextSegment?: TrackSegment;
+ }
+ > = {};
+
+ for (const index of Object.values(fragmentIndex)) {
+ const initTrack = initSegments
+ .find((segment) => segment.trackId === String(index.track))
+ ?.mp4File.getInfo().tracks[0];
+
+ if (!initTrack) continue;
+
+ const segment = index.segments.toReversed().find((segment) => {
+ const segmentStartMs = (segment.dts / initTrack.timescale) * 1000;
+ return segmentStartMs <= seekMs;
+ });
+
+ const nextSegment = index.segments.find((segment) => {
+ return (segment.dts / initTrack.timescale) * 1000 > seekMs;
+ });
+
+ if (segment) {
+ result[index.track] = { segment, track: initTrack, nextSegment };
+ }
+ }
+
+ return result;
+ }
+
+ private calculateJitSeekResult(
+ fragmentIndex: Record<number, TrackFragmentIndex>,
+ seekMs: number,
+ ) {
+ const result: Record<
+ string,
+ {
+ segment: TrackSegment;
+ track: MP4Box.TrackInfo;
+ nextSegment?: TrackSegment;
+ }
+ > = {};
+
+ for (const index of Object.values(fragmentIndex)) {
+ const track = this.createTrackInfo(index);
+
+ const segment = index.segments.toReversed().find((segment) => {
+ const segmentStartMs = (segment.dts / track.timescale) * 1000;
+ return segmentStartMs <= seekMs;
+ });
+
+ const nextSegment = index.segments.find((segment) => {
+ return (segment.dts / track.timescale) * 1000 > seekMs;
+ });
+
+ if (segment) {
+ result[index.track] = { segment, track, nextSegment };
+ }
+ }
+
+ return result;
+ }
+
+ private createTrackInfo(index: TrackFragmentIndex): MP4Box.TrackInfo {
+ return {
+ id: index.track,
+ name: index.type,
+ type: index.type,
+ timescale: index.timescale,
+ duration: index.duration,
+ bitrate: index.type === "video" ? 1000000 : 128000,
+ created: new Date(),
+ modified: new Date(),
+ movie_duration: index.duration,
+ movie_timescale: index.timescale,
+ layer: 0,
+ alternate_group: 0,
+ volume: index.type === "audio" ? 1.0 : 0,
+ track_width: index.type === "video" ? (index as any).width || 0 : 0,
+ track_height: index.type === "video" ? (index as any).height || 0 : 0,
+ samples_duration: index.duration,
+ codec: (index as any).codec || "unknown",
+ language: "und",
+ nb_samples: (index as any).sample_count || 0,
+ } as MP4Box.TrackInfo;
+ }
+
+ protected updated(
+ changedProperties: PropertyValueMap<any> | Map<PropertyKey, unknown>,
+ ): void {
+ super.updated(changedProperties);
+ if (changedProperties.has("ownCurrentTimeMs")) {
+ this.executeSeek(this.currentSourceTimeMs);
+ }
+ if (
+ changedProperties.has("currentTime") ||
+ changedProperties.has("ownCurrentTimeMs")
+ ) {
+ updateAnimations(this);
+ }
+ }
+
+ get hasOwnDuration() {
+ return true;
+ }
+
+ // Update videoAssetTask to use new convergent tasks
+ videoAssetTask = new Task(this, {
+ autoRun: EF_INTERACTIVE,
+ onError: (error) => {
+ console.error("videoAssetTask error", error);
+ },
+ args: () => [this.effectiveMode, this.mediaSegmentsTask.value] as const,
+ task: async ([mode, files], { signal: _signal }) => {
+ if (!files) return;
+
+ const fragmentIndex = this.fragmentIndexTask.value as Record<
+ number,
+ TrackFragmentIndex
+ > | null;
+ const computedVideoTrackId = Object.values(fragmentIndex ?? {}).find(
+ (track) => track.type === "video",
+ )?.track;
+
+ if (computedVideoTrackId === undefined) return;
+
+ const videoFile = files[computedVideoTrackId];
+ if (!videoFile) return;
+
+ // Cleanup existing asset
+ const existingAsset = this.videoAssetTask.value;
+ if (existingAsset) {
+ for (const frame of existingAsset?.decodedFrames || []) {
+ frame.close();
+ }
+ const maybeDecoder = existingAsset?.videoDecoder;
+ if (maybeDecoder?.state !== "closed") {
+ maybeDecoder.close();
+ }
+ }
+
+ // Single branching point for creation method
+ if (mode === "jit-transcode") {
+ return await VideoAsset.createFromCompleteMP4(
+ `jit-segment-${computedVideoTrackId}`,
+ videoFile,
+ );
+ }
+
+ return await VideoAsset.createFromReadableStream(
+ "video.mp4",
+ videoFile.stream(),
+ videoFile,
+ );
+ },
+ });
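videoAssetTask now closes leftover decoded frames and the previous decoder before building the next asset, and the only mode-specific difference is the constructor: JIT segments arrive as complete MP4 files, while asset-mode files are init+media fragments. One subtlety in the cleanup guard: maybeDecoder?.state !== "closed" is also true when maybeDecoder is undefined, so the unguarded maybeDecoder.close() call assumes a decoder is always present whenever an asset exists. A defensive restatement (a sketch, not the shipped code):

    const maybeDecoder = existingAsset?.videoDecoder;
    if (maybeDecoder && maybeDecoder.state !== "closed") {
      maybeDecoder.close();
    }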
1399
+
1400
+ @state()
1401
+ private _desiredSeekTimeMs = -1; // Initialize to -1 so that setting to 0 triggers a change
1402
+
1403
+ get desiredSeekTimeMs() {
1404
+ return this._desiredSeekTimeMs;
1405
+ }
1406
+
1407
+ set desiredSeekTimeMs(value: number) {
1408
+ if (this._desiredSeekTimeMs !== value) {
1409
+ this._desiredSeekTimeMs = value;
1410
+ }
1411
+ }
1412
+
1413
+ protected async executeSeek(seekToMs: number) {
1414
+ this.desiredSeekTimeMs = seekToMs;
1415
+ }
1416
+
1417
+ // DIVERGENT TASKS - Mode-Specific
1418
+
1419
+ // Asset Mode Tasks
1420
+ assetIndexLoader = new Task(this, {
1421
+ autoRun: EF_INTERACTIVE, // Always run since this is critical for frame rendering
1422
+ onError: (error) => {
1423
+ console.error("assetIndexLoader error", error);
1424
+ },
1425
+ args: () =>
1426
+ [
1427
+ this.effectiveMode === "asset" ? this.fragmentIndexPath() : null,
1428
+ this.fetch,
1429
+ ] as const,
1430
+ task: async ([path, fetch], { signal }) => {
1431
+ if (!path) return null;
1432
+ try {
1433
+ const response = await fetch(path, { signal });
1434
+ return (await response.json()) as Record<number, TrackFragmentIndex>;
1435
+ } catch (error) {
1436
+ console.error("Failed to load asset fragment index", error);
1437
+ return null;
1438
+ }
1439
+ },
1440
+ onComplete: () => {
1441
+ this.requestUpdate("intrinsicDurationMs");
1442
+ this.requestUpdate("ownCurrentTimeMs");
1443
+ this.rootTimegroup?.requestUpdate("ownCurrentTimeMs");
1444
+ this.rootTimegroup?.requestUpdate("durationMs");
1445
+ },
1446
+ });
1447
+
1448
+ // Asset segment keys calculation - separate from loading
1449
+ assetSegmentKeysTask = new Task(this, {
1450
+ autoRun: EF_INTERACTIVE, // Always run since this is critical for frame rendering
1451
+ onError: (error) => {
1452
+ console.error("assetSegmentKeysTask error", error);
1453
+ },
1454
+ args: () =>
1455
+ [
1456
+ this.effectiveMode === "asset" ? this.assetIndexLoader.value : null,
1457
+ this.desiredSeekTimeMs,
1458
+ ] as const,
1459
+ task: async ([fragmentIndex, seekMs]) => {
1460
+ if (this.effectiveMode === "asset") {
1461
+ await this.assetIndexLoader.taskComplete;
1462
+ fragmentIndex = this.assetIndexLoader.value;
1463
+ }
1464
+ if (!fragmentIndex || seekMs == null) return null;
1465
+ return this.calculateAssetSegmentKeys(fragmentIndex, seekMs);
1466
+ },
1467
+ });
1468
+
+  // Asset init segments loader - separate from media segments
+  assetInitSegmentsTask = new Task(this, {
+    autoRun: EF_INTERACTIVE, // Always run since this is critical for frame rendering
+    onError: (error) => {
+      console.error("assetInitSegmentsTask error", error);
+    },
+    args: () =>
+      [
+        this.effectiveMode === "asset" ? this.assetIndexLoader.value : null,
+        this.fetch,
+      ] as const,
+    task: async ([fragmentIndex, fetch], { signal }) => {
+      if (this.effectiveMode === "asset") {
+        await this.assetIndexLoader.taskComplete;
+        fragmentIndex = this.assetIndexLoader.value;
+      }
+      if (!fragmentIndex) return null;
+
+      return await Promise.all(
+        Object.entries(fragmentIndex).map(async ([trackId, track]) => {
+          const start = track.initSegment.offset;
+          const end = track.initSegment.offset + track.initSegment.size;
+          const response = await fetch(this.fragmentTrackPath(trackId), {
+            signal,
+            headers: { Range: `bytes=${start}-${end - 1}` },
+          });
+          const buffer =
+            (await response.arrayBuffer()) as MP4Box.MP4ArrayBuffer;
+          buffer.fileStart = 0;
+          const mp4File = new MP4File();
+          mp4File.appendBuffer(buffer, true);
+          mp4File.flush();
+          await mp4File.readyPromise;
+          return { trackId, buffer, mp4File };
+        }),
+      );
+    },
+  });
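The `end - 1` above is because HTTP Range requests are inclusive on both ends. A hypothetical helper that mirrors the computation:

    const byteRange = (offset: number, size: number) =>
      `bytes=${offset}-${offset + size - 1}`;

    byteRange(0, 1024); // "bytes=0-1023", exactly the first 1024 bytes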
+
+  // Asset media segments loader - now focused only on media segments
+  assetSegmentLoader = new Task(this, {
+    autoRun: EF_INTERACTIVE, // Always run since this is critical for frame rendering
+    onError: (error) => {
+      console.error("assetSegmentLoader error", error);
+    },
+    argsEqual: deepArrayEquals,
+    args: () =>
+      [
+        this.assetIndexLoader.value,
+        this.assetSegmentKeysTask.value,
+        this.assetInitSegmentsTask.value,
+        this.fetch,
+      ] as const,
+    task: async (
+      [fragmentIndex, segmentKeys, initSegments, fetch],
+      { signal },
+    ) => {
+      if (this.effectiveMode === "asset") {
+        await this.assetIndexLoader.taskComplete;
+        fragmentIndex = this.assetIndexLoader.value;
+        await this.assetSegmentKeysTask.taskComplete;
+        segmentKeys = this.assetSegmentKeysTask.value;
+        await this.assetInitSegmentsTask.taskComplete;
+        initSegments = this.assetInitSegmentsTask.value;
+      }
+
+      if (!fragmentIndex || !segmentKeys || !initSegments) return null;
+
+      // Access current seek time directly for calculations that need it
+      const seekMs = this.desiredSeekTimeMs;
+      if (seekMs == null) return null;
+
+      const files: Record<string, File> = {};
+
+      // Calculate and fetch media segments
+      const seekResult = this.calculateAssetSeekResult(
+        fragmentIndex,
+        initSegments,
+        seekMs,
+      );
+      if (!seekResult) return null;
+
+      for (const [trackId, { segment, track, nextSegment }] of Object.entries(
+        seekResult,
+      )) {
+        const start = segment.offset;
+        const end = segment.offset + segment.size;
+
+        const response = await fetch(this.fragmentTrackPath(trackId), {
+          signal,
+          headers: { Range: `bytes=${start}-${end - 1}` },
+        });
+
+        // Prefetch next segment
+        if (nextSegment) {
+          const nextStart = nextSegment.offset;
+          const nextEnd = nextSegment.offset + nextSegment.size;
+          fetch(this.fragmentTrackPath(trackId), {
+            signal,
+            headers: { Range: `bytes=${nextStart}-${nextEnd - 1}` },
+          }).catch(() => {}); // Fire and forget
+        }
+
+        const initSegment = initSegments.find(
+          (seg) => seg.trackId === String(track.id),
+        );
+        if (!initSegment) continue;
+
+        const mediaBuffer = await response.arrayBuffer();
+        files[trackId] = new File(
+          [initSegment.buffer, mediaBuffer],
+          "video.mp4",
+          {
+            type: "video/mp4",
+          },
+        );
+      }
+
+      return files;
+    },
+  });
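Because File, like Blob, concatenates its parts, prepending the cached init segment to each fetched media segment yields a self-contained fragmented MP4 that a demuxer can open cold. Stripped to its essentials (the two buffers are placeholders):

    const playable = new File([initSegmentBuffer, mediaSegmentBuffer], "video.mp4", {
      type: "video/mp4",
    });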
+
+  // JIT segment keys calculation - separate from loading
+  jitSegmentKeysTask = new Task(this, {
+    autoRun: EF_INTERACTIVE,
+    onError: (error) => {
+      console.error("jitSegmentKeysTask error", error);
+    },
+    args: () =>
+      [
+        this.effectiveMode === "jit-transcode"
+          ? this.jitMetadataLoader.value
+          : null,
+        this.desiredSeekTimeMs,
+      ] as const,
+    task: ([metadata, seekMs]) => {
+      if (!metadata || seekMs == null) return null;
+      return this.calculateJitSegmentKeys(metadata, seekMs);
+    },
+  });
+
+  // JIT segments loader - now focused only on segment loading
+  jitSegmentLoader = new Task(this, {
+    autoRun: EF_INTERACTIVE,
+    onError: (error) => {
+      console.error("jitSegmentLoader error", error);
+    },
+    argsEqual: deepArrayEquals,
+    args: () =>
+      [
+        this.src,
+        this.jitSegmentKeysTask.value,
+        this.jitMetadataLoader.value,
+      ] as const,
+    task: async ([src, segmentKeys, metadata], { signal: _signal }) => {
+      await this.jitSegmentKeysTask.taskComplete;
+      await this.jitMetadataLoader.taskComplete;
+
+      if (!src || !segmentKeys || !metadata || !this.jitClientTask.value)
+        return null;
+
+      // Access current seek time directly for calculations that need it
+      const seekMs = this.desiredSeekTimeMs;
+      if (seekMs == null) return null;
+
+      try {
+        this.jitLoadingState = "segments";
+        this.jitErrorMessage = null;
+
+        const files: Record<string, File> = {};
+        const quality = await this.jitClientTask.value.getAdaptiveQuality();
+
+        // Calculate which segments we need based on synthetic fragment index
+        const fragmentIndex = this.synthesizeFragmentIndex(metadata);
+        const seekResult = this.calculateJitSeekResult(fragmentIndex, seekMs);
+
+        for (const [trackId, { segment, track, nextSegment }] of Object.entries(
+          seekResult,
+        )) {
+          const startTimeMs = (segment.dts / track.timescale) * 1000;
+
+          // Fetch current segment
+          const segmentBuffer = await this.jitClientTask.value.fetchSegment(
+            src,
+            startTimeMs,
+            quality,
+          );
+          files[trackId] = new File([segmentBuffer], "segment.mp4", {
+            type: "video/mp4",
+          });
+
+          // Prefetch next segment
+          if (nextSegment && this.enablePrefetch) {
+            const nextStartTimeMs = (nextSegment.dts / track.timescale) * 1000;
+            this.jitClientTask.value
+              .fetchSegment(src, nextStartTimeMs, quality)
+              .catch(() => {}); // Fire and forget
+          }
+        }
+
+        this.jitCacheStats = this.jitClientTask.value.getCacheStats();
+        this.jitLoadingState = "idle";
+        return files;
+      } catch (error) {
+        this.jitLoadingState = "error";
+        this.jitErrorMessage =
+          error instanceof Error
+            ? error.message
+            : "Failed to load video segments";
+        throw error;
+      }
+    },
+  });
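The `(dts / timescale) * 1000` conversion above maps track time units to milliseconds. Worked example with an assumed 90 kHz video timescale:

    const startTimeMs = (270_000 / 90_000) * 1000; // dts 270000 → 3000 ms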
+
+  // CONVERGENT TASKS - Mode-Agnostic
+
+  // Convergent fragment index from either asset or JIT metadata
+  fragmentIndexTask = new Task(this, {
+    autoRun: EF_INTERACTIVE,
+    onError: (error) => {
+      console.error("fragmentIndexTask error", error);
+    },
+    args: () =>
+      [this.assetIndexLoader.value, this.jitMetadataLoader.value] as const,
+    task: async ([assetIndex, jitMetadata]) => {
+      await this.assetIndexLoader.taskComplete;
+      await this.jitMetadataLoader.taskComplete;
+      if (assetIndex) return assetIndex;
+      if (jitMetadata) return this.synthesizeFragmentIndex(jitMetadata);
+      return null;
+    },
+  });
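The convergence rule is a simple preference order: a real asset index wins, JIT metadata is synthesized into one as a fallback, and null means neither side has resolved yet. Restated in one line (illustrative only):

    const index = assetIndex ?? (jitMetadata ? this.synthesizeFragmentIndex(jitMetadata) : null);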
+
+  // Convergent media segments from either asset or JIT loaders
+  mediaSegmentsTask = new Task(this, {
+    autoRun: EF_INTERACTIVE,
+    onError: (error) => {
+      console.error("mediaSegmentsTask error", error);
+    },
+    args: () =>
+      [this.assetSegmentLoader.value, this.jitSegmentLoader.value] as const,
+    task: async ([_assetFiles, _jitFiles], { signal }) => {
+      log("🔍 SIGNAL: mediaSegmentsTask starting", {
+        signalAborted: signal.aborted,
+      });
+
+      await this.assetSegmentLoader.taskComplete;
+      if (signal.aborted) {
+        log(
+          "🔍 SIGNAL: mediaSegmentsTask aborted after assetSegmentLoader.taskComplete",
+        );
+        return null;
+      }
+
+      await this.jitSegmentLoader.taskComplete;
+      if (signal.aborted) {
+        log(
+          "🔍 SIGNAL: mediaSegmentsTask aborted after jitSegmentLoader.taskComplete",
+        );
+        return null;
+      }
+
+      // Get fresh values
+      const assetFiles = this.assetSegmentLoader.value;
+      const jitFiles = this.jitSegmentLoader.value;
+
+      log("🔍 SIGNAL: mediaSegmentsTask using fresh values", {
+        hasAssetFiles: !!assetFiles,
+        hasJitFiles: !!jitFiles,
+        signalAborted: signal.aborted,
+      });
+
+      const result = assetFiles || jitFiles || null;
+      log("🔍 SIGNAL: mediaSegmentsTask resolved", {
+        hasResult: !!result,
+        signalAborted: signal.aborted,
+      });
+      return result;
+    },
+  });
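The check-after-every-await shape above is standard cooperative cancellation with the task's AbortSignal: each await is a suspension point where the run may be superseded, so the signal is re-checked before touching possibly stale state. In isolation (somethingLong is a placeholder):

    async function step(signal: AbortSignal) {
      await somethingLong(); // the run may be aborted while suspended here
      if (signal.aborted) return null; // bail before using stale values
      // safe to continue
    }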
+
+  // Replace seekTask with unified task
+  seekTask = new Task(this, {
+    autoRun: EF_INTERACTIVE, // Always run since this is critical for frame rendering
+    onError: (error) => {
+      console.error("seekTask error", error);
+    },
+    args: () =>
+      [
+        this.fragmentIndexTask.value,
+        this.mediaSegmentsTask.value,
+        this.desiredSeekTimeMs,
+      ] as const,
+    task: async ([_fragmentIndex, _files, seekMs], { signal }) => {
+      log("🔍 SIGNAL: seekTask starting", {
+        seekMs,
+        signalAborted: signal.aborted,
+      });
+
+      await this.fragmentIndexTask.taskComplete;
+      if (signal.aborted) {
+        log("🔍 SIGNAL: seekTask aborted after fragmentIndexTask.taskComplete");
+        return null;
+      }
+
+      await this.mediaSegmentsTask.taskComplete;
+      if (signal.aborted) {
+        log("🔍 SIGNAL: seekTask aborted after mediaSegmentsTask.taskComplete");
+        return null;
+      }
+
+      // Get fresh values after awaiting
+      const fragmentIndex = this.fragmentIndexTask.value;
+      const files = this.mediaSegmentsTask.value;
+
+      log("🔍 SIGNAL: seekTask using fresh values", {
+        hasFragmentIndex: !!fragmentIndex,
+        hasFiles: !!files,
+        seekMs,
+        signalAborted: signal.aborted,
+      });
+
+      const typedFragmentIndex = fragmentIndex as Record<
+        number,
+        TrackFragmentIndex
+      > | null;
+      if (!typedFragmentIndex || !files) {
+        log("🔍 SIGNAL: seekTask calculation aborted - missing required data");
+        return null;
+      }
+
+      // Calculate seek metadata that downstream tasks need
+      const result: Record<
+        string,
+        {
+          segment: TrackSegment;
+          track: MP4Box.TrackInfo;
+          nextSegment?: TrackSegment;
+        }
+      > = {};
+
+      for (const index of Object.values(typedFragmentIndex)) {
+        // Create track info (synthetic for JIT, real for asset)
+        const track = this.createTrackInfo(index);
+        log("trace: processing track", {
+          trackId: index.track,
+          type: index.type,
+        });
+
+        const segment = index.segments
+          .toReversed()
+          .find((segment: TrackSegment) => {
+            const segmentStartMs = (segment.dts / track.timescale) * 1000;
+            return segmentStartMs <= seekMs;
+          });
+
+        const nextSegment = index.segments.find((segment: TrackSegment) => {
+          const segmentStartMs = (segment.dts / track.timescale) * 1000;
+          return segmentStartMs > seekMs;
+        });
+
+        if (segment) {
+          result[index.track] = { segment, track, nextSegment };
+          log("trace: found segment for track", {
+            trackId: index.track,
+            segmentDts: segment.dts,
+            hasNextSegment: !!nextSegment,
+          });
+        }
+      }
+
+      log("🔍 SIGNAL: seekTask calculation complete", {
+        trackCount: Object.keys(result).length,
+        signalAborted: signal.aborted,
+      });
+      return result;
+    },
+  });
 }
 
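Segment selection in seekTask, restated: scan segments newest-first and take the first whose start is at or before the seek point; the first segment strictly after the seek point becomes the prefetch candidate. A worked example with a timescale of 1000, so dts values read directly as milliseconds:

    const segments = [{ dts: 0 }, { dts: 2000 }, { dts: 4000 }];
    const seekMs = 2500;
    const current = segments.toReversed().find((s) => s.dts <= seekMs); // { dts: 2000 }
    const next = segments.find((s) => s.dts > seekMs); // { dts: 4000 }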
-function processFFTData(fftData: Uint8Array, zeroThresholdPercent = 0.1) {
+function processFFTData(
+  fftData: Uint8Array,
+  zeroThresholdPercent = 0.1,
+): Uint8Array {
   // Step 1: Determine the threshold for zeros
   const totalBins = fftData.length;
   const zeroThresholdCount = Math.floor(totalBins * zeroThresholdPercent);
@@ -823,8 +1859,7 @@ function processFFTData(fftData: Uint8Array, zeroThresholdPercent = 0.1) {
   let cutoffIndex = totalBins; // Default to the end of the array
 
   for (let i = totalBins - 1; i >= 0; i--) {
-    // biome-ignore lint/style/noNonNullAssertion: Manual bounds check
-    if (fftData[i]! < 10) {
+    // Parentheses are required here: ?? binds looser than <, so the bare
+    // `fftData[i] ?? 0 < 10` would parse as `fftData[i] ?? (0 < 10)`.
+    if ((fftData[i] ?? 0) < 10) {
       zeroCount++;
     } else {
       // If we encounter a non-zero value, we can stop
@@ -850,14 +1885,13 @@ function processFFTData(fftData: Uint8Array, zeroThresholdPercent = 0.1) {
     const attenuationProgress =
       (i - attenuationStartIndex) / (totalBins - attenuationStartIndex) + 0.2;
     const attenuationFactor = Math.max(0, 1 - attenuationProgress);
-    // biome-ignore lint/style/noNonNullAssertion: Manual bounds check
-    resampledData[i] = Math.floor(resampledData[i]! * attenuationFactor);
+    resampledData[i] = Math.floor((resampledData[i] ?? 0) * attenuationFactor);
   }
 
   return resampledData;
 }
 
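processFFTData trims the near-silent tail of a byte-frequency spectrum and tapers what remains. A plausible call site, assuming the bins come from a Web Audio AnalyserNode (that wiring is not part of this diff):

    const analyser = audioContext.createAnalyser();
    const bins = new Uint8Array(analyser.frequencyBinCount);
    analyser.getByteFrequencyData(bins);
    const shaped = processFFTData(bins); // drop trailing near-zeros, taper the tail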
-function interpolateData(data: Uint8Array, targetSize: number) {
+function interpolateData(data: Uint8Array, targetSize: number): Uint8Array {
   const resampled = new Uint8Array(targetSize);
   const dataLength = data.length;
 
@@ -869,13 +1903,11 @@ function interpolateData(data: Uint8Array, targetSize: number) {
 
     // Handle edge cases
     if (index >= dataLength - 1) {
-      // biome-ignore lint/style/noNonNullAssertion: Manual bounds check
-      resampled[i] = data[dataLength - 1]!; // Last value
+      resampled[i] = data[dataLength - 1] ?? 0; // Last value
     } else {
       // Linear interpolation
      resampled[i] = Math.round(
-        // biome-ignore lint/style/noNonNullAssertion: Manual bounds check
-        data[index]! * (1 - fraction) + data[index + 1]! * fraction,
+        (data[index] ?? 0) * (1 - fraction) + (data[index + 1] ?? 0) * fraction,
       );
     }
   }