@editframe/elements 0.20.3-beta.0 → 0.21.0-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (128)
  1. package/dist/DelayedLoadingState.js +0 -27
  2. package/dist/EF_FRAMEGEN.d.ts +5 -3
  3. package/dist/EF_FRAMEGEN.js +51 -29
  4. package/dist/_virtual/_@oxc-project_runtime@0.93.0/helpers/decorate.js +7 -0
  5. package/dist/elements/ContextProxiesController.js +2 -22
  6. package/dist/elements/EFAudio.js +4 -8
  7. package/dist/elements/EFCaptions.js +59 -84
  8. package/dist/elements/EFImage.js +5 -6
  9. package/dist/elements/EFMedia/AssetIdMediaEngine.js +2 -4
  10. package/dist/elements/EFMedia/AssetMediaEngine.d.ts +4 -4
  11. package/dist/elements/EFMedia/AssetMediaEngine.js +41 -32
  12. package/dist/elements/EFMedia/BaseMediaEngine.d.ts +10 -2
  13. package/dist/elements/EFMedia/BaseMediaEngine.js +57 -67
  14. package/dist/elements/EFMedia/BufferedSeekingInput.js +134 -76
  15. package/dist/elements/EFMedia/JitMediaEngine.js +22 -23
  16. package/dist/elements/EFMedia/audioTasks/makeAudioBufferTask.js +4 -7
  17. package/dist/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.js +1 -3
  18. package/dist/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.js +2 -2
  19. package/dist/elements/EFMedia/audioTasks/makeAudioInputTask.js +9 -7
  20. package/dist/elements/EFMedia/audioTasks/makeAudioSeekTask.js +1 -3
  21. package/dist/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.js +2 -12
  22. package/dist/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.js +2 -2
  23. package/dist/elements/EFMedia/audioTasks/makeAudioTasksVideoOnly.browsertest.d.ts +1 -0
  24. package/dist/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.js +6 -3
  25. package/dist/elements/EFMedia/shared/AudioSpanUtils.d.ts +1 -1
  26. package/dist/elements/EFMedia/shared/AudioSpanUtils.js +5 -17
  27. package/dist/elements/EFMedia/shared/BufferUtils.d.ts +1 -1
  28. package/dist/elements/EFMedia/shared/BufferUtils.js +2 -13
  29. package/dist/elements/EFMedia/shared/GlobalInputCache.js +0 -24
  30. package/dist/elements/EFMedia/shared/MediaTaskUtils.d.ts +1 -1
  31. package/dist/elements/EFMedia/shared/PrecisionUtils.js +0 -21
  32. package/dist/elements/EFMedia/shared/RenditionHelpers.d.ts +1 -9
  33. package/dist/elements/EFMedia/shared/ThumbnailExtractor.js +0 -17
  34. package/dist/elements/EFMedia/tasks/makeMediaEngineTask.d.ts +1 -2
  35. package/dist/elements/EFMedia/tasks/makeMediaEngineTask.js +2 -16
  36. package/dist/elements/EFMedia/videoTasks/MainVideoInputCache.d.ts +29 -0
  37. package/dist/elements/EFMedia/videoTasks/MainVideoInputCache.js +32 -0
  38. package/dist/elements/EFMedia/videoTasks/ScrubInputCache.js +1 -15
  39. package/dist/elements/EFMedia/videoTasks/makeScrubVideoBufferTask.js +3 -8
  40. package/dist/elements/EFMedia/videoTasks/makeScrubVideoInitSegmentFetchTask.js +0 -2
  41. package/dist/elements/EFMedia/videoTasks/makeScrubVideoInputTask.js +8 -7
  42. package/dist/elements/EFMedia/videoTasks/makeScrubVideoSeekTask.js +12 -13
  43. package/dist/elements/EFMedia/videoTasks/makeScrubVideoSegmentFetchTask.js +0 -2
  44. package/dist/elements/EFMedia/videoTasks/makeScrubVideoSegmentIdTask.js +1 -3
  45. package/dist/elements/EFMedia/videoTasks/makeUnifiedVideoSeekTask.js +134 -71
  46. package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.js +8 -12
  47. package/dist/elements/EFMedia.d.ts +2 -1
  48. package/dist/elements/EFMedia.js +26 -23
  49. package/dist/elements/EFSourceMixin.js +5 -7
  50. package/dist/elements/EFSurface.js +6 -9
  51. package/dist/elements/EFTemporal.js +19 -37
  52. package/dist/elements/EFThumbnailStrip.js +16 -59
  53. package/dist/elements/EFTimegroup.js +96 -91
  54. package/dist/elements/EFVideo.d.ts +6 -2
  55. package/dist/elements/EFVideo.js +142 -107
  56. package/dist/elements/EFWaveform.js +18 -27
  57. package/dist/elements/SampleBuffer.js +2 -5
  58. package/dist/elements/TargetController.js +3 -3
  59. package/dist/elements/durationConverter.js +4 -4
  60. package/dist/elements/updateAnimations.js +14 -35
  61. package/dist/gui/ContextMixin.js +23 -52
  62. package/dist/gui/EFConfiguration.js +7 -7
  63. package/dist/gui/EFControls.js +5 -5
  64. package/dist/gui/EFFilmstrip.js +77 -98
  65. package/dist/gui/EFFitScale.js +5 -6
  66. package/dist/gui/EFFocusOverlay.js +4 -4
  67. package/dist/gui/EFPreview.js +4 -4
  68. package/dist/gui/EFScrubber.js +9 -9
  69. package/dist/gui/EFTimeDisplay.js +5 -5
  70. package/dist/gui/EFToggleLoop.js +4 -4
  71. package/dist/gui/EFTogglePlay.js +5 -5
  72. package/dist/gui/EFWorkbench.js +5 -5
  73. package/dist/gui/TWMixin2.js +1 -1
  74. package/dist/index.d.ts +1 -0
  75. package/dist/otel/BridgeSpanExporter.d.ts +13 -0
  76. package/dist/otel/BridgeSpanExporter.js +87 -0
  77. package/dist/otel/setupBrowserTracing.d.ts +12 -0
  78. package/dist/otel/setupBrowserTracing.js +30 -0
  79. package/dist/otel/tracingHelpers.d.ts +34 -0
  80. package/dist/otel/tracingHelpers.js +113 -0
  81. package/dist/transcoding/cache/RequestDeduplicator.js +0 -21
  82. package/dist/transcoding/cache/URLTokenDeduplicator.js +1 -21
  83. package/dist/transcoding/types/index.d.ts +6 -4
  84. package/dist/transcoding/utils/UrlGenerator.js +2 -19
  85. package/dist/utils/LRUCache.js +6 -53
  86. package/package.json +10 -2
  87. package/src/elements/EFCaptions.browsertest.ts +2 -0
  88. package/src/elements/EFMedia/AssetIdMediaEngine.test.ts +6 -4
  89. package/src/elements/EFMedia/AssetMediaEngine.browsertest.ts +25 -23
  90. package/src/elements/EFMedia/AssetMediaEngine.ts +81 -43
  91. package/src/elements/EFMedia/BaseMediaEngine.browsertest.ts +94 -0
  92. package/src/elements/EFMedia/BaseMediaEngine.ts +120 -60
  93. package/src/elements/EFMedia/BufferedSeekingInput.ts +218 -101
  94. package/src/elements/EFMedia/JitMediaEngine.ts +20 -6
  95. package/src/elements/EFMedia/audioTasks/makeAudioBufferTask.ts +5 -2
  96. package/src/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.ts +0 -5
  97. package/src/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.ts +2 -1
  98. package/src/elements/EFMedia/audioTasks/makeAudioInputTask.ts +18 -8
  99. package/src/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.ts +4 -16
  100. package/src/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.ts +4 -2
  101. package/src/elements/EFMedia/audioTasks/makeAudioTasksVideoOnly.browsertest.ts +95 -0
  102. package/src/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.ts +5 -6
  103. package/src/elements/EFMedia/shared/AudioSpanUtils.ts +5 -4
  104. package/src/elements/EFMedia/shared/BufferUtils.ts +7 -3
  105. package/src/elements/EFMedia/shared/MediaTaskUtils.ts +1 -1
  106. package/src/elements/EFMedia/shared/RenditionHelpers.browsertest.ts +41 -42
  107. package/src/elements/EFMedia/shared/RenditionHelpers.ts +0 -23
  108. package/src/elements/EFMedia/tasks/makeMediaEngineTask.ts +1 -9
  109. package/src/elements/EFMedia/videoTasks/MainVideoInputCache.ts +76 -0
  110. package/src/elements/EFMedia/videoTasks/makeScrubVideoBufferTask.ts +3 -2
  111. package/src/elements/EFMedia/videoTasks/makeScrubVideoInitSegmentFetchTask.ts +0 -5
  112. package/src/elements/EFMedia/videoTasks/makeScrubVideoInputTask.ts +17 -15
  113. package/src/elements/EFMedia/videoTasks/makeScrubVideoSeekTask.ts +7 -1
  114. package/src/elements/EFMedia/videoTasks/makeScrubVideoSegmentFetchTask.ts +0 -5
  115. package/src/elements/EFMedia/videoTasks/makeScrubVideoSegmentIdTask.ts +0 -5
  116. package/src/elements/EFMedia/videoTasks/makeUnifiedVideoSeekTask.ts +222 -125
  117. package/src/elements/EFMedia/videoTasks/makeVideoBufferTask.ts +2 -5
  118. package/src/elements/EFMedia.ts +18 -2
  119. package/src/elements/EFThumbnailStrip.media-engine.browsertest.ts +2 -1
  120. package/src/elements/EFTimegroup.browsertest.ts +10 -8
  121. package/src/elements/EFTimegroup.ts +165 -77
  122. package/src/elements/EFVideo.browsertest.ts +19 -27
  123. package/src/elements/EFVideo.ts +203 -101
  124. package/src/otel/BridgeSpanExporter.ts +150 -0
  125. package/src/otel/setupBrowserTracing.ts +68 -0
  126. package/src/otel/tracingHelpers.ts +251 -0
  127. package/src/transcoding/types/index.ts +6 -4
  128. package/types.json +1 -1
@@ -1,7 +1,8 @@
+ import { withSpan } from "../../otel/tracingHelpers.js";
  import { roundToMilliseconds } from "./shared/PrecisionUtils.js";
  import { SampleBuffer } from "../SampleBuffer.js";
  import { AudioSampleSink, BufferSource, Input, InputAudioTrack, InputVideoTrack, MP4, VideoSampleSink } from "mediabunny";
- const defaultOptions = {
+ var defaultOptions = {
  videoBufferSize: 30,
  audioBufferSize: 100,
  startTimeOffsetMs: 0
@@ -14,11 +15,10 @@ var BufferedSeekingInput = class {
  this.trackIteratorCreationPromises = /* @__PURE__ */ new Map();
  this.trackSeekPromises = /* @__PURE__ */ new Map();
  const bufferSource = new BufferSource(arrayBuffer);
- const input = new Input({
+ this.input = new Input({
  source: bufferSource,
  formats: [MP4]
  });
- this.input = input;
  this.options = {
  ...defaultOptions,
  ...options
@@ -34,8 +34,7 @@ var BufferedSeekingInput = class {
  return buffer ? Object.freeze([...buffer.getContents()]) : [];
  }
  getBufferTimestamps(trackId) {
- const contents = this.getBufferContents(trackId);
- return contents.map((sample) => sample.timestamp || 0);
+ return this.getBufferContents(trackId).map((sample) => sample.timestamp || 0);
  }
  clearBuffer(trackId) {
  const buffer = this.trackBuffers.get(trackId);
@@ -45,30 +44,25 @@ var BufferedSeekingInput = class {
  return this.input.computeDuration();
  }
  async getTrack(trackId) {
- const tracks = await this.input.getTracks();
- const track = tracks.find((track$1) => track$1.id === trackId);
+ const track = (await this.input.getTracks()).find((track$1) => track$1.id === trackId);
  if (!track) throw new Error(`Track ${trackId} not found`);
  return track;
  }
  async getAudioTrack(trackId) {
- const tracks = await this.input.getAudioTracks();
- const track = tracks.find((track$1) => track$1.id === trackId && track$1.type === "audio");
+ const track = (await this.input.getAudioTracks()).find((track$1) => track$1.id === trackId && track$1.type === "audio");
  if (!track) throw new Error(`Track ${trackId} not found`);
  return track;
  }
  async getVideoTrack(trackId) {
- const tracks = await this.input.getVideoTracks();
- const track = tracks.find((track$1) => track$1.id === trackId && track$1.type === "video");
+ const track = (await this.input.getVideoTracks()).find((track$1) => track$1.id === trackId && track$1.type === "video");
  if (!track) throw new Error(`Track ${trackId} not found`);
  return track;
  }
  async getFirstVideoTrack() {
- const tracks = await this.input.getVideoTracks();
- return tracks[0];
+ return (await this.input.getVideoTracks())[0];
  }
  async getFirstAudioTrack() {
- const tracks = await this.input.getAudioTracks();
- return tracks[0];
+ return (await this.input.getAudioTracks())[0];
  }
  getTrackIterator(track) {
  if (this.trackIterators.has(track.id)) return this.trackIterators.get(track.id);
@@ -82,18 +76,15 @@ var BufferedSeekingInput = class {
  throw new Error(`Unsupported track type ${track.type}`);
  }
  createTrackIterator(track) {
- const sampleSink = this.createTrackSampleSink(track);
- return sampleSink.samples();
+ return this.createTrackSampleSink(track).samples();
  }
  createTrackBuffer(track) {
  if (track.type === "audio") {
  const bufferSize$1 = this.options.audioBufferSize;
- const sampleBuffer$1 = new SampleBuffer(bufferSize$1);
- return sampleBuffer$1;
+ return new SampleBuffer(bufferSize$1);
  }
  const bufferSize = this.options.videoBufferSize;
- const sampleBuffer = new SampleBuffer(bufferSize);
- return sampleBuffer;
+ return new SampleBuffer(bufferSize);
  }
  getTrackBuffer(track) {
  const maybeTrackBuffer = this.trackBuffers.get(track.id);
@@ -103,21 +94,30 @@ var BufferedSeekingInput = class {
  return trackBuffer;
  }
  async seek(trackId, timeMs) {
- const mediaTimeMs = timeMs + this.startTimeOffsetMs;
- const roundedMediaTimeMs = roundToMilliseconds(mediaTimeMs);
- const existingSeek = this.trackSeekPromises.get(trackId);
- if (existingSeek) await existingSeek;
- const seekPromise = this.seekSafe(trackId, roundedMediaTimeMs);
- this.trackSeekPromises.set(trackId, seekPromise);
- try {
- return await seekPromise;
- } finally {
- this.trackSeekPromises.delete(trackId);
- }
+ return withSpan("bufferedInput.seek", {
+ trackId,
+ timeMs,
+ startTimeOffsetMs: this.startTimeOffsetMs
+ }, void 0, async (span) => {
+ const mediaTimeMs = timeMs + this.startTimeOffsetMs;
+ const roundedMediaTimeMs = roundToMilliseconds(mediaTimeMs);
+ span.setAttribute("roundedMediaTimeMs", roundedMediaTimeMs);
+ const existingSeek = this.trackSeekPromises.get(trackId);
+ if (existingSeek) {
+ span.setAttribute("waitedForExistingSeek", true);
+ await existingSeek;
+ }
+ const seekPromise = this.seekSafe(trackId, roundedMediaTimeMs);
+ this.trackSeekPromises.set(trackId, seekPromise);
+ try {
+ return await seekPromise;
+ } finally {
+ this.trackSeekPromises.delete(trackId);
+ }
+ });
  }
  async resetIterator(track) {
- const trackBuffer = this.trackBuffers.get(track.id);
- trackBuffer?.clear();
+ this.trackBuffers.get(track.id)?.clear();
  const ongoingIteratorCreation = this.trackIteratorCreationPromises.get(track.id);
  if (ongoingIteratorCreation) await ongoingIteratorCreation;
  const iterator = this.trackIterators.get(track.id);
@@ -128,50 +128,108 @@ var BufferedSeekingInput = class {
  }
  #seekLock;
  async seekSafe(trackId, timeMs) {
- if (this.#seekLock) await this.#seekLock.promise;
- const seekLock = Promise.withResolvers();
- this.#seekLock = seekLock;
- try {
- const track = await this.getTrack(trackId);
- const trackBuffer = this.getTrackBuffer(track);
- const roundedTimeMs = roundToMilliseconds(timeMs);
- const firstTimestampMs = roundToMilliseconds(await track.getFirstTimestamp() * 1e3);
- if (roundedTimeMs < firstTimestampMs) {
- console.error("Seeking outside bounds of input", {
- roundedTimeMs,
- firstTimestampMs
- });
- throw new NoSample(`Seeking outside bounds of input ${roundedTimeMs} < ${firstTimestampMs}`);
+ return withSpan("bufferedInput.seekSafe", {
+ trackId,
+ timeMs
+ }, void 0, async (span) => {
+ if (this.#seekLock) {
+ span.setAttribute("waitedForSeekLock", true);
+ await this.#seekLock.promise;
  }
- const bufferContents = trackBuffer.getContents();
- if (bufferContents.length > 0) {
- const bufferStartMs = roundToMilliseconds(trackBuffer.firstTimestamp * 1e3);
- if (roundedTimeMs < bufferStartMs) await this.resetIterator(track);
+ const seekLock = Promise.withResolvers();
+ this.#seekLock = seekLock;
+ try {
+ const track = await this.getTrack(trackId);
+ span.setAttribute("trackType", track.type);
+ const trackBuffer = this.getTrackBuffer(track);
+ const roundedTimeMs = roundToMilliseconds(timeMs);
+ const firstTimestampMs = roundToMilliseconds(await track.getFirstTimestamp() * 1e3);
+ span.setAttribute("firstTimestampMs", firstTimestampMs);
+ if (roundedTimeMs < firstTimestampMs) {
+ console.error("Seeking outside bounds of input", {
+ roundedTimeMs,
+ firstTimestampMs
+ });
+ throw new NoSample(`Seeking outside bounds of input ${roundedTimeMs} < ${firstTimestampMs}`);
+ }
+ const bufferContents = trackBuffer.getContents();
+ span.setAttribute("bufferContentsLength", bufferContents.length);
+ if (bufferContents.length > 0) {
+ const bufferStartMs = roundToMilliseconds(trackBuffer.firstTimestamp * 1e3);
+ span.setAttribute("bufferStartMs", bufferStartMs);
+ if (roundedTimeMs < bufferStartMs) {
+ span.setAttribute("resetIterator", true);
+ await this.resetIterator(track);
+ }
+ }
+ const alreadyInBuffer = trackBuffer.find(timeMs);
+ if (alreadyInBuffer) {
+ span.setAttribute("foundInBuffer", true);
+ span.setAttribute("bufferSize", trackBuffer.length);
+ const contents$1 = trackBuffer.getContents();
+ if (contents$1.length > 0) span.setAttribute("bufferTimestamps", contents$1.map((s) => Math.round((s.timestamp || 0) * 1e3)).slice(0, 10).join(","));
+ return alreadyInBuffer;
+ }
+ span.setAttribute("foundInBuffer", false);
+ span.setAttribute("bufferSize", trackBuffer.length);
+ span.setAttribute("requestedTimeMs", Math.round(timeMs));
+ const contents = trackBuffer.getContents();
+ if (contents.length > 0) {
+ const firstSample = contents[0];
+ const lastSample = contents[contents.length - 1];
+ if (firstSample && lastSample) {
+ const bufferStartMs = Math.round((firstSample.timestamp || 0) * 1e3);
+ const bufferEndMs = Math.round(((lastSample.timestamp || 0) + (lastSample.duration || 0)) * 1e3);
+ span.setAttribute("bufferStartMs", bufferStartMs);
+ span.setAttribute("bufferEndMs", bufferEndMs);
+ span.setAttribute("bufferRangeMs", `${bufferStartMs}-${bufferEndMs}`);
+ }
+ }
+ const iterator = this.getTrackIterator(track);
+ let iterationCount = 0;
+ const decodeStart = performance.now();
+ while (true) {
+ iterationCount++;
+ const iterStart = performance.now();
+ const { done, value: decodedSample } = await iterator.next();
+ const iterEnd = performance.now();
+ if (iterationCount <= 5) span.setAttribute(`iter${iterationCount}Ms`, Math.round((iterEnd - iterStart) * 100) / 100);
+ if (decodedSample) {
+ trackBuffer.push(decodedSample);
+ if (iterationCount <= 5) span.setAttribute(`iter${iterationCount}Timestamp`, Math.round((decodedSample.timestamp || 0) * 1e3));
+ }
+ const foundSample = trackBuffer.find(roundedTimeMs);
+ if (foundSample) {
+ const decodeEnd = performance.now();
+ span.setAttribute("iterationCount", iterationCount);
+ span.setAttribute("decodeMs", Math.round((decodeEnd - decodeStart) * 100) / 100);
+ span.setAttribute("avgIterMs", Math.round((decodeEnd - decodeStart) / iterationCount * 100) / 100);
+ span.setAttribute("foundSample", true);
+ span.setAttribute("foundTimestamp", Math.round((foundSample.timestamp || 0) * 1e3));
+ return foundSample;
+ }
+ if (done) break;
+ }
+ span.setAttribute("iterationCount", iterationCount);
+ span.setAttribute("reachedEnd", true);
+ const finalBufferContents = trackBuffer.getContents();
+ if (finalBufferContents.length > 0) {
+ const lastSample = finalBufferContents[finalBufferContents.length - 1];
+ const lastSampleEndMs = roundToMilliseconds(((lastSample?.timestamp || 0) + (lastSample?.duration || 0)) * 1e3);
+ const trackDurationMs = await track.computeDuration() * 1e3;
+ const isSeekingToTrackEnd = roundToMilliseconds(timeMs) === roundToMilliseconds(trackDurationMs);
+ const isAtEndOfTrack = roundToMilliseconds(timeMs) >= lastSampleEndMs;
+ if (isSeekingToTrackEnd && isAtEndOfTrack) {
+ span.setAttribute("returnedLastSample", true);
+ return lastSample;
+ }
+ }
+ throw new NoSample(`Sample not found for time ${timeMs} in ${track.type} track ${trackId}`);
+ } finally {
+ this.#seekLock = void 0;
+ seekLock.resolve();
  }
- const alreadyInBuffer = trackBuffer.find(timeMs);
- if (alreadyInBuffer) return alreadyInBuffer;
- const iterator = this.getTrackIterator(track);
- while (true) {
- const { done, value: decodedSample } = await iterator.next();
- if (decodedSample) trackBuffer.push(decodedSample);
- const foundSample = trackBuffer.find(roundedTimeMs);
- if (foundSample) return foundSample;
- if (done) break;
- }
- const finalBufferContents = trackBuffer.getContents();
- if (finalBufferContents.length > 0) {
- const lastSample = finalBufferContents[finalBufferContents.length - 1];
- const lastSampleEndMs = roundToMilliseconds(((lastSample?.timestamp || 0) + (lastSample?.duration || 0)) * 1e3);
- const trackDurationMs = await track.computeDuration() * 1e3;
- const isSeekingToTrackEnd = roundToMilliseconds(timeMs) === roundToMilliseconds(trackDurationMs);
- const isAtEndOfTrack = roundToMilliseconds(timeMs) >= lastSampleEndMs;
- if (isSeekingToTrackEnd && isAtEndOfTrack) return lastSample;
- }
- throw new NoSample(`Sample not found for time ${timeMs} in ${track.type} track ${trackId}`);
- } finally {
- this.#seekLock = void 0;
- seekLock.resolve();
- }
+ });
  }
  };
  export { BufferedSeekingInput };
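
Note: the seek path above is now wrapped in `withSpan` from the new `dist/otel/tracingHelpers.js` module, whose implementation is not included in these hunks. As orientation only, the sketch below shows one way a helper with this call shape (`withSpan(name, attributes, options, fn)`) could be written on top of the public `@opentelemetry/api`; the tracer name and the treatment of the third argument as `SpanOptions` are assumptions, not the package's actual code.

    import { trace, SpanStatusCode, type Attributes, type Span, type SpanOptions } from "@opentelemetry/api";

    // Hypothetical sketch; the real tracingHelpers.ts in this release may differ.
    const tracer = trace.getTracer("@editframe/elements");

    export async function withSpan<T>(
      name: string,
      attributes: Attributes,
      options: SpanOptions | undefined,
      fn: (span: Span) => Promise<T>,
    ): Promise<T> {
      // startActiveSpan makes the span current for anything awaited inside fn.
      return tracer.startActiveSpan(name, { ...options, attributes }, async (span) => {
        try {
          return await fn(span);
        } catch (error) {
          span.recordException(error as Error);
          span.setStatus({ code: SpanStatusCode.ERROR });
          throw error;
        } finally {
          span.end();
        }
      });
    }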
@@ -3,8 +3,7 @@ import { ThumbnailExtractor } from "./shared/ThumbnailExtractor.js";
  var JitMediaEngine = class JitMediaEngine extends BaseMediaEngine {
  static async fetch(host, urlGenerator, url) {
  const engine = new JitMediaEngine(host, urlGenerator);
- const data = await engine.fetchManifest(url);
- engine.data = data;
+ engine.data = await engine.fetchManifest(url);
  return engine;
  }
  constructor(host, urlGenerator) {
@@ -20,7 +19,7 @@ var JitMediaEngine = class JitMediaEngine extends BaseMediaEngine {
  return this.data.sourceUrl;
  }
  get audioRendition() {
- if (!this.data.audioRenditions || this.data.audioRenditions.length === 0) return void 0;
+ if (!this.data.audioRenditions || this.data.audioRenditions.length === 0) return;
  const rendition = this.data.audioRenditions[0];
  if (!rendition) return void 0;
  return {
@@ -32,7 +31,7 @@ var JitMediaEngine = class JitMediaEngine extends BaseMediaEngine {
  };
  }
  get videoRendition() {
- if (!this.data.videoRenditions || this.data.videoRenditions.length === 0) return void 0;
+ if (!this.data.videoRenditions || this.data.videoRenditions.length === 0) return;
  const rendition = this.data.videoRenditions[0];
  if (!rendition) return void 0;
  return {
@@ -57,26 +56,24 @@ var JitMediaEngine = class JitMediaEngine extends BaseMediaEngine {
  return this.fetchMedia(url);
  }
  computeSegmentId(desiredSeekTimeMs, rendition) {
- if (desiredSeekTimeMs > this.durationMs) return void 0;
+ if (desiredSeekTimeMs > this.durationMs) return;
  if (rendition.segmentDurationsMs && rendition.segmentDurationsMs.length > 0) {
  let cumulativeTime = 0;
  for (let i = 0; i < rendition.segmentDurationsMs.length; i++) {
  const segmentDuration = rendition.segmentDurationsMs[i];
  if (segmentDuration === void 0) throw new Error("Segment duration is required for JIT metadata");
- const segmentStartMs$1 = cumulativeTime;
+ const segmentStartMs = cumulativeTime;
  const segmentEndMs = cumulativeTime + segmentDuration;
- const isLastSegment = i === rendition.segmentDurationsMs.length - 1;
- const includesEndTime = isLastSegment && desiredSeekTimeMs === this.durationMs;
- if (desiredSeekTimeMs >= segmentStartMs$1 && (desiredSeekTimeMs < segmentEndMs || includesEndTime)) return i + 1;
+ const includesEndTime = i === rendition.segmentDurationsMs.length - 1 && desiredSeekTimeMs === this.durationMs;
+ if (desiredSeekTimeMs >= segmentStartMs && (desiredSeekTimeMs < segmentEndMs || includesEndTime)) return i + 1;
  cumulativeTime += segmentDuration;
  if (cumulativeTime >= this.durationMs) break;
  }
- return void 0;
+ return;
  }
  if (!rendition.segmentDurationMs) throw new Error("Segment duration is required for JIT metadata");
  const segmentIndex = Math.floor(desiredSeekTimeMs / rendition.segmentDurationMs);
- const segmentStartMs = segmentIndex * rendition.segmentDurationMs;
- if (segmentStartMs >= this.durationMs) return void 0;
+ if (segmentIndex * rendition.segmentDurationMs >= this.durationMs) return;
  return segmentIndex + 1;
  }
  getScrubVideoRendition() {
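
Note: `computeSegmentId` maps a seek time (ms) to a 1-based segment id. For renditions with a single fixed `segmentDurationMs` it is the floor division shown above; the snippet below restates that branch with illustrative numbers (a 2000 ms segment duration and a 9500 ms source, not values from the package) to make the boundary behaviour concrete.

    // Illustrative values only.
    const segmentDurationMs = 2000;
    const durationMs = 9500;

    // Fixed-duration branch of computeSegmentId, extracted for the example.
    function segmentIdFor(desiredSeekTimeMs: number): number | undefined {
      if (desiredSeekTimeMs > durationMs) return;                  // past the end of the source
      const segmentIndex = Math.floor(desiredSeekTimeMs / segmentDurationMs);
      if (segmentIndex * segmentDurationMs >= durationMs) return;  // segment would start at or after the end
      return segmentIndex + 1;                                     // segment ids are 1-based
    }

    segmentIdFor(0);    // 1: 0 ms falls in segment 1 (0-2000 ms)
    segmentIdFor(4100); // 3: floor(4100 / 2000) = 2, so segment 3 (4000-6000 ms)
    segmentIdFor(9500); // 5: the final partial segment (8000-9500 ms)
    segmentIdFor(9600); // undefined: beyond durationMs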
@@ -91,10 +88,6 @@ var JitMediaEngine = class JitMediaEngine extends BaseMediaEngine {
  segmentDurationsMs: scrubManifestRendition.segmentDurationsMs
  };
  }
- /**
- * Get preferred buffer configuration for JIT transcoding
- * Uses higher buffering since transcoding introduces latency
- */
  getBufferConfig() {
  return {
  videoBufferDurationMs: 8e3,
@@ -103,14 +96,20 @@ var JitMediaEngine = class JitMediaEngine extends BaseMediaEngine {
  maxAudioBufferFetches: 3
  };
  }
- /**
- * Extract thumbnail canvases using same rendition priority as video playback for frame alignment
- */
  async extractThumbnails(timestamps) {
- const mainRendition = this.videoRendition;
- const scrubRendition = this.getScrubVideoRendition();
- const rendition = mainRendition || scrubRendition;
- if (!rendition) return timestamps.map(() => null);
+ let rendition;
+ try {
+ const mainRendition = this.getVideoRendition();
+ if (mainRendition) rendition = mainRendition;
+ else {
+ const scrubRendition = this.getScrubVideoRendition();
+ if (scrubRendition) rendition = scrubRendition;
+ else throw new Error("No video rendition available");
+ }
+ } catch (error) {
+ console.warn("JitMediaEngine: No video rendition available for thumbnails", error);
+ return timestamps.map(() => null);
+ }
  return this.thumbnailExtractor.extractThumbnails(timestamps, rendition, this.durationMs);
  }
  convertToSegmentRelativeTimestamps(globalTimestamps, _segmentId, _rendition) {
@@ -33,12 +33,10 @@ const makeAudioBufferTask = (host) => {
  };
  return manageMediaBuffer(seekTimeMs, currentConfig, currentState, host.intrinsicDurationMs || 1e4, signal, {
  computeSegmentId: async (timeMs, rendition) => {
- const mediaEngine$1 = await getLatestMediaEngine(host, signal);
- return mediaEngine$1.computeSegmentId(timeMs, rendition);
+ return (await getLatestMediaEngine(host, signal)).computeSegmentId(timeMs, rendition);
  },
  prefetchSegment: async (segmentId, rendition) => {
- const mediaEngine$1 = await getLatestMediaEngine(host, signal);
- await mediaEngine$1.fetchMediaSegment(segmentId, rendition);
+ await (await getLatestMediaEngine(host, signal)).fetchMediaSegment(segmentId, rendition);
  },
  isSegmentCached: (segmentId, rendition) => {
  const mediaEngine$1 = host.mediaEngineTask.value;
@@ -46,9 +44,8 @@ const makeAudioBufferTask = (host) => {
  return mediaEngine$1.isSegmentCached(segmentId, rendition);
  },
  getRendition: async () => {
- const mediaEngine$1 = await getLatestMediaEngine(host, signal);
- const audioRendition = mediaEngine$1.audioRendition;
- if (!audioRendition) throw new Error("No audio track available in source");
+ const audioRendition = (await getLatestMediaEngine(host, signal)).audioRendition;
+ if (!audioRendition) throw new Error("Audio rendition not available");
  return audioRendition;
  },
  logError: console.error
@@ -1,7 +1,7 @@
  import { EF_INTERACTIVE } from "../../../EF_INTERACTIVE.js";
  import { LRUCache } from "../../../utils/LRUCache.js";
  import { Task } from "@lit/task";
- const DECAY_WEIGHT = .8;
+ var DECAY_WEIGHT = .8;
  function processFFTData(fftData, zeroThresholdPercent = .1) {
  const totalBins = fftData.length;
  const zeroThresholdCount = Math.floor(totalBins * zeroThresholdPercent);
@@ -51,8 +51,6 @@ function makeAudioFrequencyAnalysisTask(element) {
  ],
  task: async (_, { signal }) => {
  if (element.currentSourceTimeMs < 0) return null;
- const mediaEngine = element.mediaEngineTask.value;
- if (!mediaEngine?.audioRendition) return null;
  const currentTimeMs = element.currentSourceTimeMs;
  const frameIntervalMs = 1e3 / 30;
  const earliestFrameMs = currentTimeMs - (element.fftDecay - 1) * frameIntervalMs;
@@ -9,8 +9,8 @@ const makeAudioInitSegmentFetchTask = (host) => {
  onComplete: (_value) => {},
  task: async ([_mediaEngine], { signal }) => {
  const mediaEngine = await getLatestMediaEngine(host, signal);
- const audioRendition = mediaEngine.audioRendition;
- if (!audioRendition) return void 0;
+ const audioRendition = mediaEngine.getAudioRendition();
+ if (!audioRendition) return;
  return mediaEngine.fetchInitSegment(audioRendition, signal);
  }
  });
@@ -9,16 +9,18 @@ const makeAudioInputTask = (host) => {
  },
  onComplete: (_value) => {},
  task: async (_, { signal }) => {
- const initSegment = await host.audioInitSegmentFetchTask.taskComplete;
- signal.throwIfAborted();
- const segment = await host.audioSegmentFetchTask.taskComplete;
- signal.throwIfAborted();
- if (!initSegment || !segment) throw new Error("No audio track available in source");
  const mediaEngine = await host.mediaEngineTask.taskComplete;
+ if (signal.aborted) return void 0;
  const audioRendition = mediaEngine?.audioRendition;
- const startTimeOffsetMs = audioRendition?.startTimeOffsetMs;
+ if (!audioRendition) return;
+ const initSegment = await host.audioInitSegmentFetchTask.taskComplete;
+ if (signal.aborted) return void 0;
+ const segment = await host.audioSegmentFetchTask.taskComplete;
+ if (signal.aborted) return void 0;
+ if (!initSegment || !segment) return;
+ const startTimeOffsetMs = audioRendition.startTimeOffsetMs;
  const arrayBuffer = await new Blob([initSegment, segment]).arrayBuffer();
- signal.throwIfAborted();
+ if (signal.aborted) return void 0;
  return new BufferedSeekingInput(arrayBuffer, {
  videoBufferSize: EFMedia.VIDEO_SAMPLE_BUFFER_SIZE,
  audioBufferSize: EFMedia.AUDIO_SAMPLE_BUFFER_SIZE,
@@ -13,9 +13,7 @@ const makeAudioSeekTask = (host) => {
  else console.error("audioSeekTask unknown error", error);
  },
  onComplete: (_value) => {},
- task: async () => {
- return void 0;
- }
+ task: async () => {}
  });
  };
  export { makeAudioSeekTask };
@@ -9,19 +9,9 @@ const makeAudioSegmentFetchTask = (host) => {
  onComplete: (_value) => {},
  task: async (_, { signal }) => {
  const mediaEngine = await getLatestMediaEngine(host, signal);
- const audioRendition = mediaEngine.audioRendition;
- if (!audioRendition) return void 0;
  const segmentId = await host.audioSegmentIdTask.taskComplete;
- if (segmentId === void 0) {
- const debugInfo = {
- hasRendition: true,
- segmentDurationMs: audioRendition.segmentDurationMs,
- segmentDurationsMs: audioRendition.segmentDurationsMs?.length || 0,
- desiredSeekTimeMs: host.desiredSeekTimeMs,
- intrinsicDurationMs: host.intrinsicDurationMs
- };
- throw new Error(`Segment ID is not available for audio. Debug info: ${JSON.stringify(debugInfo)}`);
- }
+ const audioRendition = mediaEngine.getAudioRendition();
+ if (!audioRendition || segmentId === void 0) return;
  return mediaEngine.fetchMediaSegment(segmentId, audioRendition, signal);
  }
  });
@@ -10,8 +10,8 @@ const makeAudioSegmentIdTask = (host) => {
  task: async ([, targetSeekTimeMs], { signal }) => {
  const mediaEngine = await getLatestMediaEngine(host, signal);
  signal.throwIfAborted();
- const audioRendition = mediaEngine.audioRendition;
- if (!audioRendition) return void 0;
+ const audioRendition = mediaEngine.getAudioRendition();
+ if (!audioRendition) return;
  return mediaEngine.computeSegmentId(targetSeekTimeMs, audioRendition);
  }
  });
@@ -1,12 +1,17 @@
  import { EF_INTERACTIVE } from "../../../EF_INTERACTIVE.js";
  import { LRUCache } from "../../../utils/LRUCache.js";
+ import { IgnorableError } from "../../EFMedia.js";
  import { Task } from "@lit/task";
- const DECAY_WEIGHT = .8;
+ var DECAY_WEIGHT = .8;
  function makeAudioTimeDomainAnalysisTask(element) {
  const cache = new LRUCache(1e3);
  return new Task(element, {
  autoRun: EF_INTERACTIVE,
  onError: (error) => {
+ if (error instanceof IgnorableError) {
+ console.info("byteTimeDomainTask skipped: no audio track");
+ return;
+ }
  console.error("byteTimeDomainTask error", error);
  },
  args: () => [
@@ -18,8 +23,6 @@ function makeAudioTimeDomainAnalysisTask(element) {
  ],
  task: async (_, { signal }) => {
  if (element.currentSourceTimeMs < 0) return null;
- const mediaEngine = element.mediaEngineTask.value;
- if (!mediaEngine?.audioRendition) return null;
  const currentTimeMs = element.currentSourceTimeMs;
  const frameIntervalMs = 1e3 / 30;
  const earliestFrameMs = currentTimeMs - (element.fftDecay - 1) * frameIntervalMs;
@@ -4,4 +4,4 @@ import { EFMedia } from '../../EFMedia';
  * Fetch audio spanning a time range
  * Main function that orchestrates segment calculation, fetching, and blob creation
  */
- export declare const fetchAudioSpanningTime: (host: EFMedia, fromMs: number, toMs: number, signal: AbortSignal) => Promise<AudioSpan>;
+ export declare const fetchAudioSpanningTime: (host: EFMedia, fromMs: number, toMs: number, signal: AbortSignal) => Promise<AudioSpan | undefined>;
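
Note: with the declaration change above, `fetchAudioSpanningTime` now resolves to `undefined` when the source has no audio rendition or no init segment, where it previously threw. A minimal caller sketch (variable names are illustrative, not from the package):

    const audioSpan = await fetchAudioSpanningTime(host, fromMs, toMs, signal);
    if (audioSpan === undefined) {
      // Video-only source (or init segment unavailable): skip audio-dependent work.
    } else {
      // audioSpan is an AudioSpan, as before.
    }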
@@ -1,10 +1,6 @@
- /**
- * Fetch audio segment data using MediaEngine
- * Pure function with explicit dependencies
- */
- const fetchAudioSegmentData = async (segmentIds, mediaEngine, signal) => {
+ var fetchAudioSegmentData = async (segmentIds, mediaEngine, signal) => {
  const audioRendition = mediaEngine.audioRendition;
- if (!audioRendition) throw new Error("No audio track available in source");
+ if (!audioRendition) throw new Error("Audio rendition not available");
  const segmentData = /* @__PURE__ */ new Map();
  const fetchPromises = segmentIds.map(async (segmentId) => {
  const arrayBuffer = await mediaEngine.fetchMediaSegment(segmentId, audioRendition, signal);
@@ -15,24 +11,16 @@ const fetchAudioSegmentData = async (segmentIds, mediaEngine, signal) => {
  for (const [segmentId, arrayBuffer] of fetchedSegments) segmentData.set(segmentId, arrayBuffer);
  return segmentData;
  };
- /**
- * Create audio span blob from init segment and media segments
- * Pure function for blob creation
- */
- const createAudioSpanBlob = (initSegment, mediaSegments) => {
+ var createAudioSpanBlob = (initSegment, mediaSegments) => {
  const chunks = [initSegment, ...mediaSegments];
  return new Blob(chunks, { type: "audio/mp4" });
  };
- /**
- * Fetch audio spanning a time range
- * Main function that orchestrates segment calculation, fetching, and blob creation
- */
  const fetchAudioSpanningTime = async (host, fromMs, toMs, signal) => {
  if (fromMs >= toMs || fromMs < 0) throw new Error(`Invalid time range: fromMs=${fromMs}, toMs=${toMs}`);
  const mediaEngine = await host.mediaEngineTask.taskComplete;
  const initSegment = await host.audioInitSegmentFetchTask.taskComplete;
- if (!mediaEngine?.audioRendition) throw new Error("No audio track available in source");
- if (!initSegment) throw new Error("Audio init segment is not available");
+ if (!mediaEngine?.audioRendition) return;
+ if (!initSegment) return;
  const segmentRanges = mediaEngine.calculateAudioSegmentRange(fromMs, toMs, mediaEngine.audioRendition, host.intrinsicDurationMs || 1e4);
  if (segmentRanges.length === 0) throw new Error(`No segments found for time range ${fromMs}-${toMs}ms`);
  const segmentIds = segmentRanges.map((r) => r.segmentId);
@@ -24,7 +24,7 @@ export interface MediaBufferDependencies<T extends AudioRendition | VideoRenditi
  computeSegmentId: (timeMs: number, rendition: T) => Promise<number | undefined>;
  prefetchSegment: (segmentId: number, rendition: T) => Promise<void>;
  isSegmentCached: (segmentId: number, rendition: T) => boolean;
- getRendition: () => Promise<T>;
+ getRendition: () => Promise<T | undefined>;
  logError: (message: string, error: any) => void;
  }
  /**
@@ -1,6 +1,3 @@
- /**
- * Async version of computeSegmentRange for when computeSegmentId is async
- */
  const computeSegmentRangeAsync = async (startTimeMs, endTimeMs, durationMs, rendition, computeSegmentId) => {
  const segments = [];
  const segmentDurationMs = rendition.segmentDurationMs || 1e3;
@@ -15,23 +12,15 @@ const computeSegmentRangeAsync = async (startTimeMs, endTimeMs, durationMs, rend
  }
  return segments.filter((id, index, arr) => arr.indexOf(id) === index);
  };
- /**
- * Compute buffer queue based on desired segments and what we've already requested
- * Pure function - determines what new segments should be prefetched
- */
  const computeBufferQueue = (desiredSegments, requestedSegments) => {
  return desiredSegments.filter((segmentId) => !requestedSegments.has(segmentId));
  };
- /**
- * Core media buffering orchestration logic - prefetch only, no data storage
- * Integrates with BaseMediaEngine's existing caching and request deduplication
- */
  const manageMediaBuffer = async (seekTimeMs, config, currentState, durationMs, signal, deps) => {
  if (!config.enableBuffering) return currentState;
  const rendition = await deps.getRendition();
+ if (!rendition) return currentState;
  const endTimeMs = seekTimeMs + config.bufferDurationMs;
- const desiredSegments = await computeSegmentRangeAsync(seekTimeMs, endTimeMs, durationMs, rendition, deps.computeSegmentId);
- const uncachedSegments = desiredSegments.filter((segmentId) => !deps.isSegmentCached(segmentId, rendition));
+ const uncachedSegments = (await computeSegmentRangeAsync(seekTimeMs, endTimeMs, durationMs, rendition, deps.computeSegmentId)).filter((segmentId) => !deps.isSegmentCached(segmentId, rendition));
  const newQueue = computeBufferQueue(uncachedSegments, currentState.requestedSegments);
  const newRequestedSegments = new Set(currentState.requestedSegments);
  const newActiveRequests = new Set(currentState.activeRequests);
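
Note: `getRendition` may now resolve `undefined` (see the `MediaBufferDependencies` change above), and `manageMediaBuffer` returns the current state untouched in that case instead of throwing. A minimal sketch of the wiring, with `engine` as a placeholder for a media engine instance; the package's real call site is in `makeAudioBufferTask`, shown earlier in this diff:

    const nextState = await manageMediaBuffer(seekTimeMs, config, currentState, durationMs, signal, {
      computeSegmentId: async (timeMs, rendition) => engine.computeSegmentId(timeMs, rendition),
      prefetchSegment: async (segmentId, rendition) => {
        await engine.fetchMediaSegment(segmentId, rendition);
      },
      isSegmentCached: (segmentId, rendition) => engine.isSegmentCached(segmentId, rendition),
      // For a video-only source this resolves undefined, and manageMediaBuffer
      // now returns currentState instead of throwing.
      getRendition: async () => engine.audioRendition,
      logError: console.error,
    });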