@editframe/elements 0.25.1-beta.0 → 0.26.1-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/dist/elements/EFAudio.d.ts +4 -4
  2. package/dist/elements/EFCaptions.d.ts +12 -12
  3. package/dist/elements/EFImage.d.ts +4 -4
  4. package/dist/elements/EFMedia/AssetMediaEngine.js +2 -1
  5. package/dist/elements/EFMedia/AssetMediaEngine.js.map +1 -1
  6. package/dist/elements/EFMedia/BaseMediaEngine.js +13 -0
  7. package/dist/elements/EFMedia/BaseMediaEngine.js.map +1 -1
  8. package/dist/elements/EFMedia/JitMediaEngine.js +2 -1
  9. package/dist/elements/EFMedia/JitMediaEngine.js.map +1 -1
  10. package/dist/elements/EFMedia/audioTasks/makeAudioBufferTask.js +11 -4
  11. package/dist/elements/EFMedia/audioTasks/makeAudioBufferTask.js.map +1 -1
  12. package/dist/elements/EFMedia/shared/BufferUtils.js +16 -1
  13. package/dist/elements/EFMedia/shared/BufferUtils.js.map +1 -1
  14. package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.js +11 -4
  15. package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.js.map +1 -1
  16. package/dist/elements/EFMedia.d.ts +2 -2
  17. package/dist/elements/EFSurface.d.ts +4 -4
  18. package/dist/elements/EFTemporal.js +16 -2
  19. package/dist/elements/EFTemporal.js.map +1 -1
  20. package/dist/elements/EFThumbnailStrip.d.ts +4 -4
  21. package/dist/elements/EFTimegroup.d.ts +22 -0
  22. package/dist/elements/EFTimegroup.js +39 -0
  23. package/dist/elements/EFTimegroup.js.map +1 -1
  24. package/dist/elements/EFVideo.d.ts +4 -4
  25. package/dist/elements/EFWaveform.d.ts +4 -4
  26. package/dist/elements/updateAnimations.js +3 -1
  27. package/dist/elements/updateAnimations.js.map +1 -1
  28. package/dist/gui/EFConfiguration.d.ts +4 -4
  29. package/dist/gui/EFControls.d.ts +2 -2
  30. package/dist/gui/EFDial.d.ts +4 -4
  31. package/dist/gui/EFFocusOverlay.d.ts +4 -4
  32. package/dist/gui/EFPause.d.ts +4 -4
  33. package/dist/gui/EFPlay.d.ts +4 -4
  34. package/dist/gui/EFPreview.d.ts +4 -4
  35. package/dist/gui/EFResizableBox.d.ts +4 -4
  36. package/dist/gui/EFScrubber.d.ts +4 -4
  37. package/dist/gui/EFTimeDisplay.d.ts +4 -4
  38. package/dist/gui/EFToggleLoop.d.ts +4 -4
  39. package/dist/gui/EFTogglePlay.d.ts +4 -4
  40. package/dist/gui/EFWorkbench.d.ts +6 -6
  41. package/dist/style.css +10 -0
  42. package/dist/transcoding/types/index.d.ts +1 -0
  43. package/package.json +2 -2
  44. package/src/elements/EFMedia/AssetMediaEngine.ts +1 -0
  45. package/src/elements/EFMedia/BaseMediaEngine.ts +20 -0
  46. package/src/elements/EFMedia/JitMediaEngine.browsertest.ts +68 -0
  47. package/src/elements/EFMedia/JitMediaEngine.ts +1 -0
  48. package/src/elements/EFMedia/audioTasks/makeAudioBufferTask.ts +12 -0
  49. package/src/elements/EFMedia/shared/BufferUtils.ts +42 -0
  50. package/src/elements/EFMedia/videoTasks/makeVideoBufferTask.ts +12 -0
  51. package/src/elements/EFTemporal.ts +20 -4
  52. package/src/elements/EFTimegroup.browsertest.ts +198 -0
  53. package/src/elements/EFTimegroup.ts +61 -0
  54. package/src/elements/updateAnimations.browsertest.ts +801 -0
  55. package/src/elements/updateAnimations.ts +12 -1
  56. package/src/transcoding/types/index.ts +1 -0
  57. package/types.json +1 -1
package/dist/elements/EFAudio.d.ts
@@ -2,15 +2,15 @@ import { MediaEngine } from "../transcoding/types/index.js";
  import { EFMedia } from "./EFMedia.js";
  import * as _lit_task8 from "@lit/task";
  import { Task } from "@lit/task";
- import * as lit_html2 from "lit-html";
- import * as lit_html_directives_ref_js2 from "lit-html/directives/ref.js";
+ import * as lit_html1 from "lit-html";
+ import * as lit_html_directives_ref_js1 from "lit-html/directives/ref.js";

  //#region src/elements/EFAudio.d.ts
  declare const EFAudio_base: typeof EFMedia;
  declare class EFAudio extends EFAudio_base {
  private _propertyHack;
- audioElementRef: lit_html_directives_ref_js2.Ref<HTMLAudioElement>;
- render(): lit_html2.TemplateResult<1>;
+ audioElementRef: lit_html_directives_ref_js1.Ref<HTMLAudioElement>;
+ render(): lit_html1.TemplateResult<1>;
  frameTask: Task<readonly [_lit_task8.TaskStatus, _lit_task8.TaskStatus, _lit_task8.TaskStatus, _lit_task8.TaskStatus], void>;
  /**
  * Legacy getter for fragment index task (maps to audioSegmentIdTask)
package/dist/elements/EFCaptions.d.ts
@@ -4,9 +4,9 @@ import { FetchMixinInterface } from "./FetchMixin.js";
  import { EFAudio } from "./EFAudio.js";
  import { EFVideo } from "./EFVideo.js";
  import { Task, TaskStatus } from "@lit/task";
- import * as lit4 from "lit";
+ import * as lit3 from "lit";
  import { LitElement, PropertyValueMap } from "lit";
- import * as lit_html4 from "lit-html";
+ import * as lit_html3 from "lit-html";

  //#region src/elements/EFCaptions.d.ts
  interface WordSegment {
@@ -25,8 +25,8 @@ interface Caption {
  }
  declare const EFCaptionsActiveWord_base: (new (...args: any[]) => TemporalMixinInterface) & typeof LitElement;
  declare class EFCaptionsActiveWord extends EFCaptionsActiveWord_base {
- static styles: lit4.CSSResult[];
- render(): lit_html4.TemplateResult<1> | undefined;
+ static styles: lit3.CSSResult[];
+ render(): lit_html3.TemplateResult<1> | undefined;
  wordStartMs: number;
  wordEndMs: number;
  wordText: string;
@@ -38,8 +38,8 @@ declare class EFCaptionsActiveWord extends EFCaptionsActiveWord_base {
  }
  declare const EFCaptionsSegment_base: (new (...args: any[]) => TemporalMixinInterface) & typeof LitElement;
  declare class EFCaptionsSegment extends EFCaptionsSegment_base {
- static styles: lit4.CSSResult[];
- render(): lit_html4.TemplateResult<1> | undefined;
+ static styles: lit3.CSSResult[];
+ render(): lit_html3.TemplateResult<1> | undefined;
  segmentStartMs: number;
  segmentEndMs: number;
  segmentText: string;
@@ -49,8 +49,8 @@ declare class EFCaptionsSegment extends EFCaptionsSegment_base {
  get durationMs(): number;
  }
  declare class EFCaptionsBeforeActiveWord extends EFCaptionsSegment {
- static styles: lit4.CSSResult[];
- render(): lit_html4.TemplateResult<1> | undefined;
+ static styles: lit3.CSSResult[];
+ render(): lit_html3.TemplateResult<1> | undefined;
  hidden: boolean;
  segmentText: string;
  segmentStartMs: number;
@@ -60,8 +60,8 @@ declare class EFCaptionsBeforeActiveWord extends EFCaptionsSegment {
  get durationMs(): number;
  }
  declare class EFCaptionsAfterActiveWord extends EFCaptionsSegment {
- static styles: lit4.CSSResult[];
- render(): lit_html4.TemplateResult<1> | undefined;
+ static styles: lit3.CSSResult[];
+ render(): lit_html3.TemplateResult<1> | undefined;
  hidden: boolean;
  segmentText: string;
  segmentStartMs: number;
@@ -72,7 +72,7 @@ declare class EFCaptionsAfterActiveWord extends EFCaptionsSegment {
  }
  declare const EFCaptions_base: (new (...args: any[]) => EFSourceMixinInterface) & (new (...args: any[]) => TemporalMixinInterface) & (new (...args: any[]) => FetchMixinInterface) & typeof LitElement;
  declare class EFCaptions extends EFCaptions_base {
- static styles: lit4.CSSResult[];
+ static styles: lit3.CSSResult[];
  targetSelector: string;
  set target(value: string);
  wordStyle: string;
@@ -95,7 +95,7 @@ declare class EFCaptions extends EFCaptions_base {
  segmentContainers: HTMLCollectionOf<EFCaptionsSegment>;
  beforeActiveWordContainers: HTMLCollectionOf<EFCaptionsBeforeActiveWord>;
  afterActiveWordContainers: HTMLCollectionOf<EFCaptionsAfterActiveWord>;
- render(): lit_html4.TemplateResult<1>;
+ render(): lit_html3.TemplateResult<1>;
  transcriptionsPath(): string | null;
  captionsPath(): string | null;
  protected md5SumLoader: Task<readonly [string, typeof fetch], string | null | undefined>;
package/dist/elements/EFImage.d.ts
@@ -3,21 +3,21 @@ import { EFSourceMixinInterface } from "./EFSourceMixin.js";
  import { FetchMixinInterface } from "./FetchMixin.js";
  import * as _lit_task0 from "@lit/task";
  import { Task } from "@lit/task";
- import * as lit1 from "lit";
+ import * as lit0 from "lit";
  import { LitElement } from "lit";
- import * as lit_html1 from "lit-html";
+ import * as lit_html0 from "lit-html";
  import * as lit_html_directives_ref_js0 from "lit-html/directives/ref.js";

  //#region src/elements/EFImage.d.ts
  declare const EFImage_base: (new (...args: any[]) => TemporalMixinInterface) & (new (...args: any[]) => EFSourceMixinInterface) & (new (...args: any[]) => FetchMixinInterface) & typeof LitElement;
  declare class EFImage extends EFImage_base {
  #private;
- static styles: lit1.CSSResult[];
+ static styles: lit0.CSSResult[];
  imageRef: lit_html_directives_ref_js0.Ref<HTMLImageElement>;
  canvasRef: lit_html_directives_ref_js0.Ref<HTMLCanvasElement>;
  set assetId(value: string | null);
  get assetId(): string | null;
- render(): lit_html1.TemplateResult<1>;
+ render(): lit_html0.TemplateResult<1>;
  private isDirectUrl;
  assetPath(): string;
  get hasOwnDuration(): boolean;
package/dist/elements/EFMedia/AssetMediaEngine.js
@@ -173,7 +173,8 @@ var AssetMediaEngine = class AssetMediaEngine extends BaseMediaEngine {
  videoBufferDurationMs: 2e3,
  audioBufferDurationMs: 2e3,
  maxVideoBufferFetches: 1,
- maxAudioBufferFetches: 1
+ maxAudioBufferFetches: 1,
+ bufferThresholdMs: 3e4
  };
  }
  convertToSegmentRelativeTimestamps(globalTimestamps, segmentId, rendition) {
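The substantive change in this hunk is the new `bufferThresholdMs` field (`3e4` = 30 000 ms; the source map below labels it a "Timeline-aware buffering threshold"). A minimal sketch of the config shape the engine now returns — the interface name `BufferConfig` is an assumption for illustration, only the field names and values come from the diff:

```ts
// Sketch only: field names/values are taken from the hunk above; the interface
// name `BufferConfig` is assumed and not part of the package's public API.
interface BufferConfig {
  videoBufferDurationMs: number; // how far ahead to buffer video (2e3 = 2 000 ms for assets)
  audioBufferDurationMs: number; // how far ahead to buffer audio
  maxVideoBufferFetches: number; // cap on in-flight video segment fetches
  maxAudioBufferFetches: number; // cap on in-flight audio segment fetches
  bufferThresholdMs: number;     // new in 0.26.x: timeline-aware buffering threshold (3e4 = 30 000 ms)
}
```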
package/dist/elements/EFMedia/AssetMediaEngine.js.map
@@ -1 +1 @@
- {"version":3,"file":"AssetMediaEngine.js",…} (previous single-line source map, collapsed here for readability)
+ {"version":3,"file":"AssetMediaEngine.js",…} (regenerated single-line source map; its embedded sourcesContent differs only in getBufferConfig, which now also returns `bufferThresholdMs: 30000, // Timeline-aware buffering threshold`)
package/dist/elements/EFMedia/BaseMediaEngine.js
@@ -239,6 +239,19 @@ var BaseMediaEngine = class {
  console.warn(`${engineName}: extractThumbnails not properly implemented. This MediaEngine type does not support thumbnail generation. Supported engines: JitMediaEngine. Requested ${timestamps.length} thumbnail${timestamps.length === 1 ? "" : "s"}.`);
  return timestamps.map(() => null);
  }
+ /**
+ * Get buffer configuration for this media engine
+ * Can be overridden by subclasses to provide custom buffer settings
+ */
+ getBufferConfig() {
+ return {
+ videoBufferDurationMs: 1e4,
+ audioBufferDurationMs: 1e4,
+ maxVideoBufferFetches: 3,
+ maxAudioBufferFetches: 3,
+ bufferThresholdMs: 3e4
+ };
+ }
  };

  //#endregion
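Taken together with the AssetMediaEngine hunk above, the pattern is a base default that subclasses override: BaseMediaEngine now ships 10 s buffers with up to 3 concurrent fetches, while AssetMediaEngine tightens that to ~2 s and 1 fetch because asset segments arrive pre-optimized. A self-contained sketch of that composition, assuming nothing beyond what the two hunks show (the class names below are illustrative stand-ins, not the package's source):

```ts
// Illustrative only: mirrors the defaults added to BaseMediaEngine (1e4 = 10 000 ms,
// 3 fetches, 3e4 = 30 000 ms threshold) and the tighter AssetMediaEngine override
// (2 000 ms, 1 fetch). Not the package's actual code.
abstract class BaseEngineSketch {
  getBufferConfig() {
    return {
      videoBufferDurationMs: 10_000,
      audioBufferDurationMs: 10_000,
      maxVideoBufferFetches: 3,
      maxAudioBufferFetches: 3,
      bufferThresholdMs: 30_000,
    };
  }
}

class AssetEngineSketch extends BaseEngineSketch {
  // Assets ship pre-optimized segments, so buffer just ~1 segment (~2 s) ahead.
  override getBufferConfig() {
    return {
      ...super.getBufferConfig(),
      videoBufferDurationMs: 2_000,
      audioBufferDurationMs: 2_000,
      maxVideoBufferFetches: 1,
      maxAudioBufferFetches: 1,
    };
  }
}

// A scheduler might stop prefetching once buffered content passes the threshold:
const cfg = new AssetEngineSketch().getBufferConfig();
const shouldPrefetch = (bufferedAheadMs: number) => bufferedAheadMs < cfg.bufferThresholdMs;
```

The spread-plus-override shape is one plausible reading; the compiled code in the hunks simply redefines the whole object in each class.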
@@ -1 +1 @@
1
- {"version":3,"file":"BaseMediaEngine.js","names":["result","tEnd","segments: SegmentTimeRange[]"],"sources":["../../../src/elements/EFMedia/BaseMediaEngine.ts"],"sourcesContent":["import { withSpan } from \"../../otel/tracingHelpers.js\";\nimport { RequestDeduplicator } from \"../../transcoding/cache/RequestDeduplicator.js\";\nimport type {\n AudioRendition,\n SegmentTimeRange,\n ThumbnailResult,\n VideoRendition,\n} from \"../../transcoding/types\";\nimport { SizeAwareLRUCache } from \"../../utils/LRUCache.js\";\nimport type { EFMedia } from \"../EFMedia.js\";\nimport type { MediaRendition } from \"./shared/MediaTaskUtils.js\";\n\n// Global instances shared across all media engines\nexport const mediaCache = new SizeAwareLRUCache<string>(100 * 1024 * 1024); // 100MB cache limit\nexport const globalRequestDeduplicator = new RequestDeduplicator();\n\nexport abstract class BaseMediaEngine {\n protected host: EFMedia;\n\n constructor(host: EFMedia) {\n this.host = host;\n }\n\n abstract get videoRendition(): VideoRendition | undefined;\n abstract get audioRendition(): AudioRendition | undefined;\n\n /**\n * Get video rendition if available. Returns undefined for audio-only assets.\n * Callers should handle undefined gracefully.\n */\n getVideoRendition(): VideoRendition | undefined {\n return this.videoRendition;\n }\n\n /**\n * Get audio rendition if available. Returns undefined for video-only assets.\n * Callers should handle undefined gracefully.\n */\n getAudioRendition(): AudioRendition | undefined {\n return this.audioRendition;\n }\n\n /**\n * Generate cache key for segment requests\n */\n private getSegmentCacheKey(\n segmentId: number,\n rendition: { src: string; trackId: number | undefined; id?: string },\n ): string {\n return `${rendition.src}-${rendition.id}-${segmentId}-${rendition.trackId}`;\n }\n\n /**\n * Unified fetch method with caching and global deduplication\n * All requests (media, manifest, init segments) go through this method\n */\n protected async fetchWithCache(\n url: string,\n options: {\n responseType: \"arrayBuffer\" | \"json\";\n headers?: Record<string, string>;\n signal?: AbortSignal;\n },\n ): Promise<any> {\n return withSpan(\n \"mediaEngine.fetchWithCache\",\n {\n url: url.length > 100 ? `${url.substring(0, 100)}...` : url,\n responseType: options.responseType,\n hasHeaders: !!options.headers,\n },\n undefined,\n async (span) => {\n const t0 = performance.now();\n const { responseType, headers, signal } = options;\n\n // Create cache key that includes URL and headers for proper isolation\n // Note: We don't include signal in cache key as it would prevent proper deduplication\n const cacheKey = headers ? 
`${url}:${JSON.stringify(headers)}` : url;\n\n // Check cache first\n const t1 = performance.now();\n const cached = mediaCache.get(cacheKey);\n const t2 = performance.now();\n span.setAttribute(\"cacheLookupMs\", Math.round((t2 - t1) * 1000) / 1000);\n\n if (cached) {\n span.setAttribute(\"cacheHit\", true);\n // If we have a cached promise, we need to handle the caller's abort signal\n // without affecting the underlying request that other instances might be using\n if (signal) {\n const t3 = performance.now();\n const result = await this.handleAbortForCachedRequest(\n cached,\n signal,\n );\n const t4 = performance.now();\n span.setAttribute(\n \"handleAbortMs\",\n Math.round((t4 - t3) * 100) / 100,\n );\n span.setAttribute(\n \"totalCacheHitMs\",\n Math.round((t4 - t0) * 100) / 100,\n );\n return result;\n }\n span.setAttribute(\n \"totalCacheHitMs\",\n Math.round((t2 - t0) * 100) / 100,\n );\n return cached;\n }\n\n span.setAttribute(\"cacheHit\", false);\n\n // Use global deduplicator to prevent concurrent requests for the same resource\n // Note: We do NOT pass the signal to the deduplicator - each caller manages their own abort\n const promise = globalRequestDeduplicator.executeRequest(\n cacheKey,\n async () => {\n const fetchStart = performance.now();\n try {\n // Make the fetch request WITHOUT the signal - let each caller handle their own abort\n // This prevents one instance's abort from affecting other instances using the shared cache\n const response = await this.host.fetch(url, { headers });\n const fetchEnd = performance.now();\n span.setAttribute(\"fetchMs\", fetchEnd - fetchStart);\n\n if (responseType === \"json\") {\n return response.json();\n }\n const buffer = await response.arrayBuffer();\n span.setAttribute(\"sizeBytes\", buffer.byteLength);\n return buffer;\n } catch (error) {\n // If the request was aborted, don't cache the error\n if (\n error instanceof DOMException &&\n error.name === \"AbortError\"\n ) {\n // Remove from cache so other requests can retry\n mediaCache.delete(cacheKey);\n }\n throw error;\n }\n },\n );\n\n // Cache the promise (not the result) to handle concurrent requests\n mediaCache.set(cacheKey, promise);\n\n // Handle the case where the promise might be aborted\n promise.catch((error) => {\n // If the request was aborted, remove it from cache to prevent corrupted data\n if (error instanceof DOMException && error.name === \"AbortError\") {\n mediaCache.delete(cacheKey);\n }\n });\n\n // If the caller has a signal, handle abort logic without affecting the underlying request\n if (signal) {\n const result = await this.handleAbortForCachedRequest(\n promise,\n signal,\n );\n const tEnd = performance.now();\n span.setAttribute(\n \"totalFetchMs\",\n Math.round((tEnd - t0) * 100) / 100,\n );\n return result;\n }\n\n const result = await promise;\n const tEnd = performance.now();\n span.setAttribute(\"totalFetchMs\", Math.round((tEnd - t0) * 100) / 100);\n return result;\n },\n );\n }\n\n /**\n * Handles abort logic for a cached request without affecting the underlying fetch\n * This allows multiple instances to share the same cached request while each\n * manages their own abort behavior\n */\n private handleAbortForCachedRequest<T>(\n promise: Promise<T>,\n signal: AbortSignal,\n ): Promise<T> {\n // If signal is already aborted, reject immediately\n if (signal.aborted) {\n throw new DOMException(\"Aborted\", \"AbortError\");\n }\n\n // Return a promise that respects the caller's abort signal\n // but doesn't affect the underlying cached 
request\n return Promise.race([\n promise,\n new Promise<never>((_, reject) => {\n signal.addEventListener(\"abort\", () => {\n reject(new DOMException(\"Aborted\", \"AbortError\"));\n });\n }),\n ]);\n }\n\n // Public wrapper methods that delegate to fetchWithCache\n async fetchMedia(url: string, signal?: AbortSignal): Promise<ArrayBuffer> {\n // Check abort signal immediately before any processing\n if (signal?.aborted) {\n throw new DOMException(\"Aborted\", \"AbortError\");\n }\n return this.fetchWithCache(url, { responseType: \"arrayBuffer\", signal });\n }\n\n async fetchManifest(url: string, signal?: AbortSignal): Promise<any> {\n // Check abort signal immediately before any processing\n if (signal?.aborted) {\n throw new DOMException(\"Aborted\", \"AbortError\");\n }\n return this.fetchWithCache(url, { responseType: \"json\", signal });\n }\n\n async fetchMediaWithHeaders(\n url: string,\n headers: Record<string, string>,\n signal?: AbortSignal,\n ): Promise<ArrayBuffer> {\n // Check abort signal immediately before any processing\n if (signal?.aborted) {\n throw new DOMException(\"Aborted\", \"AbortError\");\n }\n return this.fetchWithCache(url, {\n responseType: \"arrayBuffer\",\n headers,\n signal,\n });\n }\n\n // Legacy methods for backward compatibility\n async fetchMediaCache(\n url: string,\n signal?: AbortSignal,\n ): Promise<ArrayBuffer> {\n return this.fetchMedia(url, signal);\n }\n\n async fetchManifestCache(url: string, signal?: AbortSignal): Promise<any> {\n return this.fetchManifest(url, signal);\n }\n\n async fetchMediaCacheWithHeaders(\n url: string,\n headers: Record<string, string>,\n signal?: AbortSignal,\n ): Promise<ArrayBuffer> {\n return this.fetchMediaWithHeaders(url, headers, signal);\n }\n\n /**\n * Abstract method for actual segment fetching - implemented by subclasses\n */\n abstract fetchMediaSegment(\n segmentId: number,\n rendition: { trackId: number | undefined; src: string },\n ): Promise<ArrayBuffer>;\n\n abstract fetchInitSegment(\n rendition: { trackId: number | undefined; src: string },\n signal: AbortSignal,\n ): Promise<ArrayBuffer>;\n\n abstract computeSegmentId(\n desiredSeekTimeMs: number,\n rendition: MediaRendition,\n ): number | undefined;\n\n /**\n * Fetch media segment with built-in deduplication\n * Now uses global deduplication for all requests\n */\n async fetchMediaSegmentWithDeduplication(\n segmentId: number,\n rendition: { trackId: number | undefined; src: string },\n _signal?: AbortSignal,\n ): Promise<ArrayBuffer> {\n const cacheKey = this.getSegmentCacheKey(segmentId, rendition);\n\n return globalRequestDeduplicator.executeRequest(cacheKey, async () => {\n return this.fetchMediaSegment(segmentId, rendition);\n });\n }\n\n /**\n * Check if a segment is currently being fetched\n */\n isSegmentBeingFetched(\n segmentId: number,\n rendition: { src: string; trackId: number | undefined },\n ): boolean {\n const cacheKey = this.getSegmentCacheKey(segmentId, rendition);\n return globalRequestDeduplicator.isPending(cacheKey);\n }\n\n /**\n * Get count of active segment requests (for debugging/monitoring)\n */\n getActiveSegmentRequestCount(): number {\n return globalRequestDeduplicator.getPendingCount();\n }\n\n /**\n * Cancel all active segment requests (for cleanup)\n */\n cancelAllSegmentRequests(): void {\n globalRequestDeduplicator.clear();\n }\n\n /**\n * Calculate audio segments needed for a time range\n * Each media engine implements this based on their segment structure\n */\n calculateAudioSegmentRange(\n fromMs: number,\n 
toMs: number,\n rendition: AudioRendition,\n durationMs: number,\n ): SegmentTimeRange[] {\n // Default implementation for uniform segments (used by JitMediaEngine)\n if (fromMs >= toMs) {\n return [];\n }\n\n const segments: SegmentTimeRange[] = [];\n\n // Use actual segment durations if available (more accurate)\n if (\n rendition.segmentDurationsMs &&\n rendition.segmentDurationsMs.length > 0\n ) {\n let cumulativeTime = 0;\n\n for (let i = 0; i < rendition.segmentDurationsMs.length; i++) {\n const segmentDuration = rendition.segmentDurationsMs[i];\n if (segmentDuration === undefined) {\n continue; // Skip undefined segment durations\n }\n const segmentStartMs = cumulativeTime;\n const segmentEndMs = Math.min(\n cumulativeTime + segmentDuration,\n durationMs,\n );\n\n // Don't include segments that start at or beyond the file duration\n if (segmentStartMs >= durationMs) {\n break;\n }\n\n // Only include segments that overlap with requested time range\n if (segmentStartMs < toMs && segmentEndMs > fromMs) {\n segments.push({\n segmentId: i + 1, // Convert to 1-based\n startMs: segmentStartMs,\n endMs: segmentEndMs,\n });\n }\n\n cumulativeTime += segmentDuration;\n\n // If we've reached or exceeded file duration, stop\n if (cumulativeTime >= durationMs) {\n break;\n }\n }\n\n return segments;\n }\n\n // Fall back to fixed duration calculation for backward compatibility\n const segmentDurationMs = rendition.segmentDurationMs || 1000;\n const startSegmentIndex = Math.floor(fromMs / segmentDurationMs);\n const endSegmentIndex = Math.floor(toMs / segmentDurationMs);\n\n for (let i = startSegmentIndex; i <= endSegmentIndex; i++) {\n const segmentId = i + 1; // Convert to 1-based\n const segmentStartMs = i * segmentDurationMs;\n const segmentEndMs = Math.min((i + 1) * segmentDurationMs, durationMs);\n\n // Don't include segments that start at or beyond the file duration\n if (segmentStartMs >= durationMs) {\n break;\n }\n\n // Only include segments that overlap with requested time range\n if (segmentStartMs < toMs && segmentEndMs > fromMs) {\n segments.push({\n segmentId,\n startMs: segmentStartMs,\n endMs: segmentEndMs,\n });\n }\n }\n\n return segments;\n }\n\n /**\n * Check if a segment is cached for a given rendition\n * This needs to check the URL-based cache since that's where segments are actually stored\n */\n isSegmentCached(\n segmentId: number,\n rendition: AudioRendition | VideoRendition,\n ): boolean {\n try {\n // Check if this is a JIT engine by looking for urlGenerator property\n const maybeJitEngine = this as any;\n if (\n maybeJitEngine.urlGenerator &&\n typeof maybeJitEngine.urlGenerator.generateSegmentUrl === \"function\"\n ) {\n // This is a JIT engine - generate the URL and check URL-based cache\n if (!rendition.id) {\n return false;\n }\n\n const segmentUrl = maybeJitEngine.urlGenerator.generateSegmentUrl(\n segmentId,\n rendition.id,\n maybeJitEngine,\n );\n const urlIsCached = mediaCache.has(segmentUrl);\n\n return urlIsCached;\n }\n // For other engine types, fall back to the old segment-based key approach\n const cacheKey = `${rendition.src}-${rendition.id || \"default\"}-${segmentId}-${rendition.trackId}`;\n const isCached = mediaCache.has(cacheKey);\n return isCached;\n } catch (error) {\n console.warn(\n `🎬 BaseMediaEngine: Error checking if segment ${segmentId} is cached:`,\n error,\n );\n return false;\n }\n }\n\n /**\n * Get cached segment IDs from a list for a given rendition\n */\n getCachedSegments(\n segmentIds: number[],\n rendition: AudioRendition | 
VideoRendition,\n ): Set<number> {\n return new Set(\n segmentIds.filter((id) => this.isSegmentCached(id, rendition)),\n );\n }\n\n /**\n * Extract thumbnail canvases at multiple timestamps efficiently\n * Default implementation provides helpful error information\n */\n async extractThumbnails(\n timestamps: number[],\n ): Promise<(ThumbnailResult | null)[]> {\n const engineName = this.constructor.name;\n console.warn(\n `${engineName}: extractThumbnails not properly implemented. ` +\n \"This MediaEngine type does not support thumbnail generation. \" +\n \"Supported engines: JitMediaEngine. \" +\n `Requested ${timestamps.length} thumbnail${timestamps.length === 1 ? \"\" : \"s\"}.`,\n );\n return timestamps.map(() => null);\n }\n\n abstract convertToSegmentRelativeTimestamps(\n globalTimestamps: number[],\n segmentId: number,\n rendition: VideoRendition,\n ): number[];\n}\n"],"mappings":";;;;;AAaA,MAAa,aAAa,IAAI,kBAA0B,MAAM,OAAO,KAAK;AAC1E,MAAa,4BAA4B,IAAI,qBAAqB;AAElE,IAAsB,kBAAtB,MAAsC;CAGpC,YAAY,MAAe;AACzB,OAAK,OAAO;;;;;;CAUd,oBAAgD;AAC9C,SAAO,KAAK;;;;;;CAOd,oBAAgD;AAC9C,SAAO,KAAK;;;;;CAMd,AAAQ,mBACN,WACA,WACQ;AACR,SAAO,GAAG,UAAU,IAAI,GAAG,UAAU,GAAG,GAAG,UAAU,GAAG,UAAU;;;;;;CAOpE,MAAgB,eACd,KACA,SAKc;AACd,SAAO,SACL,8BACA;GACE,KAAK,IAAI,SAAS,MAAM,GAAG,IAAI,UAAU,GAAG,IAAI,CAAC,OAAO;GACxD,cAAc,QAAQ;GACtB,YAAY,CAAC,CAAC,QAAQ;GACvB,EACD,QACA,OAAO,SAAS;GACd,MAAM,KAAK,YAAY,KAAK;GAC5B,MAAM,EAAE,cAAc,SAAS,WAAW;GAI1C,MAAM,WAAW,UAAU,GAAG,IAAI,GAAG,KAAK,UAAU,QAAQ,KAAK;GAGjE,MAAM,KAAK,YAAY,KAAK;GAC5B,MAAM,SAAS,WAAW,IAAI,SAAS;GACvC,MAAM,KAAK,YAAY,KAAK;AAC5B,QAAK,aAAa,iBAAiB,KAAK,OAAO,KAAK,MAAM,IAAK,GAAG,IAAK;AAEvE,OAAI,QAAQ;AACV,SAAK,aAAa,YAAY,KAAK;AAGnC,QAAI,QAAQ;KACV,MAAM,KAAK,YAAY,KAAK;KAC5B,MAAMA,WAAS,MAAM,KAAK,4BACxB,QACA,OACD;KACD,MAAM,KAAK,YAAY,KAAK;AAC5B,UAAK,aACH,iBACA,KAAK,OAAO,KAAK,MAAM,IAAI,GAAG,IAC/B;AACD,UAAK,aACH,mBACA,KAAK,OAAO,KAAK,MAAM,IAAI,GAAG,IAC/B;AACD,YAAOA;;AAET,SAAK,aACH,mBACA,KAAK,OAAO,KAAK,MAAM,IAAI,GAAG,IAC/B;AACD,WAAO;;AAGT,QAAK,aAAa,YAAY,MAAM;GAIpC,MAAM,UAAU,0BAA0B,eACxC,UACA,YAAY;IACV,MAAM,aAAa,YAAY,KAAK;AACpC,QAAI;KAGF,MAAM,WAAW,MAAM,KAAK,KAAK,MAAM,KAAK,EAAE,SAAS,CAAC;KACxD,MAAM,WAAW,YAAY,KAAK;AAClC,UAAK,aAAa,WAAW,WAAW,WAAW;AAEnD,SAAI,iBAAiB,OACnB,QAAO,SAAS,MAAM;KAExB,MAAM,SAAS,MAAM,SAAS,aAAa;AAC3C,UAAK,aAAa,aAAa,OAAO,WAAW;AACjD,YAAO;aACA,OAAO;AAEd,SACE,iBAAiB,gBACjB,MAAM,SAAS,aAGf,YAAW,OAAO,SAAS;AAE7B,WAAM;;KAGX;AAGD,cAAW,IAAI,UAAU,QAAQ;AAGjC,WAAQ,OAAO,UAAU;AAEvB,QAAI,iBAAiB,gBAAgB,MAAM,SAAS,aAClD,YAAW,OAAO,SAAS;KAE7B;AAGF,OAAI,QAAQ;IACV,MAAMA,WAAS,MAAM,KAAK,4BACxB,SACA,OACD;IACD,MAAMC,SAAO,YAAY,KAAK;AAC9B,SAAK,aACH,gBACA,KAAK,OAAOA,SAAO,MAAM,IAAI,GAAG,IACjC;AACD,WAAOD;;GAGT,MAAM,SAAS,MAAM;GACrB,MAAM,OAAO,YAAY,KAAK;AAC9B,QAAK,aAAa,gBAAgB,KAAK,OAAO,OAAO,MAAM,IAAI,GAAG,IAAI;AACtE,UAAO;IAEV;;;;;;;CAQH,AAAQ,4BACN,SACA,QACY;AAEZ,MAAI,OAAO,QACT,OAAM,IAAI,aAAa,WAAW,aAAa;AAKjD,SAAO,QAAQ,KAAK,CAClB,SACA,IAAI,SAAgB,GAAG,WAAW;AAChC,UAAO,iBAAiB,eAAe;AACrC,WAAO,IAAI,aAAa,WAAW,aAAa,CAAC;KACjD;IACF,CACH,CAAC;;CAIJ,MAAM,WAAW,KAAa,QAA4C;AAExE,MAAI,QAAQ,QACV,OAAM,IAAI,aAAa,WAAW,aAAa;AAEjD,SAAO,KAAK,eAAe,KAAK;GAAE,cAAc;GAAe;GAAQ,CAAC;;CAG1E,MAAM,cAAc,KAAa,QAAoC;AAEnE,MAAI,QAAQ,QACV,OAAM,IAAI,aAAa,WAAW,aAAa;AAEjD,SAAO,KAAK,eAAe,KAAK;GAAE,cAAc;GAAQ;GAAQ,CAAC;;CAGnE,MAAM,sBACJ,KACA,SACA,QACsB;AAEtB,MAAI,QAAQ,QACV,OAAM,IAAI,aAAa,WAAW,aAAa;AAEjD,SAAO,KAAK,eAAe,KAAK;GAC9B,cAAc;GACd;GACA;GACD,CAAC;;CAIJ,MAAM,gBACJ,KACA,QACsB;AACtB,SAAO,KAAK,WAAW,KAAK,OAAO;;CAGrC,MAAM,mBAAmB,KAAa,QAAoC;AACxE,SAAO,KAAK,cAAc,KAAK,OAAO;;CAGxC,MAAM,2BACJ,KACA,SACA,QACsB;AACtB,SAAO,KAAK,sBAAsB,KAAK,SAAS,OAAO;;;;;;CAyB
zD,MAAM,mCACJ,WACA,WACA,SACsB;EACtB,MAAM,WAAW,KAAK,mBAAmB,WAAW,UAAU;AAE9D,SAAO,0BAA0B,eAAe,UAAU,YAAY;AACpE,UAAO,KAAK,kBAAkB,WAAW,UAAU;IACnD;;;;;CAMJ,sBACE,WACA,WACS;EACT,MAAM,WAAW,KAAK,mBAAmB,WAAW,UAAU;AAC9D,SAAO,0BAA0B,UAAU,SAAS;;;;;CAMtD,+BAAuC;AACrC,SAAO,0BAA0B,iBAAiB;;;;;CAMpD,2BAAiC;AAC/B,4BAA0B,OAAO;;;;;;CAOnC,2BACE,QACA,MACA,WACA,YACoB;AAEpB,MAAI,UAAU,KACZ,QAAO,EAAE;EAGX,MAAME,WAA+B,EAAE;AAGvC,MACE,UAAU,sBACV,UAAU,mBAAmB,SAAS,GACtC;GACA,IAAI,iBAAiB;AAErB,QAAK,IAAI,IAAI,GAAG,IAAI,UAAU,mBAAmB,QAAQ,KAAK;IAC5D,MAAM,kBAAkB,UAAU,mBAAmB;AACrD,QAAI,oBAAoB,OACtB;IAEF,MAAM,iBAAiB;IACvB,MAAM,eAAe,KAAK,IACxB,iBAAiB,iBACjB,WACD;AAGD,QAAI,kBAAkB,WACpB;AAIF,QAAI,iBAAiB,QAAQ,eAAe,OAC1C,UAAS,KAAK;KACZ,WAAW,IAAI;KACf,SAAS;KACT,OAAO;KACR,CAAC;AAGJ,sBAAkB;AAGlB,QAAI,kBAAkB,WACpB;;AAIJ,UAAO;;EAIT,MAAM,oBAAoB,UAAU,qBAAqB;EACzD,MAAM,oBAAoB,KAAK,MAAM,SAAS,kBAAkB;EAChE,MAAM,kBAAkB,KAAK,MAAM,OAAO,kBAAkB;AAE5D,OAAK,IAAI,IAAI,mBAAmB,KAAK,iBAAiB,KAAK;GACzD,MAAM,YAAY,IAAI;GACtB,MAAM,iBAAiB,IAAI;GAC3B,MAAM,eAAe,KAAK,KAAK,IAAI,KAAK,mBAAmB,WAAW;AAGtE,OAAI,kBAAkB,WACpB;AAIF,OAAI,iBAAiB,QAAQ,eAAe,OAC1C,UAAS,KAAK;IACZ;IACA,SAAS;IACT,OAAO;IACR,CAAC;;AAIN,SAAO;;;;;;CAOT,gBACE,WACA,WACS;AACT,MAAI;GAEF,MAAM,iBAAiB;AACvB,OACE,eAAe,gBACf,OAAO,eAAe,aAAa,uBAAuB,YAC1D;AAEA,QAAI,CAAC,UAAU,GACb,QAAO;IAGT,MAAM,aAAa,eAAe,aAAa,mBAC7C,WACA,UAAU,IACV,eACD;AAGD,WAFoB,WAAW,IAAI,WAAW;;GAKhD,MAAM,WAAW,GAAG,UAAU,IAAI,GAAG,UAAU,MAAM,UAAU,GAAG,UAAU,GAAG,UAAU;AAEzF,UADiB,WAAW,IAAI,SAAS;WAElC,OAAO;AACd,WAAQ,KACN,iDAAiD,UAAU,cAC3D,MACD;AACD,UAAO;;;;;;CAOX,kBACE,YACA,WACa;AACb,SAAO,IAAI,IACT,WAAW,QAAQ,OAAO,KAAK,gBAAgB,IAAI,UAAU,CAAC,CAC/D;;;;;;CAOH,MAAM,kBACJ,YACqC;EACrC,MAAM,aAAa,KAAK,YAAY;AACpC,UAAQ,KACN,GAAG,WAAW,0JAGC,WAAW,OAAO,YAAY,WAAW,WAAW,IAAI,KAAK,IAAI,GACjF;AACD,SAAO,WAAW,UAAU,KAAK"}
+ {"version":3,"file":"BaseMediaEngine.js","names":["result","tEnd","segments: SegmentTimeRange[]"],"sources":["../../../src/elements/EFMedia/BaseMediaEngine.ts"],"sourcesContent":["import { withSpan } from \"../../otel/tracingHelpers.js\";\nimport { RequestDeduplicator } from \"../../transcoding/cache/RequestDeduplicator.js\";\nimport type {\n AudioRendition,\n SegmentTimeRange,\n ThumbnailResult,\n VideoRendition,\n} from \"../../transcoding/types\";\nimport { SizeAwareLRUCache } from \"../../utils/LRUCache.js\";\nimport type { EFMedia } from \"../EFMedia.js\";\nimport type { MediaRendition } from \"./shared/MediaTaskUtils.js\";\n\n// Global instances shared across all media engines\nexport const mediaCache = new SizeAwareLRUCache<string>(100 * 1024 * 1024); // 100MB cache limit\nexport const globalRequestDeduplicator = new RequestDeduplicator();\n\nexport abstract class BaseMediaEngine {\n protected host: EFMedia;\n\n constructor(host: EFMedia) {\n this.host = host;\n }\n\n abstract get videoRendition(): VideoRendition | undefined;\n abstract get audioRendition(): AudioRendition | undefined;\n\n /**\n * Get video rendition if available. Returns undefined for audio-only assets.\n * Callers should handle undefined gracefully.\n */\n getVideoRendition(): VideoRendition | undefined {\n return this.videoRendition;\n }\n\n /**\n * Get audio rendition if available. Returns undefined for video-only assets.\n * Callers should handle undefined gracefully.\n */\n getAudioRendition(): AudioRendition | undefined {\n return this.audioRendition;\n }\n\n /**\n * Generate cache key for segment requests\n */\n private getSegmentCacheKey(\n segmentId: number,\n rendition: { src: string; trackId: number | undefined; id?: string },\n ): string {\n return `${rendition.src}-${rendition.id}-${segmentId}-${rendition.trackId}`;\n }\n\n /**\n * Unified fetch method with caching and global deduplication\n * All requests (media, manifest, init segments) go through this method\n */\n protected async fetchWithCache(\n url: string,\n options: {\n responseType: \"arrayBuffer\" | \"json\";\n headers?: Record<string, string>;\n signal?: AbortSignal;\n },\n ): Promise<any> {\n return withSpan(\n \"mediaEngine.fetchWithCache\",\n {\n url: url.length > 100 ? `${url.substring(0, 100)}...` : url,\n responseType: options.responseType,\n hasHeaders: !!options.headers,\n },\n undefined,\n async (span) => {\n const t0 = performance.now();\n const { responseType, headers, signal } = options;\n\n // Create cache key that includes URL and headers for proper isolation\n // Note: We don't include signal in cache key as it would prevent proper deduplication\n const cacheKey = headers ? 
`${url}:${JSON.stringify(headers)}` : url;\n\n // Check cache first\n const t1 = performance.now();\n const cached = mediaCache.get(cacheKey);\n const t2 = performance.now();\n span.setAttribute(\"cacheLookupMs\", Math.round((t2 - t1) * 1000) / 1000);\n\n if (cached) {\n span.setAttribute(\"cacheHit\", true);\n // If we have a cached promise, we need to handle the caller's abort signal\n // without affecting the underlying request that other instances might be using\n if (signal) {\n const t3 = performance.now();\n const result = await this.handleAbortForCachedRequest(\n cached,\n signal,\n );\n const t4 = performance.now();\n span.setAttribute(\n \"handleAbortMs\",\n Math.round((t4 - t3) * 100) / 100,\n );\n span.setAttribute(\n \"totalCacheHitMs\",\n Math.round((t4 - t0) * 100) / 100,\n );\n return result;\n }\n span.setAttribute(\n \"totalCacheHitMs\",\n Math.round((t2 - t0) * 100) / 100,\n );\n return cached;\n }\n\n span.setAttribute(\"cacheHit\", false);\n\n // Use global deduplicator to prevent concurrent requests for the same resource\n // Note: We do NOT pass the signal to the deduplicator - each caller manages their own abort\n const promise = globalRequestDeduplicator.executeRequest(\n cacheKey,\n async () => {\n const fetchStart = performance.now();\n try {\n // Make the fetch request WITHOUT the signal - let each caller handle their own abort\n // This prevents one instance's abort from affecting other instances using the shared cache\n const response = await this.host.fetch(url, { headers });\n const fetchEnd = performance.now();\n span.setAttribute(\"fetchMs\", fetchEnd - fetchStart);\n\n if (responseType === \"json\") {\n return response.json();\n }\n const buffer = await response.arrayBuffer();\n span.setAttribute(\"sizeBytes\", buffer.byteLength);\n return buffer;\n } catch (error) {\n // If the request was aborted, don't cache the error\n if (\n error instanceof DOMException &&\n error.name === \"AbortError\"\n ) {\n // Remove from cache so other requests can retry\n mediaCache.delete(cacheKey);\n }\n throw error;\n }\n },\n );\n\n // Cache the promise (not the result) to handle concurrent requests\n mediaCache.set(cacheKey, promise);\n\n // Handle the case where the promise might be aborted\n promise.catch((error) => {\n // If the request was aborted, remove it from cache to prevent corrupted data\n if (error instanceof DOMException && error.name === \"AbortError\") {\n mediaCache.delete(cacheKey);\n }\n });\n\n // If the caller has a signal, handle abort logic without affecting the underlying request\n if (signal) {\n const result = await this.handleAbortForCachedRequest(\n promise,\n signal,\n );\n const tEnd = performance.now();\n span.setAttribute(\n \"totalFetchMs\",\n Math.round((tEnd - t0) * 100) / 100,\n );\n return result;\n }\n\n const result = await promise;\n const tEnd = performance.now();\n span.setAttribute(\"totalFetchMs\", Math.round((tEnd - t0) * 100) / 100);\n return result;\n },\n );\n }\n\n /**\n * Handles abort logic for a cached request without affecting the underlying fetch\n * This allows multiple instances to share the same cached request while each\n * manages their own abort behavior\n */\n private handleAbortForCachedRequest<T>(\n promise: Promise<T>,\n signal: AbortSignal,\n ): Promise<T> {\n // If signal is already aborted, reject immediately\n if (signal.aborted) {\n throw new DOMException(\"Aborted\", \"AbortError\");\n }\n\n // Return a promise that respects the caller's abort signal\n // but doesn't affect the underlying cached 
request\n return Promise.race([\n promise,\n new Promise<never>((_, reject) => {\n signal.addEventListener(\"abort\", () => {\n reject(new DOMException(\"Aborted\", \"AbortError\"));\n });\n }),\n ]);\n }\n\n // Public wrapper methods that delegate to fetchWithCache\n async fetchMedia(url: string, signal?: AbortSignal): Promise<ArrayBuffer> {\n // Check abort signal immediately before any processing\n if (signal?.aborted) {\n throw new DOMException(\"Aborted\", \"AbortError\");\n }\n return this.fetchWithCache(url, { responseType: \"arrayBuffer\", signal });\n }\n\n async fetchManifest(url: string, signal?: AbortSignal): Promise<any> {\n // Check abort signal immediately before any processing\n if (signal?.aborted) {\n throw new DOMException(\"Aborted\", \"AbortError\");\n }\n return this.fetchWithCache(url, { responseType: \"json\", signal });\n }\n\n async fetchMediaWithHeaders(\n url: string,\n headers: Record<string, string>,\n signal?: AbortSignal,\n ): Promise<ArrayBuffer> {\n // Check abort signal immediately before any processing\n if (signal?.aborted) {\n throw new DOMException(\"Aborted\", \"AbortError\");\n }\n return this.fetchWithCache(url, {\n responseType: \"arrayBuffer\",\n headers,\n signal,\n });\n }\n\n // Legacy methods for backward compatibility\n async fetchMediaCache(\n url: string,\n signal?: AbortSignal,\n ): Promise<ArrayBuffer> {\n return this.fetchMedia(url, signal);\n }\n\n async fetchManifestCache(url: string, signal?: AbortSignal): Promise<any> {\n return this.fetchManifest(url, signal);\n }\n\n async fetchMediaCacheWithHeaders(\n url: string,\n headers: Record<string, string>,\n signal?: AbortSignal,\n ): Promise<ArrayBuffer> {\n return this.fetchMediaWithHeaders(url, headers, signal);\n }\n\n /**\n * Abstract method for actual segment fetching - implemented by subclasses\n */\n abstract fetchMediaSegment(\n segmentId: number,\n rendition: { trackId: number | undefined; src: string },\n ): Promise<ArrayBuffer>;\n\n abstract fetchInitSegment(\n rendition: { trackId: number | undefined; src: string },\n signal: AbortSignal,\n ): Promise<ArrayBuffer>;\n\n abstract computeSegmentId(\n desiredSeekTimeMs: number,\n rendition: MediaRendition,\n ): number | undefined;\n\n /**\n * Fetch media segment with built-in deduplication\n * Now uses global deduplication for all requests\n */\n async fetchMediaSegmentWithDeduplication(\n segmentId: number,\n rendition: { trackId: number | undefined; src: string },\n _signal?: AbortSignal,\n ): Promise<ArrayBuffer> {\n const cacheKey = this.getSegmentCacheKey(segmentId, rendition);\n\n return globalRequestDeduplicator.executeRequest(cacheKey, async () => {\n return this.fetchMediaSegment(segmentId, rendition);\n });\n }\n\n /**\n * Check if a segment is currently being fetched\n */\n isSegmentBeingFetched(\n segmentId: number,\n rendition: { src: string; trackId: number | undefined },\n ): boolean {\n const cacheKey = this.getSegmentCacheKey(segmentId, rendition);\n return globalRequestDeduplicator.isPending(cacheKey);\n }\n\n /**\n * Get count of active segment requests (for debugging/monitoring)\n */\n getActiveSegmentRequestCount(): number {\n return globalRequestDeduplicator.getPendingCount();\n }\n\n /**\n * Cancel all active segment requests (for cleanup)\n */\n cancelAllSegmentRequests(): void {\n globalRequestDeduplicator.clear();\n }\n\n /**\n * Calculate audio segments needed for a time range\n * Each media engine implements this based on their segment structure\n */\n calculateAudioSegmentRange(\n fromMs: number,\n 
toMs: number,\n rendition: AudioRendition,\n durationMs: number,\n ): SegmentTimeRange[] {\n // Default implementation for uniform segments (used by JitMediaEngine)\n if (fromMs >= toMs) {\n return [];\n }\n\n const segments: SegmentTimeRange[] = [];\n\n // Use actual segment durations if available (more accurate)\n if (\n rendition.segmentDurationsMs &&\n rendition.segmentDurationsMs.length > 0\n ) {\n let cumulativeTime = 0;\n\n for (let i = 0; i < rendition.segmentDurationsMs.length; i++) {\n const segmentDuration = rendition.segmentDurationsMs[i];\n if (segmentDuration === undefined) {\n continue; // Skip undefined segment durations\n }\n const segmentStartMs = cumulativeTime;\n const segmentEndMs = Math.min(\n cumulativeTime + segmentDuration,\n durationMs,\n );\n\n // Don't include segments that start at or beyond the file duration\n if (segmentStartMs >= durationMs) {\n break;\n }\n\n // Only include segments that overlap with requested time range\n if (segmentStartMs < toMs && segmentEndMs > fromMs) {\n segments.push({\n segmentId: i + 1, // Convert to 1-based\n startMs: segmentStartMs,\n endMs: segmentEndMs,\n });\n }\n\n cumulativeTime += segmentDuration;\n\n // If we've reached or exceeded file duration, stop\n if (cumulativeTime >= durationMs) {\n break;\n }\n }\n\n return segments;\n }\n\n // Fall back to fixed duration calculation for backward compatibility\n const segmentDurationMs = rendition.segmentDurationMs || 1000;\n const startSegmentIndex = Math.floor(fromMs / segmentDurationMs);\n const endSegmentIndex = Math.floor(toMs / segmentDurationMs);\n\n for (let i = startSegmentIndex; i <= endSegmentIndex; i++) {\n const segmentId = i + 1; // Convert to 1-based\n const segmentStartMs = i * segmentDurationMs;\n const segmentEndMs = Math.min((i + 1) * segmentDurationMs, durationMs);\n\n // Don't include segments that start at or beyond the file duration\n if (segmentStartMs >= durationMs) {\n break;\n }\n\n // Only include segments that overlap with requested time range\n if (segmentStartMs < toMs && segmentEndMs > fromMs) {\n segments.push({\n segmentId,\n startMs: segmentStartMs,\n endMs: segmentEndMs,\n });\n }\n }\n\n return segments;\n }\n\n /**\n * Check if a segment is cached for a given rendition\n * This needs to check the URL-based cache since that's where segments are actually stored\n */\n isSegmentCached(\n segmentId: number,\n rendition: AudioRendition | VideoRendition,\n ): boolean {\n try {\n // Check if this is a JIT engine by looking for urlGenerator property\n const maybeJitEngine = this as any;\n if (\n maybeJitEngine.urlGenerator &&\n typeof maybeJitEngine.urlGenerator.generateSegmentUrl === \"function\"\n ) {\n // This is a JIT engine - generate the URL and check URL-based cache\n if (!rendition.id) {\n return false;\n }\n\n const segmentUrl = maybeJitEngine.urlGenerator.generateSegmentUrl(\n segmentId,\n rendition.id,\n maybeJitEngine,\n );\n const urlIsCached = mediaCache.has(segmentUrl);\n\n return urlIsCached;\n }\n // For other engine types, fall back to the old segment-based key approach\n const cacheKey = `${rendition.src}-${rendition.id || \"default\"}-${segmentId}-${rendition.trackId}`;\n const isCached = mediaCache.has(cacheKey);\n return isCached;\n } catch (error) {\n console.warn(\n `🎬 BaseMediaEngine: Error checking if segment ${segmentId} is cached:`,\n error,\n );\n return false;\n }\n }\n\n /**\n * Get cached segment IDs from a list for a given rendition\n */\n getCachedSegments(\n segmentIds: number[],\n rendition: AudioRendition | 
VideoRendition,\n ): Set<number> {\n return new Set(\n segmentIds.filter((id) => this.isSegmentCached(id, rendition)),\n );\n }\n\n /**\n * Extract thumbnail canvases at multiple timestamps efficiently\n * Default implementation provides helpful error information\n */\n async extractThumbnails(\n timestamps: number[],\n ): Promise<(ThumbnailResult | null)[]> {\n const engineName = this.constructor.name;\n console.warn(\n `${engineName}: extractThumbnails not properly implemented. ` +\n \"This MediaEngine type does not support thumbnail generation. \" +\n \"Supported engines: JitMediaEngine. \" +\n `Requested ${timestamps.length} thumbnail${timestamps.length === 1 ? \"\" : \"s\"}.`,\n );\n return timestamps.map(() => null);\n }\n\n abstract convertToSegmentRelativeTimestamps(\n globalTimestamps: number[],\n segmentId: number,\n rendition: VideoRendition,\n ): number[];\n\n /**\n * Get buffer configuration for this media engine\n * Can be overridden by subclasses to provide custom buffer settings\n */\n getBufferConfig(): {\n videoBufferDurationMs: number;\n audioBufferDurationMs: number;\n maxVideoBufferFetches: number;\n maxAudioBufferFetches: number;\n bufferThresholdMs: number;\n } {\n return {\n videoBufferDurationMs: 10000, // 10 seconds\n audioBufferDurationMs: 10000, // 10 seconds\n maxVideoBufferFetches: 3,\n maxAudioBufferFetches: 3,\n bufferThresholdMs: 30000, // 30 seconds - timeline-aware buffering threshold\n };\n }\n}\n"],"mappings":";;;;;AAaA,MAAa,aAAa,IAAI,kBAA0B,MAAM,OAAO,KAAK;AAC1E,MAAa,4BAA4B,IAAI,qBAAqB;AAElE,IAAsB,kBAAtB,MAAsC;CAGpC,YAAY,MAAe;AACzB,OAAK,OAAO;;;;;;CAUd,oBAAgD;AAC9C,SAAO,KAAK;;;;;;CAOd,oBAAgD;AAC9C,SAAO,KAAK;;;;;CAMd,AAAQ,mBACN,WACA,WACQ;AACR,SAAO,GAAG,UAAU,IAAI,GAAG,UAAU,GAAG,GAAG,UAAU,GAAG,UAAU;;;;;;CAOpE,MAAgB,eACd,KACA,SAKc;AACd,SAAO,SACL,8BACA;GACE,KAAK,IAAI,SAAS,MAAM,GAAG,IAAI,UAAU,GAAG,IAAI,CAAC,OAAO;GACxD,cAAc,QAAQ;GACtB,YAAY,CAAC,CAAC,QAAQ;GACvB,EACD,QACA,OAAO,SAAS;GACd,MAAM,KAAK,YAAY,KAAK;GAC5B,MAAM,EAAE,cAAc,SAAS,WAAW;GAI1C,MAAM,WAAW,UAAU,GAAG,IAAI,GAAG,KAAK,UAAU,QAAQ,KAAK;GAGjE,MAAM,KAAK,YAAY,KAAK;GAC5B,MAAM,SAAS,WAAW,IAAI,SAAS;GACvC,MAAM,KAAK,YAAY,KAAK;AAC5B,QAAK,aAAa,iBAAiB,KAAK,OAAO,KAAK,MAAM,IAAK,GAAG,IAAK;AAEvE,OAAI,QAAQ;AACV,SAAK,aAAa,YAAY,KAAK;AAGnC,QAAI,QAAQ;KACV,MAAM,KAAK,YAAY,KAAK;KAC5B,MAAMA,WAAS,MAAM,KAAK,4BACxB,QACA,OACD;KACD,MAAM,KAAK,YAAY,KAAK;AAC5B,UAAK,aACH,iBACA,KAAK,OAAO,KAAK,MAAM,IAAI,GAAG,IAC/B;AACD,UAAK,aACH,mBACA,KAAK,OAAO,KAAK,MAAM,IAAI,GAAG,IAC/B;AACD,YAAOA;;AAET,SAAK,aACH,mBACA,KAAK,OAAO,KAAK,MAAM,IAAI,GAAG,IAC/B;AACD,WAAO;;AAGT,QAAK,aAAa,YAAY,MAAM;GAIpC,MAAM,UAAU,0BAA0B,eACxC,UACA,YAAY;IACV,MAAM,aAAa,YAAY,KAAK;AACpC,QAAI;KAGF,MAAM,WAAW,MAAM,KAAK,KAAK,MAAM,KAAK,EAAE,SAAS,CAAC;KACxD,MAAM,WAAW,YAAY,KAAK;AAClC,UAAK,aAAa,WAAW,WAAW,WAAW;AAEnD,SAAI,iBAAiB,OACnB,QAAO,SAAS,MAAM;KAExB,MAAM,SAAS,MAAM,SAAS,aAAa;AAC3C,UAAK,aAAa,aAAa,OAAO,WAAW;AACjD,YAAO;aACA,OAAO;AAEd,SACE,iBAAiB,gBACjB,MAAM,SAAS,aAGf,YAAW,OAAO,SAAS;AAE7B,WAAM;;KAGX;AAGD,cAAW,IAAI,UAAU,QAAQ;AAGjC,WAAQ,OAAO,UAAU;AAEvB,QAAI,iBAAiB,gBAAgB,MAAM,SAAS,aAClD,YAAW,OAAO,SAAS;KAE7B;AAGF,OAAI,QAAQ;IACV,MAAMA,WAAS,MAAM,KAAK,4BACxB,SACA,OACD;IACD,MAAMC,SAAO,YAAY,KAAK;AAC9B,SAAK,aACH,gBACA,KAAK,OAAOA,SAAO,MAAM,IAAI,GAAG,IACjC;AACD,WAAOD;;GAGT,MAAM,SAAS,MAAM;GACrB,MAAM,OAAO,YAAY,KAAK;AAC9B,QAAK,aAAa,gBAAgB,KAAK,OAAO,OAAO,MAAM,IAAI,GAAG,IAAI;AACtE,UAAO;IAEV;;;;;;;CAQH,AAAQ,4BACN,SACA,QACY;AAEZ,MAAI,OAAO,QACT,OAAM,IAAI,aAAa,WAAW,aAAa;AAKjD,SAAO,QAAQ,KAAK,CAClB,SACA,IAAI,SAAgB,GAAG,WAAW;AAChC,UAAO,iBAAiB,eAAe;AACrC,WAAO,IAAI,aAAa,WAAW,aAAa,CAAC;KACjD;IACF,CACH,CAAC;;CAIJ,MAAM,WAAW,
KAAa,QAA4C;AAExE,MAAI,QAAQ,QACV,OAAM,IAAI,aAAa,WAAW,aAAa;AAEjD,SAAO,KAAK,eAAe,KAAK;GAAE,cAAc;GAAe;GAAQ,CAAC;;CAG1E,MAAM,cAAc,KAAa,QAAoC;AAEnE,MAAI,QAAQ,QACV,OAAM,IAAI,aAAa,WAAW,aAAa;AAEjD,SAAO,KAAK,eAAe,KAAK;GAAE,cAAc;GAAQ;GAAQ,CAAC;;CAGnE,MAAM,sBACJ,KACA,SACA,QACsB;AAEtB,MAAI,QAAQ,QACV,OAAM,IAAI,aAAa,WAAW,aAAa;AAEjD,SAAO,KAAK,eAAe,KAAK;GAC9B,cAAc;GACd;GACA;GACD,CAAC;;CAIJ,MAAM,gBACJ,KACA,QACsB;AACtB,SAAO,KAAK,WAAW,KAAK,OAAO;;CAGrC,MAAM,mBAAmB,KAAa,QAAoC;AACxE,SAAO,KAAK,cAAc,KAAK,OAAO;;CAGxC,MAAM,2BACJ,KACA,SACA,QACsB;AACtB,SAAO,KAAK,sBAAsB,KAAK,SAAS,OAAO;;;;;;CAyBzD,MAAM,mCACJ,WACA,WACA,SACsB;EACtB,MAAM,WAAW,KAAK,mBAAmB,WAAW,UAAU;AAE9D,SAAO,0BAA0B,eAAe,UAAU,YAAY;AACpE,UAAO,KAAK,kBAAkB,WAAW,UAAU;IACnD;;;;;CAMJ,sBACE,WACA,WACS;EACT,MAAM,WAAW,KAAK,mBAAmB,WAAW,UAAU;AAC9D,SAAO,0BAA0B,UAAU,SAAS;;;;;CAMtD,+BAAuC;AACrC,SAAO,0BAA0B,iBAAiB;;;;;CAMpD,2BAAiC;AAC/B,4BAA0B,OAAO;;;;;;CAOnC,2BACE,QACA,MACA,WACA,YACoB;AAEpB,MAAI,UAAU,KACZ,QAAO,EAAE;EAGX,MAAME,WAA+B,EAAE;AAGvC,MACE,UAAU,sBACV,UAAU,mBAAmB,SAAS,GACtC;GACA,IAAI,iBAAiB;AAErB,QAAK,IAAI,IAAI,GAAG,IAAI,UAAU,mBAAmB,QAAQ,KAAK;IAC5D,MAAM,kBAAkB,UAAU,mBAAmB;AACrD,QAAI,oBAAoB,OACtB;IAEF,MAAM,iBAAiB;IACvB,MAAM,eAAe,KAAK,IACxB,iBAAiB,iBACjB,WACD;AAGD,QAAI,kBAAkB,WACpB;AAIF,QAAI,iBAAiB,QAAQ,eAAe,OAC1C,UAAS,KAAK;KACZ,WAAW,IAAI;KACf,SAAS;KACT,OAAO;KACR,CAAC;AAGJ,sBAAkB;AAGlB,QAAI,kBAAkB,WACpB;;AAIJ,UAAO;;EAIT,MAAM,oBAAoB,UAAU,qBAAqB;EACzD,MAAM,oBAAoB,KAAK,MAAM,SAAS,kBAAkB;EAChE,MAAM,kBAAkB,KAAK,MAAM,OAAO,kBAAkB;AAE5D,OAAK,IAAI,IAAI,mBAAmB,KAAK,iBAAiB,KAAK;GACzD,MAAM,YAAY,IAAI;GACtB,MAAM,iBAAiB,IAAI;GAC3B,MAAM,eAAe,KAAK,KAAK,IAAI,KAAK,mBAAmB,WAAW;AAGtE,OAAI,kBAAkB,WACpB;AAIF,OAAI,iBAAiB,QAAQ,eAAe,OAC1C,UAAS,KAAK;IACZ;IACA,SAAS;IACT,OAAO;IACR,CAAC;;AAIN,SAAO;;;;;;CAOT,gBACE,WACA,WACS;AACT,MAAI;GAEF,MAAM,iBAAiB;AACvB,OACE,eAAe,gBACf,OAAO,eAAe,aAAa,uBAAuB,YAC1D;AAEA,QAAI,CAAC,UAAU,GACb,QAAO;IAGT,MAAM,aAAa,eAAe,aAAa,mBAC7C,WACA,UAAU,IACV,eACD;AAGD,WAFoB,WAAW,IAAI,WAAW;;GAKhD,MAAM,WAAW,GAAG,UAAU,IAAI,GAAG,UAAU,MAAM,UAAU,GAAG,UAAU,GAAG,UAAU;AAEzF,UADiB,WAAW,IAAI,SAAS;WAElC,OAAO;AACd,WAAQ,KACN,iDAAiD,UAAU,cAC3D,MACD;AACD,UAAO;;;;;;CAOX,kBACE,YACA,WACa;AACb,SAAO,IAAI,IACT,WAAW,QAAQ,OAAO,KAAK,gBAAgB,IAAI,UAAU,CAAC,CAC/D;;;;;;CAOH,MAAM,kBACJ,YACqC;EACrC,MAAM,aAAa,KAAK,YAAY;AACpC,UAAQ,KACN,GAAG,WAAW,0JAGC,WAAW,OAAO,YAAY,WAAW,WAAW,IAAI,KAAK,IAAI,GACjF;AACD,SAAO,WAAW,UAAU,KAAK;;;;;;CAanC,kBAME;AACA,SAAO;GACL,uBAAuB;GACvB,uBAAuB;GACvB,uBAAuB;GACvB,uBAAuB;GACvB,mBAAmB;GACpB"}
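A note on the map above: its embedded sourcesContent shows BaseMediaEngine caching the fetch promise itself (not the result) and sharing it across callers, while handleAbortForCachedRequest lets each caller honor its own AbortSignal without cancelling the shared request. A minimal sketch of that race pattern, assuming DOM typings; raceWithAbort is an illustrative name, not a library export:

// Sketch of the abort-isolation pattern in handleAbortForCachedRequest:
// the shared promise is never cancelled; an aborting caller just stops waiting.
function raceWithAbort<T>(shared: Promise<T>, signal: AbortSignal): Promise<T> {
  if (signal.aborted) {
    // Mirror fetch() behavior: reject with a DOMException named AbortError.
    return Promise.reject(new DOMException("Aborted", "AbortError"));
  }
  return Promise.race([
    shared,
    new Promise<never>((_, reject) => {
      signal.addEventListener("abort", () =>
        reject(new DOMException("Aborted", "AbortError")),
      );
    }),
  ]);
}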
@@ -99,7 +99,8 @@ var JitMediaEngine = class JitMediaEngine extends BaseMediaEngine {
  videoBufferDurationMs: 8e3,
  audioBufferDurationMs: 8e3,
  maxVideoBufferFetches: 3,
- maxAudioBufferFetches: 3
+ maxAudioBufferFetches: 3,
+ bufferThresholdMs: 3e4
  };
  }
  /**
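Read together with the BaseMediaEngine default that appears earlier in this diff (10 s buffers, 30 s threshold), the hunk above simply extends the JIT override: look-ahead stays at 8 s (8e3 ms) and the new bufferThresholdMs matches the base's 30 s (3e4 ms). A sketch of the two shapes with values copied from the diff; the interface and const names here are illustrative:

// Buffer settings as they stand after this release (values from the diff).
interface BufferConfig {
  videoBufferDurationMs: number;
  audioBufferDurationMs: number;
  maxVideoBufferFetches: number;
  maxAudioBufferFetches: number;
  bufferThresholdMs: number;
}

// BaseMediaEngine default: 10 s look-ahead, 30 s timeline-aware threshold.
const baseDefaults: BufferConfig = {
  videoBufferDurationMs: 10_000,
  audioBufferDurationMs: 10_000,
  maxVideoBufferFetches: 3,
  maxAudioBufferFetches: 3,
  bufferThresholdMs: 30_000,
};

// JitMediaEngine override: shorter look-ahead to smooth JIT latency,
// same threshold.
const jitOverride: BufferConfig = {
  ...baseDefaults,
  videoBufferDurationMs: 8_000,
  audioBufferDurationMs: 8_000,
};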
@@ -1 +1 @@
- {"version":3,"file":"JitMediaEngine.js","names":["rendition: VideoRendition"],"sources":["../../../src/elements/EFMedia/JitMediaEngine.ts"],"sourcesContent":["import type {\n AudioRendition,\n MediaEngine,\n RenditionId,\n ThumbnailResult,\n VideoRendition,\n} from \"../../transcoding/types\";\nimport type { ManifestResponse } from \"../../transcoding/types/index.js\";\nimport type { UrlGenerator } from \"../../transcoding/utils/UrlGenerator\";\nimport type { EFMedia } from \"../EFMedia.js\";\nimport { BaseMediaEngine } from \"./BaseMediaEngine\";\nimport { ThumbnailExtractor } from \"./shared/ThumbnailExtractor.js\";\n\nexport class JitMediaEngine extends BaseMediaEngine implements MediaEngine {\n private urlGenerator: UrlGenerator;\n private data: ManifestResponse = {} as ManifestResponse;\n private thumbnailExtractor: ThumbnailExtractor;\n\n static async fetch(host: EFMedia, urlGenerator: UrlGenerator, url: string) {\n const engine = new JitMediaEngine(host, urlGenerator);\n const data = await engine.fetchManifest(url);\n engine.data = data;\n return engine;\n }\n\n constructor(host: EFMedia, urlGenerator: UrlGenerator) {\n super(host);\n this.urlGenerator = urlGenerator;\n this.thumbnailExtractor = new ThumbnailExtractor(this);\n }\n\n get durationMs() {\n return this.data.durationMs;\n }\n\n get src() {\n return this.data.sourceUrl;\n }\n\n get audioRendition(): AudioRendition | undefined {\n if (!this.data.audioRenditions || this.data.audioRenditions.length === 0) {\n return undefined;\n }\n\n const rendition = this.data.audioRenditions[0];\n if (!rendition) return undefined;\n\n return {\n id: rendition.id as RenditionId,\n trackId: undefined,\n src: this.data.sourceUrl,\n segmentDurationMs: rendition.segmentDurationMs,\n segmentDurationsMs: rendition.segmentDurationsMs,\n };\n }\n\n get videoRendition(): VideoRendition | undefined {\n if (!this.data.videoRenditions || this.data.videoRenditions.length === 0) {\n return undefined;\n }\n\n const rendition = this.data.videoRenditions[0];\n if (!rendition) return undefined;\n\n return {\n id: rendition.id as RenditionId,\n trackId: undefined,\n src: this.data.sourceUrl,\n segmentDurationMs: rendition.segmentDurationMs,\n segmentDurationsMs: rendition.segmentDurationsMs,\n };\n }\n\n get templates() {\n return this.data.endpoints;\n }\n\n async fetchInitSegment(\n rendition: { id?: RenditionId; trackId: number | undefined; src: string },\n signal: AbortSignal,\n ) {\n if (!rendition.id) {\n throw new Error(\"Rendition ID is required for JIT metadata\");\n }\n const url = this.urlGenerator.generateSegmentUrl(\n \"init\",\n rendition.id,\n this,\n );\n\n // Use unified fetch method\n return this.fetchMedia(url, signal);\n }\n\n async fetchMediaSegment(\n segmentId: number,\n rendition: { id?: RenditionId; trackId: number | undefined; src: string },\n ) {\n if (!rendition.id) {\n throw new Error(\"Rendition ID is required for JIT metadata\");\n }\n const url = this.urlGenerator.generateSegmentUrl(\n segmentId,\n rendition.id,\n this,\n );\n return this.fetchMedia(url);\n }\n\n computeSegmentId(\n desiredSeekTimeMs: number,\n rendition: VideoRendition | AudioRendition,\n ) {\n // Don't request segments beyond the actual file duration\n // Note: seeking to exactly durationMs should be allowed (it's the last moment of the file)\n if (desiredSeekTimeMs > this.durationMs) {\n return undefined;\n }\n\n // Use actual segment durations if available (more accurate)\n if (\n rendition.segmentDurationsMs &&\n rendition.segmentDurationsMs.length > 
0\n ) {\n let cumulativeTime = 0;\n\n for (let i = 0; i < rendition.segmentDurationsMs.length; i++) {\n const segmentDuration = rendition.segmentDurationsMs[i];\n if (segmentDuration === undefined) {\n throw new Error(\"Segment duration is required for JIT metadata\");\n }\n const segmentStartMs = cumulativeTime;\n const segmentEndMs = cumulativeTime + segmentDuration;\n\n // Check if the desired seek time falls within this segment\n // Special case: for the last segment, include the exact end time\n const isLastSegment = i === rendition.segmentDurationsMs.length - 1;\n const includesEndTime =\n isLastSegment && desiredSeekTimeMs === this.durationMs;\n\n if (\n desiredSeekTimeMs >= segmentStartMs &&\n (desiredSeekTimeMs < segmentEndMs || includesEndTime)\n ) {\n return i + 1; // Convert 0-based to 1-based segment ID\n }\n\n cumulativeTime += segmentDuration;\n\n // If we've reached or exceeded file duration, stop\n if (cumulativeTime >= this.durationMs) {\n break;\n }\n }\n\n // If we didn't find a segment, return undefined\n return undefined;\n }\n\n // Fall back to fixed duration calculation for backward compatibility\n if (!rendition.segmentDurationMs) {\n throw new Error(\"Segment duration is required for JIT metadata\");\n }\n\n const segmentIndex = Math.floor(\n desiredSeekTimeMs / rendition.segmentDurationMs,\n );\n\n // Calculate the actual segment start time\n const segmentStartMs = segmentIndex * rendition.segmentDurationMs;\n\n // If this segment would start at or beyond file duration, it doesn't exist\n if (segmentStartMs >= this.durationMs) {\n return undefined;\n }\n\n return segmentIndex + 1; // Convert 0-based to 1-based\n }\n\n getScrubVideoRendition(): VideoRendition | undefined {\n if (!this.data.videoRenditions) return undefined;\n\n const scrubManifestRendition = this.data.videoRenditions.find(\n (r) => r.id === \"scrub\",\n );\n\n if (!scrubManifestRendition) return this.videoRendition; // Fallback to main\n\n return {\n id: scrubManifestRendition.id as any,\n trackId: undefined,\n src: this.src,\n segmentDurationMs: scrubManifestRendition.segmentDurationMs,\n segmentDurationsMs: scrubManifestRendition.segmentDurationsMs,\n };\n }\n\n /**\n * Get preferred buffer configuration for JIT transcoding\n * Uses higher buffering since transcoding introduces latency\n */\n getBufferConfig() {\n return {\n // Buffer more aggressively for JIT transcoding to smooth over latency\n videoBufferDurationMs: 8000,\n audioBufferDurationMs: 8000,\n maxVideoBufferFetches: 3,\n maxAudioBufferFetches: 3,\n };\n }\n\n /**\n * Extract thumbnail canvases using same rendition priority as video playback for frame alignment\n */\n async extractThumbnails(\n timestamps: number[],\n ): Promise<(ThumbnailResult | null)[]> {\n // Use same rendition priority as video: try main rendition first for frame alignment\n let rendition: VideoRendition;\n try {\n const mainRendition = this.getVideoRendition();\n if (mainRendition) {\n rendition = mainRendition;\n } else {\n const scrubRendition = this.getScrubVideoRendition();\n if (scrubRendition) {\n rendition = scrubRendition;\n } else {\n throw new Error(\"No video rendition available\");\n }\n }\n } catch (error) {\n console.warn(\n \"JitMediaEngine: No video rendition available for thumbnails\",\n error,\n );\n return timestamps.map(() => null);\n }\n\n // Use shared thumbnail extraction logic\n return this.thumbnailExtractor.extractThumbnails(\n timestamps,\n rendition,\n this.durationMs,\n );\n }\n\n convertToSegmentRelativeTimestamps(\n 
globalTimestamps: number[],\n _segmentId: number,\n _rendition: VideoRendition,\n ): number[] {\n return globalTimestamps.map((timestamp) => timestamp / 1000);\n }\n}\n"],"mappings":";;;;AAaA,IAAa,iBAAb,MAAa,uBAAuB,gBAAuC;CAKzE,aAAa,MAAM,MAAe,cAA4B,KAAa;EACzE,MAAM,SAAS,IAAI,eAAe,MAAM,aAAa;AAErD,SAAO,OADM,MAAM,OAAO,cAAc,IAAI;AAE5C,SAAO;;CAGT,YAAY,MAAe,cAA4B;AACrD,QAAM,KAAK;cAXoB,EAAE;AAYjC,OAAK,eAAe;AACpB,OAAK,qBAAqB,IAAI,mBAAmB,KAAK;;CAGxD,IAAI,aAAa;AACf,SAAO,KAAK,KAAK;;CAGnB,IAAI,MAAM;AACR,SAAO,KAAK,KAAK;;CAGnB,IAAI,iBAA6C;AAC/C,MAAI,CAAC,KAAK,KAAK,mBAAmB,KAAK,KAAK,gBAAgB,WAAW,EACrE;EAGF,MAAM,YAAY,KAAK,KAAK,gBAAgB;AAC5C,MAAI,CAAC,UAAW,QAAO;AAEvB,SAAO;GACL,IAAI,UAAU;GACd,SAAS;GACT,KAAK,KAAK,KAAK;GACf,mBAAmB,UAAU;GAC7B,oBAAoB,UAAU;GAC/B;;CAGH,IAAI,iBAA6C;AAC/C,MAAI,CAAC,KAAK,KAAK,mBAAmB,KAAK,KAAK,gBAAgB,WAAW,EACrE;EAGF,MAAM,YAAY,KAAK,KAAK,gBAAgB;AAC5C,MAAI,CAAC,UAAW,QAAO;AAEvB,SAAO;GACL,IAAI,UAAU;GACd,SAAS;GACT,KAAK,KAAK,KAAK;GACf,mBAAmB,UAAU;GAC7B,oBAAoB,UAAU;GAC/B;;CAGH,IAAI,YAAY;AACd,SAAO,KAAK,KAAK;;CAGnB,MAAM,iBACJ,WACA,QACA;AACA,MAAI,CAAC,UAAU,GACb,OAAM,IAAI,MAAM,4CAA4C;EAE9D,MAAM,MAAM,KAAK,aAAa,mBAC5B,QACA,UAAU,IACV,KACD;AAGD,SAAO,KAAK,WAAW,KAAK,OAAO;;CAGrC,MAAM,kBACJ,WACA,WACA;AACA,MAAI,CAAC,UAAU,GACb,OAAM,IAAI,MAAM,4CAA4C;EAE9D,MAAM,MAAM,KAAK,aAAa,mBAC5B,WACA,UAAU,IACV,KACD;AACD,SAAO,KAAK,WAAW,IAAI;;CAG7B,iBACE,mBACA,WACA;AAGA,MAAI,oBAAoB,KAAK,WAC3B;AAIF,MACE,UAAU,sBACV,UAAU,mBAAmB,SAAS,GACtC;GACA,IAAI,iBAAiB;AAErB,QAAK,IAAI,IAAI,GAAG,IAAI,UAAU,mBAAmB,QAAQ,KAAK;IAC5D,MAAM,kBAAkB,UAAU,mBAAmB;AACrD,QAAI,oBAAoB,OACtB,OAAM,IAAI,MAAM,gDAAgD;IAElE,MAAM,iBAAiB;IACvB,MAAM,eAAe,iBAAiB;IAKtC,MAAM,kBADgB,MAAM,UAAU,mBAAmB,SAAS,KAE/C,sBAAsB,KAAK;AAE9C,QACE,qBAAqB,mBACpB,oBAAoB,gBAAgB,iBAErC,QAAO,IAAI;AAGb,sBAAkB;AAGlB,QAAI,kBAAkB,KAAK,WACzB;;AAKJ;;AAIF,MAAI,CAAC,UAAU,kBACb,OAAM,IAAI,MAAM,gDAAgD;EAGlE,MAAM,eAAe,KAAK,MACxB,oBAAoB,UAAU,kBAC/B;AAMD,MAHuB,eAAe,UAAU,qBAG1B,KAAK,WACzB;AAGF,SAAO,eAAe;;CAGxB,yBAAqD;AACnD,MAAI,CAAC,KAAK,KAAK,gBAAiB,QAAO;EAEvC,MAAM,yBAAyB,KAAK,KAAK,gBAAgB,MACtD,MAAM,EAAE,OAAO,QACjB;AAED,MAAI,CAAC,uBAAwB,QAAO,KAAK;AAEzC,SAAO;GACL,IAAI,uBAAuB;GAC3B,SAAS;GACT,KAAK,KAAK;GACV,mBAAmB,uBAAuB;GAC1C,oBAAoB,uBAAuB;GAC5C;;;;;;CAOH,kBAAkB;AAChB,SAAO;GAEL,uBAAuB;GACvB,uBAAuB;GACvB,uBAAuB;GACvB,uBAAuB;GACxB;;;;;CAMH,MAAM,kBACJ,YACqC;EAErC,IAAIA;AACJ,MAAI;GACF,MAAM,gBAAgB,KAAK,mBAAmB;AAC9C,OAAI,cACF,aAAY;QACP;IACL,MAAM,iBAAiB,KAAK,wBAAwB;AACpD,QAAI,eACF,aAAY;QAEZ,OAAM,IAAI,MAAM,+BAA+B;;WAG5C,OAAO;AACd,WAAQ,KACN,+DACA,MACD;AACD,UAAO,WAAW,UAAU,KAAK;;AAInC,SAAO,KAAK,mBAAmB,kBAC7B,YACA,WACA,KAAK,WACN;;CAGH,mCACE,kBACA,YACA,YACU;AACV,SAAO,iBAAiB,KAAK,cAAc,YAAY,IAAK"}
+ {"version":3,"file":"JitMediaEngine.js","names":["rendition: VideoRendition"],"sources":["../../../src/elements/EFMedia/JitMediaEngine.ts"],"sourcesContent":["import type {\n AudioRendition,\n MediaEngine,\n RenditionId,\n ThumbnailResult,\n VideoRendition,\n} from \"../../transcoding/types\";\nimport type { ManifestResponse } from \"../../transcoding/types/index.js\";\nimport type { UrlGenerator } from \"../../transcoding/utils/UrlGenerator\";\nimport type { EFMedia } from \"../EFMedia.js\";\nimport { BaseMediaEngine } from \"./BaseMediaEngine\";\nimport { ThumbnailExtractor } from \"./shared/ThumbnailExtractor.js\";\n\nexport class JitMediaEngine extends BaseMediaEngine implements MediaEngine {\n private urlGenerator: UrlGenerator;\n private data: ManifestResponse = {} as ManifestResponse;\n private thumbnailExtractor: ThumbnailExtractor;\n\n static async fetch(host: EFMedia, urlGenerator: UrlGenerator, url: string) {\n const engine = new JitMediaEngine(host, urlGenerator);\n const data = await engine.fetchManifest(url);\n engine.data = data;\n return engine;\n }\n\n constructor(host: EFMedia, urlGenerator: UrlGenerator) {\n super(host);\n this.urlGenerator = urlGenerator;\n this.thumbnailExtractor = new ThumbnailExtractor(this);\n }\n\n get durationMs() {\n return this.data.durationMs;\n }\n\n get src() {\n return this.data.sourceUrl;\n }\n\n get audioRendition(): AudioRendition | undefined {\n if (!this.data.audioRenditions || this.data.audioRenditions.length === 0) {\n return undefined;\n }\n\n const rendition = this.data.audioRenditions[0];\n if (!rendition) return undefined;\n\n return {\n id: rendition.id as RenditionId,\n trackId: undefined,\n src: this.data.sourceUrl,\n segmentDurationMs: rendition.segmentDurationMs,\n segmentDurationsMs: rendition.segmentDurationsMs,\n };\n }\n\n get videoRendition(): VideoRendition | undefined {\n if (!this.data.videoRenditions || this.data.videoRenditions.length === 0) {\n return undefined;\n }\n\n const rendition = this.data.videoRenditions[0];\n if (!rendition) return undefined;\n\n return {\n id: rendition.id as RenditionId,\n trackId: undefined,\n src: this.data.sourceUrl,\n segmentDurationMs: rendition.segmentDurationMs,\n segmentDurationsMs: rendition.segmentDurationsMs,\n };\n }\n\n get templates() {\n return this.data.endpoints;\n }\n\n async fetchInitSegment(\n rendition: { id?: RenditionId; trackId: number | undefined; src: string },\n signal: AbortSignal,\n ) {\n if (!rendition.id) {\n throw new Error(\"Rendition ID is required for JIT metadata\");\n }\n const url = this.urlGenerator.generateSegmentUrl(\n \"init\",\n rendition.id,\n this,\n );\n\n // Use unified fetch method\n return this.fetchMedia(url, signal);\n }\n\n async fetchMediaSegment(\n segmentId: number,\n rendition: { id?: RenditionId; trackId: number | undefined; src: string },\n ) {\n if (!rendition.id) {\n throw new Error(\"Rendition ID is required for JIT metadata\");\n }\n const url = this.urlGenerator.generateSegmentUrl(\n segmentId,\n rendition.id,\n this,\n );\n return this.fetchMedia(url);\n }\n\n computeSegmentId(\n desiredSeekTimeMs: number,\n rendition: VideoRendition | AudioRendition,\n ) {\n // Don't request segments beyond the actual file duration\n // Note: seeking to exactly durationMs should be allowed (it's the last moment of the file)\n if (desiredSeekTimeMs > this.durationMs) {\n return undefined;\n }\n\n // Use actual segment durations if available (more accurate)\n if (\n rendition.segmentDurationsMs &&\n rendition.segmentDurationsMs.length > 
0\n ) {\n let cumulativeTime = 0;\n\n for (let i = 0; i < rendition.segmentDurationsMs.length; i++) {\n const segmentDuration = rendition.segmentDurationsMs[i];\n if (segmentDuration === undefined) {\n throw new Error(\"Segment duration is required for JIT metadata\");\n }\n const segmentStartMs = cumulativeTime;\n const segmentEndMs = cumulativeTime + segmentDuration;\n\n // Check if the desired seek time falls within this segment\n // Special case: for the last segment, include the exact end time\n const isLastSegment = i === rendition.segmentDurationsMs.length - 1;\n const includesEndTime =\n isLastSegment && desiredSeekTimeMs === this.durationMs;\n\n if (\n desiredSeekTimeMs >= segmentStartMs &&\n (desiredSeekTimeMs < segmentEndMs || includesEndTime)\n ) {\n return i + 1; // Convert 0-based to 1-based segment ID\n }\n\n cumulativeTime += segmentDuration;\n\n // If we've reached or exceeded file duration, stop\n if (cumulativeTime >= this.durationMs) {\n break;\n }\n }\n\n // If we didn't find a segment, return undefined\n return undefined;\n }\n\n // Fall back to fixed duration calculation for backward compatibility\n if (!rendition.segmentDurationMs) {\n throw new Error(\"Segment duration is required for JIT metadata\");\n }\n\n const segmentIndex = Math.floor(\n desiredSeekTimeMs / rendition.segmentDurationMs,\n );\n\n // Calculate the actual segment start time\n const segmentStartMs = segmentIndex * rendition.segmentDurationMs;\n\n // If this segment would start at or beyond file duration, it doesn't exist\n if (segmentStartMs >= this.durationMs) {\n return undefined;\n }\n\n return segmentIndex + 1; // Convert 0-based to 1-based\n }\n\n getScrubVideoRendition(): VideoRendition | undefined {\n if (!this.data.videoRenditions) return undefined;\n\n const scrubManifestRendition = this.data.videoRenditions.find(\n (r) => r.id === \"scrub\",\n );\n\n if (!scrubManifestRendition) return this.videoRendition; // Fallback to main\n\n return {\n id: scrubManifestRendition.id as any,\n trackId: undefined,\n src: this.src,\n segmentDurationMs: scrubManifestRendition.segmentDurationMs,\n segmentDurationsMs: scrubManifestRendition.segmentDurationsMs,\n };\n }\n\n /**\n * Get preferred buffer configuration for JIT transcoding\n * Uses higher buffering since transcoding introduces latency\n */\n getBufferConfig() {\n return {\n // Buffer more aggressively for JIT transcoding to smooth over latency\n videoBufferDurationMs: 8000,\n audioBufferDurationMs: 8000,\n maxVideoBufferFetches: 3,\n maxAudioBufferFetches: 3,\n bufferThresholdMs: 30000, // Timeline-aware buffering threshold\n };\n }\n\n /**\n * Extract thumbnail canvases using same rendition priority as video playback for frame alignment\n */\n async extractThumbnails(\n timestamps: number[],\n ): Promise<(ThumbnailResult | null)[]> {\n // Use same rendition priority as video: try main rendition first for frame alignment\n let rendition: VideoRendition;\n try {\n const mainRendition = this.getVideoRendition();\n if (mainRendition) {\n rendition = mainRendition;\n } else {\n const scrubRendition = this.getScrubVideoRendition();\n if (scrubRendition) {\n rendition = scrubRendition;\n } else {\n throw new Error(\"No video rendition available\");\n }\n }\n } catch (error) {\n console.warn(\n \"JitMediaEngine: No video rendition available for thumbnails\",\n error,\n );\n return timestamps.map(() => null);\n }\n\n // Use shared thumbnail extraction logic\n return this.thumbnailExtractor.extractThumbnails(\n timestamps,\n rendition,\n 
this.durationMs,\n );\n }\n\n convertToSegmentRelativeTimestamps(\n globalTimestamps: number[],\n _segmentId: number,\n _rendition: VideoRendition,\n ): number[] {\n return globalTimestamps.map((timestamp) => timestamp / 1000);\n }\n}\n"],"mappings":";;;;AAaA,IAAa,iBAAb,MAAa,uBAAuB,gBAAuC;CAKzE,aAAa,MAAM,MAAe,cAA4B,KAAa;EACzE,MAAM,SAAS,IAAI,eAAe,MAAM,aAAa;AAErD,SAAO,OADM,MAAM,OAAO,cAAc,IAAI;AAE5C,SAAO;;CAGT,YAAY,MAAe,cAA4B;AACrD,QAAM,KAAK;cAXoB,EAAE;AAYjC,OAAK,eAAe;AACpB,OAAK,qBAAqB,IAAI,mBAAmB,KAAK;;CAGxD,IAAI,aAAa;AACf,SAAO,KAAK,KAAK;;CAGnB,IAAI,MAAM;AACR,SAAO,KAAK,KAAK;;CAGnB,IAAI,iBAA6C;AAC/C,MAAI,CAAC,KAAK,KAAK,mBAAmB,KAAK,KAAK,gBAAgB,WAAW,EACrE;EAGF,MAAM,YAAY,KAAK,KAAK,gBAAgB;AAC5C,MAAI,CAAC,UAAW,QAAO;AAEvB,SAAO;GACL,IAAI,UAAU;GACd,SAAS;GACT,KAAK,KAAK,KAAK;GACf,mBAAmB,UAAU;GAC7B,oBAAoB,UAAU;GAC/B;;CAGH,IAAI,iBAA6C;AAC/C,MAAI,CAAC,KAAK,KAAK,mBAAmB,KAAK,KAAK,gBAAgB,WAAW,EACrE;EAGF,MAAM,YAAY,KAAK,KAAK,gBAAgB;AAC5C,MAAI,CAAC,UAAW,QAAO;AAEvB,SAAO;GACL,IAAI,UAAU;GACd,SAAS;GACT,KAAK,KAAK,KAAK;GACf,mBAAmB,UAAU;GAC7B,oBAAoB,UAAU;GAC/B;;CAGH,IAAI,YAAY;AACd,SAAO,KAAK,KAAK;;CAGnB,MAAM,iBACJ,WACA,QACA;AACA,MAAI,CAAC,UAAU,GACb,OAAM,IAAI,MAAM,4CAA4C;EAE9D,MAAM,MAAM,KAAK,aAAa,mBAC5B,QACA,UAAU,IACV,KACD;AAGD,SAAO,KAAK,WAAW,KAAK,OAAO;;CAGrC,MAAM,kBACJ,WACA,WACA;AACA,MAAI,CAAC,UAAU,GACb,OAAM,IAAI,MAAM,4CAA4C;EAE9D,MAAM,MAAM,KAAK,aAAa,mBAC5B,WACA,UAAU,IACV,KACD;AACD,SAAO,KAAK,WAAW,IAAI;;CAG7B,iBACE,mBACA,WACA;AAGA,MAAI,oBAAoB,KAAK,WAC3B;AAIF,MACE,UAAU,sBACV,UAAU,mBAAmB,SAAS,GACtC;GACA,IAAI,iBAAiB;AAErB,QAAK,IAAI,IAAI,GAAG,IAAI,UAAU,mBAAmB,QAAQ,KAAK;IAC5D,MAAM,kBAAkB,UAAU,mBAAmB;AACrD,QAAI,oBAAoB,OACtB,OAAM,IAAI,MAAM,gDAAgD;IAElE,MAAM,iBAAiB;IACvB,MAAM,eAAe,iBAAiB;IAKtC,MAAM,kBADgB,MAAM,UAAU,mBAAmB,SAAS,KAE/C,sBAAsB,KAAK;AAE9C,QACE,qBAAqB,mBACpB,oBAAoB,gBAAgB,iBAErC,QAAO,IAAI;AAGb,sBAAkB;AAGlB,QAAI,kBAAkB,KAAK,WACzB;;AAKJ;;AAIF,MAAI,CAAC,UAAU,kBACb,OAAM,IAAI,MAAM,gDAAgD;EAGlE,MAAM,eAAe,KAAK,MACxB,oBAAoB,UAAU,kBAC/B;AAMD,MAHuB,eAAe,UAAU,qBAG1B,KAAK,WACzB;AAGF,SAAO,eAAe;;CAGxB,yBAAqD;AACnD,MAAI,CAAC,KAAK,KAAK,gBAAiB,QAAO;EAEvC,MAAM,yBAAyB,KAAK,KAAK,gBAAgB,MACtD,MAAM,EAAE,OAAO,QACjB;AAED,MAAI,CAAC,uBAAwB,QAAO,KAAK;AAEzC,SAAO;GACL,IAAI,uBAAuB;GAC3B,SAAS;GACT,KAAK,KAAK;GACV,mBAAmB,uBAAuB;GAC1C,oBAAoB,uBAAuB;GAC5C;;;;;;CAOH,kBAAkB;AAChB,SAAO;GAEL,uBAAuB;GACvB,uBAAuB;GACvB,uBAAuB;GACvB,uBAAuB;GACvB,mBAAmB;GACpB;;;;;CAMH,MAAM,kBACJ,YACqC;EAErC,IAAIA;AACJ,MAAI;GACF,MAAM,gBAAgB,KAAK,mBAAmB;AAC9C,OAAI,cACF,aAAY;QACP;IACL,MAAM,iBAAiB,KAAK,wBAAwB;AACpD,QAAI,eACF,aAAY;QAEZ,OAAM,IAAI,MAAM,+BAA+B;;WAG5C,OAAO;AACd,WAAQ,KACN,+DACA,MACD;AACD,UAAO,WAAW,UAAU,KAAK;;AAInC,SAAO,KAAK,mBAAmB,kBAC7B,YACA,WACA,KAAK,WACN;;CAGH,mCACE,kBACA,YACA,YACU;AACV,SAAO,iBAAiB,KAAK,cAAc,YAAY,IAAK"}
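For orientation on the map above: JitMediaEngine.computeSegmentId (visible in its sourcesContent) walks the cumulative segmentDurationsMs entries and returns 1-based segment IDs, with a seek to exactly durationMs resolving to the last segment. A simplified self-contained sketch, omitting the fixed-segmentDurationMs fallback and the error throws; segmentIdFor is our name, not the library's:

// Simplified from JitMediaEngine.computeSegmentId in the sourcesContent above.
function segmentIdFor(
  seekMs: number,
  segmentDurationsMs: number[],
  durationMs: number,
): number | undefined {
  if (seekMs > durationMs) return undefined; // beyond the file: no segment
  let startMs = 0;
  for (let i = 0; i < segmentDurationsMs.length; i++) {
    const endMs = startMs + (segmentDurationsMs[i] ?? 0);
    const isLast = i === segmentDurationsMs.length - 1;
    // The last segment also owns the exact end-of-file timestamp.
    if (seekMs >= startMs && (seekMs < endMs || (isLast && seekMs === durationMs))) {
      return i + 1; // segment IDs are 1-based
    }
    startMs = endMs;
  }
  return undefined;
}

segmentIdFor(2500, [1000, 1000, 1000], 3000); // -> 3
segmentIdFor(3000, [1000, 1000, 1000], 3000); // -> 3 (seek to exact end allowed)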
@@ -26,11 +26,18 @@ const makeAudioBufferTask = (host) => {
  const mediaEngine = await getLatestMediaEngine(host, signal);
  if (!mediaEngine.audioRendition) return currentState;
  const engineConfig = mediaEngine.getBufferConfig();
- return manageMediaBuffer(seekTimeMs, {
+ const currentConfig = {
  bufferDurationMs: engineConfig.audioBufferDurationMs,
  maxParallelFetches: engineConfig.maxAudioBufferFetches,
- enableBuffering: host.enableAudioBuffering
- }, currentState, host.intrinsicDurationMs || 1e4, signal, {
+ enableBuffering: host.enableAudioBuffering,
+ bufferThresholdMs: engineConfig.bufferThresholdMs
+ };
+ const timelineContext = host.rootTimegroup?.currentTimeMs !== void 0 ? {
+ elementStartMs: host.startTimeMs,
+ elementEndMs: host.endTimeMs,
+ playheadMs: host.rootTimegroup.currentTimeMs
+ } : void 0;
+ return manageMediaBuffer(seekTimeMs, currentConfig, currentState, host.intrinsicDurationMs || 1e4, signal, {
  computeSegmentId: async (timeMs, rendition) => {
  return (await getLatestMediaEngine(host, signal)).computeSegmentId(timeMs, rendition);
  },
@@ -48,7 +55,7 @@ const makeAudioBufferTask = (host) => {
  return audioRendition;
  },
  logError: console.error
- });
+ }, timelineContext);
  }
  });
  };
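The rewritten task above now builds an explicit currentConfig (carrying the new bufferThresholdMs) plus an optional timelineContext derived from the root timegroup's playhead. A minimal sketch of that derivation, assuming a host shape like EFMedia's; the timelineContextFor helper is illustrative, not a library export:

// Shape of the context handed to manageMediaBuffer (field names from the diff).
interface TimelineContext {
  elementStartMs: number;
  elementEndMs: number;
  playheadMs: number;
}

// Hypothetical helper mirroring the inline ternary in the hunk above:
// no root-timegroup playhead means no context, and buffering proceeds ungated.
function timelineContextFor(host: {
  startTimeMs: number;
  endTimeMs: number;
  rootTimegroup?: { currentTimeMs?: number };
}): TimelineContext | undefined {
  const playheadMs = host.rootTimegroup?.currentTimeMs;
  if (playheadMs === undefined) return undefined;
  return {
    elementStartMs: host.startTimeMs,
    elementEndMs: host.endTimeMs,
    playheadMs,
  };
}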
@@ -1 +1 @@
- {"version":3,"file":"makeAudioBufferTask.js","names":["currentState: AudioBufferState","mediaEngine"],"sources":["../../../../src/elements/EFMedia/audioTasks/makeAudioBufferTask.ts"],"sourcesContent":["import { Task } from \"@lit/task\";\n\nimport { EF_INTERACTIVE } from \"../../../EF_INTERACTIVE\";\nimport { EF_RENDERING } from \"../../../EF_RENDERING\";\nimport type { AudioRendition } from \"../../../transcoding/types\";\nimport type { EFMedia } from \"../../EFMedia\";\nimport {\n type MediaBufferConfig,\n type MediaBufferState,\n manageMediaBuffer,\n} from \"../shared/BufferUtils\";\nimport { getLatestMediaEngine } from \"../tasks/makeMediaEngineTask\";\n\n/**\n * Configuration for audio buffering - extends the generic interface\n */\nexport interface AudioBufferConfig extends MediaBufferConfig {}\n\n/**\n * State of the audio buffer - uses the generic interface\n */\nexport interface AudioBufferState extends MediaBufferState {}\n\ntype AudioBufferTask = Task<readonly [number], AudioBufferState>;\nexport const makeAudioBufferTask = (host: EFMedia): AudioBufferTask => {\n let currentState: AudioBufferState = {\n currentSeekTimeMs: 0,\n requestedSegments: new Set(),\n activeRequests: new Set(),\n requestQueue: [],\n };\n\n return new Task(host, {\n autoRun: EF_INTERACTIVE, // Make lazy - only run when element becomes timeline-active\n args: () => [host.desiredSeekTimeMs] as const,\n onError: (error) => {\n console.error(\"audioBufferTask error\", error);\n },\n onComplete: (value) => {\n currentState = value;\n },\n task: async ([seekTimeMs], { signal }) => {\n // Skip buffering entirely in rendering mode\n if (EF_RENDERING()) {\n return currentState; // Return existing state without any buffering activity\n }\n\n // Get media engine to potentially override buffer configuration\n const mediaEngine = await getLatestMediaEngine(host, signal);\n\n // Return existing state if no audio rendition available\n if (!mediaEngine.audioRendition) {\n return currentState;\n }\n\n // Use media engine's buffer config, falling back to host properties\n const engineConfig = mediaEngine.getBufferConfig();\n const bufferDurationMs = engineConfig.audioBufferDurationMs;\n const maxParallelFetches = engineConfig.maxAudioBufferFetches;\n\n const currentConfig: AudioBufferConfig = {\n bufferDurationMs,\n maxParallelFetches,\n enableBuffering: host.enableAudioBuffering,\n };\n\n return manageMediaBuffer<AudioRendition>(\n seekTimeMs,\n currentConfig,\n currentState,\n (host as any).intrinsicDurationMs || 10000,\n signal,\n {\n computeSegmentId: async (timeMs, rendition) => {\n // Use media engine's computeSegmentId\n const mediaEngine = await getLatestMediaEngine(host, signal);\n return mediaEngine.computeSegmentId(timeMs, rendition);\n },\n prefetchSegment: async (segmentId, rendition) => {\n // Trigger prefetch through BaseMediaEngine - let it handle caching\n const mediaEngine = await getLatestMediaEngine(host, signal);\n await mediaEngine.fetchMediaSegment(segmentId, rendition);\n // Don't return data - just ensure it's cached in BaseMediaEngine\n },\n isSegmentCached: (segmentId, rendition) => {\n // Check if segment is already cached in BaseMediaEngine\n const mediaEngine = host.mediaEngineTask.value;\n if (!mediaEngine) return false;\n\n return mediaEngine.isSegmentCached(segmentId, rendition);\n },\n getRendition: async () => {\n const mediaEngine = await getLatestMediaEngine(host, signal);\n const audioRendition = mediaEngine.audioRendition;\n if (!audioRendition) {\n throw new Error(\"Audio rendition 
not available\");\n }\n return audioRendition;\n },\n logError: console.error,\n },\n );\n },\n });\n};\n"],"mappings":";;;;;;;AAwBA,MAAa,uBAAuB,SAAmC;CACrE,IAAIA,eAAiC;EACnC,mBAAmB;EACnB,mCAAmB,IAAI,KAAK;EAC5B,gCAAgB,IAAI,KAAK;EACzB,cAAc,EAAE;EACjB;AAED,QAAO,IAAI,KAAK,MAAM;EACpB,SAAS;EACT,YAAY,CAAC,KAAK,kBAAkB;EACpC,UAAU,UAAU;AAClB,WAAQ,MAAM,yBAAyB,MAAM;;EAE/C,aAAa,UAAU;AACrB,kBAAe;;EAEjB,MAAM,OAAO,CAAC,aAAa,EAAE,aAAa;AAExC,OAAI,cAAc,CAChB,QAAO;GAIT,MAAM,cAAc,MAAM,qBAAqB,MAAM,OAAO;AAG5D,OAAI,CAAC,YAAY,eACf,QAAO;GAIT,MAAM,eAAe,YAAY,iBAAiB;AAUlD,UAAO,kBACL,YAPuC;IACvC,kBAJuB,aAAa;IAKpC,oBAJyB,aAAa;IAKtC,iBAAiB,KAAK;IACvB,EAKC,cACC,KAAa,uBAAuB,KACrC,QACA;IACE,kBAAkB,OAAO,QAAQ,cAAc;AAG7C,aADoB,MAAM,qBAAqB,MAAM,OAAO,EACzC,iBAAiB,QAAQ,UAAU;;IAExD,iBAAiB,OAAO,WAAW,cAAc;AAG/C,YADoB,MAAM,qBAAqB,MAAM,OAAO,EAC1C,kBAAkB,WAAW,UAAU;;IAG3D,kBAAkB,WAAW,cAAc;KAEzC,MAAMC,gBAAc,KAAK,gBAAgB;AACzC,SAAI,CAACA,cAAa,QAAO;AAEzB,YAAOA,cAAY,gBAAgB,WAAW,UAAU;;IAE1D,cAAc,YAAY;KAExB,MAAM,kBADc,MAAM,qBAAqB,MAAM,OAAO,EACzB;AACnC,SAAI,CAAC,eACH,OAAM,IAAI,MAAM,gCAAgC;AAElD,YAAO;;IAET,UAAU,QAAQ;IACnB,CACF;;EAEJ,CAAC"}
+ {"version":3,"file":"makeAudioBufferTask.js","names":["currentState: AudioBufferState","currentConfig: AudioBufferConfig","mediaEngine"],"sources":["../../../../src/elements/EFMedia/audioTasks/makeAudioBufferTask.ts"],"sourcesContent":["import { Task } from \"@lit/task\";\n\nimport { EF_INTERACTIVE } from \"../../../EF_INTERACTIVE\";\nimport { EF_RENDERING } from \"../../../EF_RENDERING\";\nimport type { AudioRendition } from \"../../../transcoding/types\";\nimport type { EFMedia } from \"../../EFMedia\";\nimport {\n type MediaBufferConfig,\n type MediaBufferState,\n manageMediaBuffer,\n} from \"../shared/BufferUtils\";\nimport { getLatestMediaEngine } from \"../tasks/makeMediaEngineTask\";\n\n/**\n * Configuration for audio buffering - extends the generic interface\n */\nexport interface AudioBufferConfig extends MediaBufferConfig {}\n\n/**\n * State of the audio buffer - uses the generic interface\n */\nexport interface AudioBufferState extends MediaBufferState {}\n\ntype AudioBufferTask = Task<readonly [number], AudioBufferState>;\nexport const makeAudioBufferTask = (host: EFMedia): AudioBufferTask => {\n let currentState: AudioBufferState = {\n currentSeekTimeMs: 0,\n requestedSegments: new Set(),\n activeRequests: new Set(),\n requestQueue: [],\n };\n\n return new Task(host, {\n autoRun: EF_INTERACTIVE, // Make lazy - only run when element becomes timeline-active\n args: () => [host.desiredSeekTimeMs] as const,\n onError: (error) => {\n console.error(\"audioBufferTask error\", error);\n },\n onComplete: (value) => {\n currentState = value;\n },\n task: async ([seekTimeMs], { signal }) => {\n // Skip buffering entirely in rendering mode\n if (EF_RENDERING()) {\n return currentState; // Return existing state without any buffering activity\n }\n\n // Get media engine to potentially override buffer configuration\n const mediaEngine = await getLatestMediaEngine(host, signal);\n\n // Return existing state if no audio rendition available\n if (!mediaEngine.audioRendition) {\n return currentState;\n }\n\n // Use media engine's buffer config, falling back to host properties\n const engineConfig = mediaEngine.getBufferConfig();\n const bufferDurationMs = engineConfig.audioBufferDurationMs;\n const maxParallelFetches = engineConfig.maxAudioBufferFetches;\n\n const currentConfig: AudioBufferConfig = {\n bufferDurationMs,\n maxParallelFetches,\n enableBuffering: host.enableAudioBuffering,\n bufferThresholdMs: engineConfig.bufferThresholdMs,\n };\n\n // Timeline context for priority-based buffering\n const timelineContext =\n host.rootTimegroup?.currentTimeMs !== undefined\n ? 
{\n elementStartMs: host.startTimeMs,\n elementEndMs: host.endTimeMs,\n playheadMs: host.rootTimegroup.currentTimeMs,\n }\n : undefined;\n\n return manageMediaBuffer<AudioRendition>(\n seekTimeMs,\n currentConfig,\n currentState,\n (host as any).intrinsicDurationMs || 10000,\n signal,\n {\n computeSegmentId: async (timeMs, rendition) => {\n // Use media engine's computeSegmentId\n const mediaEngine = await getLatestMediaEngine(host, signal);\n return mediaEngine.computeSegmentId(timeMs, rendition);\n },\n prefetchSegment: async (segmentId, rendition) => {\n // Trigger prefetch through BaseMediaEngine - let it handle caching\n const mediaEngine = await getLatestMediaEngine(host, signal);\n await mediaEngine.fetchMediaSegment(segmentId, rendition);\n // Don't return data - just ensure it's cached in BaseMediaEngine\n },\n isSegmentCached: (segmentId, rendition) => {\n // Check if segment is already cached in BaseMediaEngine\n const mediaEngine = host.mediaEngineTask.value;\n if (!mediaEngine) return false;\n\n return mediaEngine.isSegmentCached(segmentId, rendition);\n },\n getRendition: async () => {\n const mediaEngine = await getLatestMediaEngine(host, signal);\n const audioRendition = mediaEngine.audioRendition;\n if (!audioRendition) {\n throw new Error(\"Audio rendition not available\");\n }\n return audioRendition;\n },\n logError: console.error,\n },\n timelineContext,\n );\n },\n });\n};\n"],"mappings":";;;;;;;AAwBA,MAAa,uBAAuB,SAAmC;CACrE,IAAIA,eAAiC;EACnC,mBAAmB;EACnB,mCAAmB,IAAI,KAAK;EAC5B,gCAAgB,IAAI,KAAK;EACzB,cAAc,EAAE;EACjB;AAED,QAAO,IAAI,KAAK,MAAM;EACpB,SAAS;EACT,YAAY,CAAC,KAAK,kBAAkB;EACpC,UAAU,UAAU;AAClB,WAAQ,MAAM,yBAAyB,MAAM;;EAE/C,aAAa,UAAU;AACrB,kBAAe;;EAEjB,MAAM,OAAO,CAAC,aAAa,EAAE,aAAa;AAExC,OAAI,cAAc,CAChB,QAAO;GAIT,MAAM,cAAc,MAAM,qBAAqB,MAAM,OAAO;AAG5D,OAAI,CAAC,YAAY,eACf,QAAO;GAIT,MAAM,eAAe,YAAY,iBAAiB;GAIlD,MAAMC,gBAAmC;IACvC,kBAJuB,aAAa;IAKpC,oBAJyB,aAAa;IAKtC,iBAAiB,KAAK;IACtB,mBAAmB,aAAa;IACjC;GAGD,MAAM,kBACJ,KAAK,eAAe,kBAAkB,SAClC;IACE,gBAAgB,KAAK;IACrB,cAAc,KAAK;IACnB,YAAY,KAAK,cAAc;IAChC,GACD;AAEN,UAAO,kBACL,YACA,eACA,cACC,KAAa,uBAAuB,KACrC,QACA;IACE,kBAAkB,OAAO,QAAQ,cAAc;AAG7C,aADoB,MAAM,qBAAqB,MAAM,OAAO,EACzC,iBAAiB,QAAQ,UAAU;;IAExD,iBAAiB,OAAO,WAAW,cAAc;AAG/C,YADoB,MAAM,qBAAqB,MAAM,OAAO,EAC1C,kBAAkB,WAAW,UAAU;;IAG3D,kBAAkB,WAAW,cAAc;KAEzC,MAAMC,gBAAc,KAAK,gBAAgB;AACzC,SAAI,CAACA,cAAa,QAAO;AAEzB,YAAOA,cAAY,gBAAgB,WAAW,UAAU;;IAE1D,cAAc,YAAY;KAExB,MAAM,kBADc,MAAM,qBAAqB,MAAM,OAAO,EACzB;AACnC,SAAI,CAAC,eACH,OAAM,IAAI,MAAM,gCAAgC;AAElD,YAAO;;IAET,UAAU,QAAQ;IACnB,EACD,gBACD;;EAEJ,CAAC"}
@@ -24,11 +24,26 @@ const computeBufferQueue = (desiredSegments, requestedSegments) => {
  return desiredSegments.filter((segmentId) => !requestedSegments.has(segmentId));
  };
  /**
+ * Calculate distance from element to playhead position
+ * Returns 0 if element is currently active, otherwise returns distance in milliseconds
+ */
+ const calculatePlayheadDistance = (element, playheadMs) => {
+ if (playheadMs < element.startTimeMs) return element.startTimeMs - playheadMs;
+ if (playheadMs > element.endTimeMs) return playheadMs - element.endTimeMs;
+ return 0;
+ };
+ /**
  * Core media buffering orchestration logic - prefetch only, no data storage
  * Integrates with BaseMediaEngine's existing caching and request deduplication
  */
- const manageMediaBuffer = async (seekTimeMs, config, currentState, durationMs, signal, deps) => {
+ const manageMediaBuffer = async (seekTimeMs, config, currentState, durationMs, signal, deps, timelineContext) => {
  if (!config.enableBuffering) return currentState;
+ if (timelineContext && config.bufferThresholdMs !== void 0) {
+ if (calculatePlayheadDistance({
+ startTimeMs: timelineContext.elementStartMs,
+ endTimeMs: timelineContext.elementEndMs
+ }, timelineContext.playheadMs) > config.bufferThresholdMs) return currentState;
+ }
  const rendition = await deps.getRendition();
  if (!rendition) return currentState;
  const newQueue = computeBufferQueue((await computeSegmentRangeAsync(seekTimeMs, seekTimeMs + config.bufferDurationMs, durationMs, rendition, deps.computeSegmentId)).filter((segmentId) => !deps.isSegmentCached(segmentId, rendition)), currentState.requestedSegments);
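The threshold gate above is easy to verify by hand: calculatePlayheadDistance returns 0 while the playhead sits inside the element's window, and the element-to-playhead gap otherwise; buffering is skipped once that gap exceeds bufferThresholdMs. Worked cases under the 30 s (3e4 ms) default; the function body is copied from the hunk, the example values are ours:

// Function copied from the hunk above; typed for standalone use.
const calculatePlayheadDistance = (
  element: { startTimeMs: number; endTimeMs: number },
  playheadMs: number,
): number => {
  if (playheadMs < element.startTimeMs) return element.startTimeMs - playheadMs;
  if (playheadMs > element.endTimeMs) return playheadMs - element.endTimeMs;
  return 0; // playhead is inside the element's window: always buffer
};

const clip = { startTimeMs: 60_000, endTimeMs: 90_000 };
calculatePlayheadDistance(clip, 70_000);  // 0      -> active, never gated
calculatePlayheadDistance(clip, 40_000);  // 20_000 -> under the 30 s threshold, still buffers
calculatePlayheadDistance(clip, 125_000); // 35_000 -> over the threshold, buffering skipped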
@@ -1 +1 @@
- {"version":3,"file":"BufferUtils.js","names":["segments: number[]"],"sources":["../../../../src/elements/EFMedia/shared/BufferUtils.ts"],"sourcesContent":["import type {\n AudioRendition,\n VideoRendition,\n} from \"../../../transcoding/types\";\n\n/**\n * State interface for media buffering - orchestration only, no data storage\n */\nexport interface MediaBufferState {\n currentSeekTimeMs: number;\n requestedSegments: Set<number>; // Segments we've requested for buffering\n activeRequests: Set<number>; // Segments currently being fetched\n requestQueue: number[]; // Segments queued to be requested\n}\n\n/**\n * Configuration interface for media buffering - generic for both audio and video\n */\nexport interface MediaBufferConfig {\n bufferDurationMs: number;\n maxParallelFetches: number;\n enableBuffering: boolean;\n enableContinuousBuffering?: boolean;\n}\n\n/**\n * Dependencies interface for media buffering - integrates with BaseMediaEngine\n */\nexport interface MediaBufferDependencies<\n T extends AudioRendition | VideoRendition,\n> {\n computeSegmentId: (\n timeMs: number,\n rendition: T,\n ) => Promise<number | undefined>;\n prefetchSegment: (segmentId: number, rendition: T) => Promise<void>; // Just trigger prefetch, don't return data\n isSegmentCached: (segmentId: number, rendition: T) => boolean; // Check BaseMediaEngine cache\n getRendition: () => Promise<T | undefined>;\n logError: (message: string, error: any) => void;\n}\n\n/**\n * Compute segment range for a time window\n * Pure function - determines which segments are needed for a time range\n */\nexport const computeSegmentRange = <T extends AudioRendition | VideoRendition>(\n startTimeMs: number,\n endTimeMs: number,\n rendition: T,\n computeSegmentId: (timeMs: number, rendition: T) => number | undefined,\n): number[] => {\n const segments: number[] = [];\n const segmentDurationMs = (rendition as any).segmentDurationMs || 1000;\n\n // Calculate segment indices that overlap with [startTimeMs, endTimeMs]\n const startSegmentIndex = Math.floor(startTimeMs / segmentDurationMs);\n const endSegmentIndex = Math.floor(endTimeMs / segmentDurationMs);\n\n for (let i = startSegmentIndex; i <= endSegmentIndex; i++) {\n const segmentId = computeSegmentId(i * segmentDurationMs, rendition);\n if (segmentId !== undefined) {\n segments.push(segmentId);\n }\n }\n\n return segments.filter((id, index, arr) => arr.indexOf(id) === index); // Remove duplicates\n};\n\n/**\n * Async version of computeSegmentRange for when computeSegmentId is async\n */\nexport const computeSegmentRangeAsync = async <\n T extends AudioRendition | VideoRendition,\n>(\n startTimeMs: number,\n endTimeMs: number,\n durationMs: number,\n rendition: T,\n computeSegmentId: (\n timeMs: number,\n rendition: T,\n ) => Promise<number | undefined>,\n): Promise<number[]> => {\n const segments: number[] = [];\n const segmentDurationMs = (rendition as any).segmentDurationMs || 1000;\n\n // Calculate segment indices that overlap with [startTimeMs, endTimeMs]\n const startSegmentIndex = Math.floor(startTimeMs / segmentDurationMs);\n const endSegmentIndex = Math.floor(\n Math.min(endTimeMs, durationMs) / segmentDurationMs,\n );\n\n for (let i = startSegmentIndex; i <= endSegmentIndex; i++) {\n const timeMs = i * segmentDurationMs;\n if (timeMs < durationMs) {\n const segmentId = await computeSegmentId(timeMs, rendition);\n if (segmentId !== undefined) {\n segments.push(segmentId);\n }\n }\n }\n\n return segments.filter((id, index, arr) => arr.indexOf(id) === index); // Remove 
duplicates\n};\n\n/**\n * Compute buffer queue based on desired segments and what we've already requested\n * Pure function - determines what new segments should be prefetched\n */\nexport const computeBufferQueue = (\n desiredSegments: number[],\n requestedSegments: Set<number>,\n): number[] => {\n return desiredSegments.filter(\n (segmentId) => !requestedSegments.has(segmentId),\n );\n};\n\n/**\n * Handle seek time change and recompute buffer queue\n * Pure function - computes new queue when seek time changes\n */\nexport const handleSeekTimeChange = <T extends AudioRendition | VideoRendition>(\n newSeekTimeMs: number,\n bufferDurationMs: number,\n rendition: T,\n currentState: MediaBufferState,\n computeSegmentId: (timeMs: number, rendition: T) => number | undefined,\n): { newQueue: number[]; overlappingRequests: number[] } => {\n const endTimeMs = newSeekTimeMs + bufferDurationMs;\n const desiredSegments = computeSegmentRange(\n newSeekTimeMs,\n endTimeMs,\n rendition,\n computeSegmentId,\n );\n\n // Find segments that are already being requested\n const overlappingRequests = desiredSegments.filter((segmentId) =>\n currentState.requestedSegments.has(segmentId),\n );\n\n const newQueue = computeBufferQueue(\n desiredSegments,\n currentState.requestedSegments,\n );\n\n return { newQueue, overlappingRequests };\n};\n\n/**\n * Check if a segment has been requested for buffering\n * Pure function for checking buffer orchestration state\n */\nexport const isSegmentRequested = (\n segmentId: number,\n bufferState: MediaBufferState | undefined,\n): boolean => {\n return bufferState?.requestedSegments.has(segmentId) ?? false;\n};\n\n/**\n * Get requested segments from a list of segment IDs\n * Pure function that returns which segments have been requested for buffering\n */\nexport const getRequestedSegments = (\n segmentIds: number[],\n bufferState: MediaBufferState | undefined,\n): Set<number> => {\n if (!bufferState) {\n return new Set();\n }\n return new Set(\n segmentIds.filter((id) => bufferState.requestedSegments.has(id)),\n );\n};\n\n/**\n * Get unrequested segments from a list of segment IDs\n * Pure function that returns which segments haven't been requested yet\n */\nexport const getUnrequestedSegments = (\n segmentIds: number[],\n bufferState: MediaBufferState | undefined,\n): number[] => {\n if (!bufferState) {\n return segmentIds;\n }\n return segmentIds.filter((id) => !bufferState.requestedSegments.has(id));\n};\n\n/**\n * Core media buffering orchestration logic - prefetch only, no data storage\n * Integrates with BaseMediaEngine's existing caching and request deduplication\n */\nexport const manageMediaBuffer = async <\n T extends AudioRendition | VideoRendition,\n>(\n seekTimeMs: number,\n config: MediaBufferConfig,\n currentState: MediaBufferState,\n durationMs: number,\n signal: AbortSignal,\n deps: MediaBufferDependencies<T>,\n): Promise<MediaBufferState> => {\n if (!config.enableBuffering) {\n return currentState;\n }\n\n const rendition = await deps.getRendition();\n if (!rendition) {\n // Cannot buffer without a rendition\n return currentState;\n }\n const endTimeMs = seekTimeMs + config.bufferDurationMs;\n\n const desiredSegments = await computeSegmentRangeAsync(\n seekTimeMs,\n endTimeMs,\n durationMs,\n rendition,\n deps.computeSegmentId,\n );\n // Filter out segments already cached by BaseMediaEngine\n const uncachedSegments = desiredSegments.filter(\n (segmentId) => !deps.isSegmentCached(segmentId, rendition),\n );\n\n const newQueue = computeBufferQueue(\n 
uncachedSegments,\n currentState.requestedSegments,\n );\n\n // Shared state for concurrency control - prevents race conditions\n const newRequestedSegments = new Set(currentState.requestedSegments);\n const newActiveRequests = new Set(currentState.activeRequests);\n const remainingQueue = [...newQueue];\n\n // Thread-safe function to start next segment when slot becomes available\n const startNextSegment = (): void => {\n // Check if we have capacity and segments to fetch\n if (\n newActiveRequests.size >= config.maxParallelFetches ||\n remainingQueue.length === 0 ||\n signal.aborted\n ) {\n return;\n }\n\n const nextSegmentId = remainingQueue.shift();\n if (nextSegmentId === undefined) return;\n\n // Skip if already requested or now cached\n if (\n newRequestedSegments.has(nextSegmentId) ||\n deps.isSegmentCached(nextSegmentId, rendition)\n ) {\n startNextSegment(); // Try next segment immediately\n return;\n }\n\n newRequestedSegments.add(nextSegmentId);\n newActiveRequests.add(nextSegmentId);\n\n // Start the prefetch request\n deps\n .prefetchSegment(nextSegmentId, rendition)\n .then(() => {\n if (signal.aborted) return;\n newActiveRequests.delete(nextSegmentId);\n // Start next segment if continuous buffering is enabled\n if (config.enableContinuousBuffering ?? true) {\n startNextSegment();\n }\n })\n .catch((error) => {\n if (signal.aborted) return;\n newActiveRequests.delete(nextSegmentId);\n deps.logError(`Failed to prefetch segment ${nextSegmentId}`, error);\n // Continue even after error if continuous buffering is enabled\n if (config.enableContinuousBuffering ?? true) {\n startNextSegment();\n }\n });\n };\n\n // Start initial batch of requests up to maxParallelFetches limit\n const initialBatchSize = Math.min(config.maxParallelFetches, newQueue.length);\n for (let i = 0; i < initialBatchSize; i++) {\n startNextSegment();\n }\n\n const result = {\n currentSeekTimeMs: seekTimeMs,\n requestedSegments: newRequestedSegments,\n activeRequests: newActiveRequests,\n requestQueue: remainingQueue, // What's left in the queue\n };\n return 
result;\n};\n"],"mappings":";;;;AAuEA,MAAa,2BAA2B,OAGtC,aACA,WACA,YACA,WACA,qBAIsB;CACtB,MAAMA,WAAqB,EAAE;CAC7B,MAAM,oBAAqB,UAAkB,qBAAqB;CAGlE,MAAM,oBAAoB,KAAK,MAAM,cAAc,kBAAkB;CACrE,MAAM,kBAAkB,KAAK,MAC3B,KAAK,IAAI,WAAW,WAAW,GAAG,kBACnC;AAED,MAAK,IAAI,IAAI,mBAAmB,KAAK,iBAAiB,KAAK;EACzD,MAAM,SAAS,IAAI;AACnB,MAAI,SAAS,YAAY;GACvB,MAAM,YAAY,MAAM,iBAAiB,QAAQ,UAAU;AAC3D,OAAI,cAAc,OAChB,UAAS,KAAK,UAAU;;;AAK9B,QAAO,SAAS,QAAQ,IAAI,OAAO,QAAQ,IAAI,QAAQ,GAAG,KAAK,MAAM;;;;;;AAOvE,MAAa,sBACX,iBACA,sBACa;AACb,QAAO,gBAAgB,QACpB,cAAc,CAAC,kBAAkB,IAAI,UAAU,CACjD;;;;;;AAgFH,MAAa,oBAAoB,OAG/B,YACA,QACA,cACA,YACA,QACA,SAC8B;AAC9B,KAAI,CAAC,OAAO,gBACV,QAAO;CAGT,MAAM,YAAY,MAAM,KAAK,cAAc;AAC3C,KAAI,CAAC,UAEH,QAAO;CAgBT,MAAM,WAAW,oBAZO,MAAM,yBAC5B,YAHgB,aAAa,OAAO,kBAKpC,YACA,WACA,KAAK,iBACN,EAEwC,QACtC,cAAc,CAAC,KAAK,gBAAgB,WAAW,UAAU,CAC3D,EAIC,aAAa,kBACd;CAGD,MAAM,uBAAuB,IAAI,IAAI,aAAa,kBAAkB;CACpE,MAAM,oBAAoB,IAAI,IAAI,aAAa,eAAe;CAC9D,MAAM,iBAAiB,CAAC,GAAG,SAAS;CAGpC,MAAM,yBAA+B;AAEnC,MACE,kBAAkB,QAAQ,OAAO,sBACjC,eAAe,WAAW,KAC1B,OAAO,QAEP;EAGF,MAAM,gBAAgB,eAAe,OAAO;AAC5C,MAAI,kBAAkB,OAAW;AAGjC,MACE,qBAAqB,IAAI,cAAc,IACvC,KAAK,gBAAgB,eAAe,UAAU,EAC9C;AACA,qBAAkB;AAClB;;AAGF,uBAAqB,IAAI,cAAc;AACvC,oBAAkB,IAAI,cAAc;AAGpC,OACG,gBAAgB,eAAe,UAAU,CACzC,WAAW;AACV,OAAI,OAAO,QAAS;AACpB,qBAAkB,OAAO,cAAc;AAEvC,OAAI,OAAO,6BAA6B,KACtC,mBAAkB;IAEpB,CACD,OAAO,UAAU;AAChB,OAAI,OAAO,QAAS;AACpB,qBAAkB,OAAO,cAAc;AACvC,QAAK,SAAS,8BAA8B,iBAAiB,MAAM;AAEnE,OAAI,OAAO,6BAA6B,KACtC,mBAAkB;IAEpB;;CAIN,MAAM,mBAAmB,KAAK,IAAI,OAAO,oBAAoB,SAAS,OAAO;AAC7E,MAAK,IAAI,IAAI,GAAG,IAAI,kBAAkB,IACpC,mBAAkB;AASpB,QANe;EACb,mBAAmB;EACnB,mBAAmB;EACnB,gBAAgB;EAChB,cAAc;EACf"}
1
+ {"version":3,"file":"BufferUtils.js","names":["segments: number[]"],"sources":["../../../../src/elements/EFMedia/shared/BufferUtils.ts"],"sourcesContent":["import type {\n AudioRendition,\n VideoRendition,\n} from \"../../../transcoding/types\";\n\n/**\n * State interface for media buffering - orchestration only, no data storage\n */\nexport interface MediaBufferState {\n currentSeekTimeMs: number;\n requestedSegments: Set<number>; // Segments we've requested for buffering\n activeRequests: Set<number>; // Segments currently being fetched\n requestQueue: number[]; // Segments queued to be requested\n}\n\n/**\n * Configuration interface for media buffering - generic for both audio and video\n */\nexport interface MediaBufferConfig {\n bufferDurationMs: number;\n maxParallelFetches: number;\n enableBuffering: boolean;\n enableContinuousBuffering?: boolean;\n bufferThresholdMs?: number; // Timeline-aware buffering threshold (default: 30000ms)\n}\n\n/**\n * Dependencies interface for media buffering - integrates with BaseMediaEngine\n */\nexport interface MediaBufferDependencies<\n T extends AudioRendition | VideoRendition,\n> {\n computeSegmentId: (\n timeMs: number,\n rendition: T,\n ) => Promise<number | undefined>;\n prefetchSegment: (segmentId: number, rendition: T) => Promise<void>; // Just trigger prefetch, don't return data\n isSegmentCached: (segmentId: number, rendition: T) => boolean; // Check BaseMediaEngine cache\n getRendition: () => Promise<T | undefined>;\n logError: (message: string, error: any) => void;\n}\n\n/**\n * Compute segment range for a time window\n * Pure function - determines which segments are needed for a time range\n */\nexport const computeSegmentRange = <T extends AudioRendition | VideoRendition>(\n startTimeMs: number,\n endTimeMs: number,\n rendition: T,\n computeSegmentId: (timeMs: number, rendition: T) => number | undefined,\n): number[] => {\n const segments: number[] = [];\n const segmentDurationMs = (rendition as any).segmentDurationMs || 1000;\n\n // Calculate segment indices that overlap with [startTimeMs, endTimeMs]\n const startSegmentIndex = Math.floor(startTimeMs / segmentDurationMs);\n const endSegmentIndex = Math.floor(endTimeMs / segmentDurationMs);\n\n for (let i = startSegmentIndex; i <= endSegmentIndex; i++) {\n const segmentId = computeSegmentId(i * segmentDurationMs, rendition);\n if (segmentId !== undefined) {\n segments.push(segmentId);\n }\n }\n\n return segments.filter((id, index, arr) => arr.indexOf(id) === index); // Remove duplicates\n};\n\n/**\n * Async version of computeSegmentRange for when computeSegmentId is async\n */\nexport const computeSegmentRangeAsync = async <\n T extends AudioRendition | VideoRendition,\n>(\n startTimeMs: number,\n endTimeMs: number,\n durationMs: number,\n rendition: T,\n computeSegmentId: (\n timeMs: number,\n rendition: T,\n ) => Promise<number | undefined>,\n): Promise<number[]> => {\n const segments: number[] = [];\n const segmentDurationMs = (rendition as any).segmentDurationMs || 1000;\n\n // Calculate segment indices that overlap with [startTimeMs, endTimeMs]\n const startSegmentIndex = Math.floor(startTimeMs / segmentDurationMs);\n const endSegmentIndex = Math.floor(\n Math.min(endTimeMs, durationMs) / segmentDurationMs,\n );\n\n for (let i = startSegmentIndex; i <= endSegmentIndex; i++) {\n const timeMs = i * segmentDurationMs;\n if (timeMs < durationMs) {\n const segmentId = await computeSegmentId(timeMs, rendition);\n if (segmentId !== undefined) {\n segments.push(segmentId);\n }\n }\n 
}\n\n return segments.filter((id, index, arr) => arr.indexOf(id) === index); // Remove duplicates\n};\n\n/**\n * Compute buffer queue based on desired segments and what we've already requested\n * Pure function - determines what new segments should be prefetched\n */\nexport const computeBufferQueue = (\n desiredSegments: number[],\n requestedSegments: Set<number>,\n): number[] => {\n return desiredSegments.filter(\n (segmentId) => !requestedSegments.has(segmentId),\n );\n};\n\n/**\n * Handle seek time change and recompute buffer queue\n * Pure function - computes new queue when seek time changes\n */\nexport const handleSeekTimeChange = <T extends AudioRendition | VideoRendition>(\n newSeekTimeMs: number,\n bufferDurationMs: number,\n rendition: T,\n currentState: MediaBufferState,\n computeSegmentId: (timeMs: number, rendition: T) => number | undefined,\n): { newQueue: number[]; overlappingRequests: number[] } => {\n const endTimeMs = newSeekTimeMs + bufferDurationMs;\n const desiredSegments = computeSegmentRange(\n newSeekTimeMs,\n endTimeMs,\n rendition,\n computeSegmentId,\n );\n\n // Find segments that are already being requested\n const overlappingRequests = desiredSegments.filter((segmentId) =>\n currentState.requestedSegments.has(segmentId),\n );\n\n const newQueue = computeBufferQueue(\n desiredSegments,\n currentState.requestedSegments,\n );\n\n return { newQueue, overlappingRequests };\n};\n\n/**\n * Check if a segment has been requested for buffering\n * Pure function for checking buffer orchestration state\n */\nexport const isSegmentRequested = (\n segmentId: number,\n bufferState: MediaBufferState | undefined,\n): boolean => {\n return bufferState?.requestedSegments.has(segmentId) ?? false;\n};\n\n/**\n * Get requested segments from a list of segment IDs\n * Pure function that returns which segments have been requested for buffering\n */\nexport const getRequestedSegments = (\n segmentIds: number[],\n bufferState: MediaBufferState | undefined,\n): Set<number> => {\n if (!bufferState) {\n return new Set();\n }\n return new Set(\n segmentIds.filter((id) => bufferState.requestedSegments.has(id)),\n );\n};\n\n/**\n * Get unrequested segments from a list of segment IDs\n * Pure function that returns which segments haven't been requested yet\n */\nexport const getUnrequestedSegments = (\n segmentIds: number[],\n bufferState: MediaBufferState | undefined,\n): number[] => {\n if (!bufferState) {\n return segmentIds;\n }\n return segmentIds.filter((id) => !bufferState.requestedSegments.has(id));\n};\n\n/**\n * Calculate distance from element to playhead position\n * Returns 0 if element is currently active, otherwise returns distance in milliseconds\n */\nexport const calculatePlayheadDistance = (\n element: { startTimeMs: number; endTimeMs: number },\n playheadMs: number,\n): number => {\n // Element hasn't started yet\n if (playheadMs < element.startTimeMs) {\n return element.startTimeMs - playheadMs;\n }\n // Element already finished\n if (playheadMs > element.endTimeMs) {\n return playheadMs - element.endTimeMs;\n }\n // Element is currently active\n return 0;\n};\n\n/**\n * Core media buffering orchestration logic - prefetch only, no data storage\n * Integrates with BaseMediaEngine's existing caching and request deduplication\n */\nexport const manageMediaBuffer = async <\n T extends AudioRendition | VideoRendition,\n>(\n seekTimeMs: number,\n config: MediaBufferConfig,\n currentState: MediaBufferState,\n durationMs: number,\n signal: AbortSignal,\n deps: 
MediaBufferDependencies<T>,\n timelineContext?: {\n elementStartMs: number;\n elementEndMs: number;\n playheadMs: number;\n },\n): Promise<MediaBufferState> => {\n if (!config.enableBuffering) {\n return currentState;\n }\n\n // Timeline-aware buffering: skip if element is too far from playhead\n if (timelineContext && config.bufferThresholdMs !== undefined) {\n const distance = calculatePlayheadDistance(\n {\n startTimeMs: timelineContext.elementStartMs,\n endTimeMs: timelineContext.elementEndMs,\n },\n timelineContext.playheadMs,\n );\n\n if (distance > config.bufferThresholdMs) {\n // Element is too far from playhead, skip buffering\n return currentState;\n }\n }\n\n const rendition = await deps.getRendition();\n if (!rendition) {\n // Cannot buffer without a rendition\n return currentState;\n }\n const endTimeMs = seekTimeMs + config.bufferDurationMs;\n\n const desiredSegments = await computeSegmentRangeAsync(\n seekTimeMs,\n endTimeMs,\n durationMs,\n rendition,\n deps.computeSegmentId,\n );\n // Filter out segments already cached by BaseMediaEngine\n const uncachedSegments = desiredSegments.filter(\n (segmentId) => !deps.isSegmentCached(segmentId, rendition),\n );\n\n const newQueue = computeBufferQueue(\n uncachedSegments,\n currentState.requestedSegments,\n );\n\n // Shared state for concurrency control - prevents race conditions\n const newRequestedSegments = new Set(currentState.requestedSegments);\n const newActiveRequests = new Set(currentState.activeRequests);\n const remainingQueue = [...newQueue];\n\n // Thread-safe function to start next segment when slot becomes available\n const startNextSegment = (): void => {\n // Check if we have capacity and segments to fetch\n if (\n newActiveRequests.size >= config.maxParallelFetches ||\n remainingQueue.length === 0 ||\n signal.aborted\n ) {\n return;\n }\n\n const nextSegmentId = remainingQueue.shift();\n if (nextSegmentId === undefined) return;\n\n // Skip if already requested or now cached\n if (\n newRequestedSegments.has(nextSegmentId) ||\n deps.isSegmentCached(nextSegmentId, rendition)\n ) {\n startNextSegment(); // Try next segment immediately\n return;\n }\n\n newRequestedSegments.add(nextSegmentId);\n newActiveRequests.add(nextSegmentId);\n\n // Start the prefetch request\n deps\n .prefetchSegment(nextSegmentId, rendition)\n .then(() => {\n if (signal.aborted) return;\n newActiveRequests.delete(nextSegmentId);\n // Start next segment if continuous buffering is enabled\n if (config.enableContinuousBuffering ?? true) {\n startNextSegment();\n }\n })\n .catch((error) => {\n if (signal.aborted) return;\n newActiveRequests.delete(nextSegmentId);\n deps.logError(`Failed to prefetch segment ${nextSegmentId}`, error);\n // Continue even after error if continuous buffering is enabled\n if (config.enableContinuousBuffering ?? 
true) {\n startNextSegment();\n }\n });\n };\n\n // Start initial batch of requests up to maxParallelFetches limit\n const initialBatchSize = Math.min(config.maxParallelFetches, newQueue.length);\n for (let i = 0; i < initialBatchSize; i++) {\n startNextSegment();\n }\n\n const result = {\n currentSeekTimeMs: seekTimeMs,\n requestedSegments: newRequestedSegments,\n activeRequests: newActiveRequests,\n requestQueue: remainingQueue, // What's left in the queue\n };\n return result;\n};\n"],"mappings":";;;;AAwEA,MAAa,2BAA2B,OAGtC,aACA,WACA,YACA,WACA,qBAIsB;CACtB,MAAMA,WAAqB,EAAE;CAC7B,MAAM,oBAAqB,UAAkB,qBAAqB;CAGlE,MAAM,oBAAoB,KAAK,MAAM,cAAc,kBAAkB;CACrE,MAAM,kBAAkB,KAAK,MAC3B,KAAK,IAAI,WAAW,WAAW,GAAG,kBACnC;AAED,MAAK,IAAI,IAAI,mBAAmB,KAAK,iBAAiB,KAAK;EACzD,MAAM,SAAS,IAAI;AACnB,MAAI,SAAS,YAAY;GACvB,MAAM,YAAY,MAAM,iBAAiB,QAAQ,UAAU;AAC3D,OAAI,cAAc,OAChB,UAAS,KAAK,UAAU;;;AAK9B,QAAO,SAAS,QAAQ,IAAI,OAAO,QAAQ,IAAI,QAAQ,GAAG,KAAK,MAAM;;;;;;AAOvE,MAAa,sBACX,iBACA,sBACa;AACb,QAAO,gBAAgB,QACpB,cAAc,CAAC,kBAAkB,IAAI,UAAU,CACjD;;;;;;AAgFH,MAAa,6BACX,SACA,eACW;AAEX,KAAI,aAAa,QAAQ,YACvB,QAAO,QAAQ,cAAc;AAG/B,KAAI,aAAa,QAAQ,UACvB,QAAO,aAAa,QAAQ;AAG9B,QAAO;;;;;;AAOT,MAAa,oBAAoB,OAG/B,YACA,QACA,cACA,YACA,QACA,MACA,oBAK8B;AAC9B,KAAI,CAAC,OAAO,gBACV,QAAO;AAIT,KAAI,mBAAmB,OAAO,sBAAsB,QASlD;MARiB,0BACf;GACE,aAAa,gBAAgB;GAC7B,WAAW,gBAAgB;GAC5B,EACD,gBAAgB,WACjB,GAEc,OAAO,kBAEpB,QAAO;;CAIX,MAAM,YAAY,MAAM,KAAK,cAAc;AAC3C,KAAI,CAAC,UAEH,QAAO;CAgBT,MAAM,WAAW,oBAZO,MAAM,yBAC5B,YAHgB,aAAa,OAAO,kBAKpC,YACA,WACA,KAAK,iBACN,EAEwC,QACtC,cAAc,CAAC,KAAK,gBAAgB,WAAW,UAAU,CAC3D,EAIC,aAAa,kBACd;CAGD,MAAM,uBAAuB,IAAI,IAAI,aAAa,kBAAkB;CACpE,MAAM,oBAAoB,IAAI,IAAI,aAAa,eAAe;CAC9D,MAAM,iBAAiB,CAAC,GAAG,SAAS;CAGpC,MAAM,yBAA+B;AAEnC,MACE,kBAAkB,QAAQ,OAAO,sBACjC,eAAe,WAAW,KAC1B,OAAO,QAEP;EAGF,MAAM,gBAAgB,eAAe,OAAO;AAC5C,MAAI,kBAAkB,OAAW;AAGjC,MACE,qBAAqB,IAAI,cAAc,IACvC,KAAK,gBAAgB,eAAe,UAAU,EAC9C;AACA,qBAAkB;AAClB;;AAGF,uBAAqB,IAAI,cAAc;AACvC,oBAAkB,IAAI,cAAc;AAGpC,OACG,gBAAgB,eAAe,UAAU,CACzC,WAAW;AACV,OAAI,OAAO,QAAS;AACpB,qBAAkB,OAAO,cAAc;AAEvC,OAAI,OAAO,6BAA6B,KACtC,mBAAkB;IAEpB,CACD,OAAO,UAAU;AAChB,OAAI,OAAO,QAAS;AACpB,qBAAkB,OAAO,cAAc;AACvC,QAAK,SAAS,8BAA8B,iBAAiB,MAAM;AAEnE,OAAI,OAAO,6BAA6B,KACtC,mBAAkB;IAEpB;;CAIN,MAAM,mBAAmB,KAAK,IAAI,OAAO,oBAAoB,SAAS,OAAO;AAC7E,MAAK,IAAI,IAAI,GAAG,IAAI,kBAAkB,IACpC,mBAAkB;AASpB,QANe;EACb,mBAAmB;EACnB,mBAAmB;EACnB,gBAAgB;EAChB,cAAc;EACf"}
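Two changes stand out in the updated source embedded above: MediaBufferConfig gains an optional bufferThresholdMs, and a new calculatePlayheadDistance helper plus an optional timelineContext parameter let manageMediaBuffer skip prefetching for elements far from the playhead. The sketch below shows how that gate behaves; calculatePlayheadDistance is copied from the embedded source, while the clip timings and the 30000 ms threshold (the default named in the config comment) are illustrative values.

// Sketch of the playhead-distance gate, assuming nothing beyond the
// embedded source above. Clip timings and threshold are made up.
interface TimedElement {
  startTimeMs: number;
  endTimeMs: number;
}

const calculatePlayheadDistance = (
  element: TimedElement,
  playheadMs: number,
): number => {
  // Element hasn't started yet
  if (playheadMs < element.startTimeMs) return element.startTimeMs - playheadMs;
  // Element already finished
  if (playheadMs > element.endTimeMs) return playheadMs - element.endTimeMs;
  // Element is currently active
  return 0;
};

const bufferThresholdMs = 30_000;
const clip: TimedElement = { startTimeMs: 60_000, endTimeMs: 90_000 };

// Playhead at 10 s: the clip is 50 s away, past the threshold, so
// manageMediaBuffer would return the current state without prefetching.
console.log(calculatePlayheadDistance(clip, 10_000) > bufferThresholdMs); // true

// Playhead at 45 s: only 15 s away, so buffering proceeds as before.
console.log(calculatePlayheadDistance(clip, 45_000) > bufferThresholdMs); // false

Note that the gate is doubly opt-in: manageMediaBuffer only consults it when both a timelineContext and a bufferThresholdMs are supplied, so callers that pass neither keep the old behavior. The compiled makeVideoBufferTask change below wires the context in on the video side.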
@@ -24,11 +24,18 @@ const makeVideoBufferTask = (host) => {
  task: async ([seekTimeMs], { signal }) => {
  if (EF_RENDERING()) return currentState;
  const engineConfig = (await getLatestMediaEngine(host, signal)).getBufferConfig();
- return manageMediaBuffer(seekTimeMs, {
+ const currentConfig = {
  bufferDurationMs: engineConfig.videoBufferDurationMs,
  maxParallelFetches: engineConfig.maxVideoBufferFetches,
- enableBuffering: host.enableVideoBuffering
- }, currentState, host.intrinsicDurationMs || 1e4, signal, {
+ enableBuffering: host.enableVideoBuffering,
+ bufferThresholdMs: engineConfig.bufferThresholdMs
+ };
+ const timelineContext = host.rootTimegroup?.currentTimeMs !== void 0 ? {
+ elementStartMs: host.startTimeMs,
+ elementEndMs: host.endTimeMs,
+ playheadMs: host.rootTimegroup.currentTimeMs
+ } : void 0;
+ return manageMediaBuffer(seekTimeMs, currentConfig, currentState, host.intrinsicDurationMs || 1e4, signal, {
  computeSegmentId: async (timeMs, rendition) => {
  return (await getLatestMediaEngine(host, signal)).computeSegmentId(timeMs, rendition);
  },
@@ -44,7 +51,7 @@ const makeVideoBufferTask = (host) => {
  return (await getLatestMediaEngine(host, signal)).getVideoRendition();
  },
  logError: console.error
- });
+ }, timelineContext);
  }
  });
  };
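In the hunk above, the video buffer task now builds its config explicitly (adding bufferThresholdMs from the engine config) and derives a timelineContext from the host's root timegroup before delegating to manageMediaBuffer. The sketch below isolates that guard; the Host shape is hypothetical and stands in for the real element class, while the optional-chaining check and the context fields mirror the diff.

// Hypothetical Host shape for illustration; only the guard and the
// context fields come from the compiled change above.
interface Host {
  startTimeMs: number;
  endTimeMs: number;
  rootTimegroup?: { currentTimeMs?: number };
}

interface TimelineContext {
  elementStartMs: number;
  elementEndMs: number;
  playheadMs: number;
}

// Pass a context only when a root timegroup exposes a playhead;
// otherwise pass undefined and buffering stays ungated.
const makeTimelineContext = (host: Host): TimelineContext | undefined =>
  host.rootTimegroup?.currentTimeMs !== undefined
    ? {
        elementStartMs: host.startTimeMs,
        elementEndMs: host.endTimeMs,
        playheadMs: host.rootTimegroup.currentTimeMs,
      }
    : undefined;

// Detached element: no timegroup, so no gating context.
console.log(makeTimelineContext({ startTimeMs: 0, endTimeMs: 5_000 }));
// -> undefined

// Mounted element: the context carries the playhead for the distance check.
console.log(
  makeTimelineContext({
    startTimeMs: 0,
    endTimeMs: 5_000,
    rootTimegroup: { currentTimeMs: 2_500 },
  }),
);
// -> { elementStartMs: 0, elementEndMs: 5000, playheadMs: 2500 }

Because the context argument is optional, detached elements and any callers that predate this version fall through to the previous unconditional buffering path.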