@vindral/web-sdk 2.3.0 → 3.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.d.ts CHANGED
@@ -1,83 +1,5 @@
1
- declare type Type = "audio" | "video";
2
- interface BaseCodedSample {
3
- channelId: string;
4
- isSync: boolean;
5
- renditionId: number;
6
- timestamp: number;
7
- timescale: number;
8
- duration: number;
9
- data: ArrayBuffer;
10
- compositionTimeOffset?: number;
11
- }
12
- interface CodedAudioSample extends BaseCodedSample {
13
- type: "audio";
14
- codec: AudioCodec;
15
- channels: number;
16
- sampleRate: number;
17
- language?: string;
18
- }
19
- interface CodedVideoSample extends BaseCodedSample {
20
- type: "video";
21
- codec: VideoCodec;
22
- width: number;
23
- height: number;
24
- buffer?: ArrayBuffer;
25
- }
26
- declare type CodedSample = CodedAudioSample | CodedVideoSample;
27
1
  declare type AudioCodec = "aac" | "opus" | "mp3";
28
2
  declare type VideoCodec = "h264" | "av1";
29
- declare type Codec = VideoCodec | AudioCodec;
30
- interface DecodedSampleStatistics {
31
- decodeTime: number;
32
- transportTimeFromWorker: number;
33
- transportTimeToWorker: number;
34
- samplesInBatch: number;
35
- }
36
- interface DecodedVideoSample {
37
- type: "video";
38
- colorPrimaries: "bt709";
39
- channelId: string;
40
- isSync: boolean;
41
- renditionId: number;
42
- timestamp: number;
43
- timescale: number;
44
- duration: number;
45
- width: number;
46
- height: number;
47
- codedWidth: number;
48
- codedHeight: number;
49
- format: "yuv420";
50
- data: [
51
- Uint8Array,
52
- Uint8Array,
53
- Uint8Array
54
- ];
55
- buffer: ArrayBuffer;
56
- statistics?: DecodedSampleStatistics;
57
- }
58
- interface DecodedAudioSample {
59
- type: "audio";
60
- channels: number;
61
- channelId: string;
62
- isSync: boolean;
63
- renditionId: number;
64
- timestamp: number;
65
- timescale: number;
66
- duration: number;
67
- sampleRate: number;
68
- format: "f32";
69
- data: Float32Array;
70
- statistics?: DecodedSampleStatistics;
71
- }
72
- declare type DecodedSample = DecodedVideoSample | DecodedAudioSample;
73
- interface InitSegment {
74
- type: Type;
75
- codec: Codec;
76
- channelId: string;
77
- renditionId: number;
78
- timescale: number;
79
- data: ArrayBuffer;
80
- }
81
3
  declare type MatchingKeys<TRecord, TMatch, K extends keyof TRecord = keyof TRecord> = K extends (TRecord[K] extends TMatch ? K : never) ? K : never;
82
4
  declare type VoidKeys<Record> = MatchingKeys<Record, void>;
83
5
  declare type EventListenerReturnType = (() => void) | void;
@@ -87,63 +9,315 @@ declare class Emitter<TEvents, TEmits = TEvents, ArgLessEvents extends VoidKeys<
87
9
  emit<T extends ArgEmits>(eventName: T, args: TEmits[T]): void;
88
10
  off<T extends ArgLessEvents>(eventName: T, fn: () => EventListenerReturnType): void;
89
11
  off<T extends ArgEvents>(eventName: T, fn: (args: TEvents[T]) => EventListenerReturnType): void;
12
+ /**
13
+ * Add an event listener to `eventName`
14
+ *
15
+ * Event listeners may optionally return a "defer function" that will be called once all other listeners have been called.
16
+ * This is useful when one listener wants all other listeners to have reacted to an event before it acts.
17
+ */
90
18
  on<T extends ArgLessEvents>(eventName: T, fn: () => void): void;
91
19
  on<T extends ArgEvents>(eventName: T, fn: (args: TEvents[T]) => void): void;
20
+ /**
21
+ * Add an event listener to `eventName` that will be called once only
22
+ *
23
+ * Event listeners may optionally return a "defer function" that will be called once all other listeners have been called.
24
+ * This is useful when one listener wants all other listeners to have reacted to an event before it acts.
25
+ */
92
26
  once<T extends ArgLessEvents>(eventName: T, fn: () => void): void;
93
27
  once<T extends ArgEvents>(eventName: T, fn: (args: TEvents[T]) => void): void;
94
28
  reset(): void;
95
29
  private add;
96
30
  }
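
The "defer function" behavior documented on `on`/`once` can be illustrated with a short, type-level sketch. This is illustrative only: the `PlayerEvents` map and the `emitter` binding below are hypothetical stand-ins, not part of the published typings.

```typescript
// Hypothetical events map; any Emitter subclass (such as Vindral) behaves the same way.
interface PlayerEvents {
  "connection state": "connected" | "connecting" | "disconnected";
}

declare const emitter: Emitter<PlayerEvents>;

emitter.on("connection state", (state) => {
  console.log("observed state:", state);
  // Returning a function defers this work until every other listener has been called.
  return () => console.log("all listeners have now reacted to:", state);
});
```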
97
- declare type FilterFunc<T> = (item: T) => boolean;
98
- declare class Fifo<T> {
99
- readonly maxSize: number;
100
- private values;
101
- constructor(maxSize?: number);
102
- push: (value: T) => void;
103
- clear: () => void;
104
- pop: () => T | undefined;
105
- peekFirst: () => T | undefined;
106
- peekLast: () => T | undefined;
107
- isFull: () => boolean;
108
- isEmpty: () => boolean;
109
- items: () => ReadonlyArray<T>;
110
- filterPop: (filter: FilterFunc<T>) => void;
111
- }
112
- declare type PartialBy<T, K extends keyof T> = Omit<T, K> & Partial<T>;
113
31
  interface MinMaxAverage {
114
32
  average: number;
115
33
  max: number;
116
34
  min: number;
117
35
  }
118
- interface Regression {
119
- slope: number;
120
- intercept: number;
121
- r2: number;
122
- predict(x: number): number;
36
+ interface RenditionProps {
37
+ id: number;
38
+ bitRate: number;
39
+ codecString?: string;
40
+ language?: string;
41
+ meta?: Record<string, string>;
42
+ }
43
+ interface VideoRenditionProps {
44
+ codec: VideoCodec;
45
+ frameRate: [
46
+ number,
47
+ number
48
+ ];
49
+ width: number;
50
+ height: number;
51
+ }
52
+ interface AudioRenditionProps {
53
+ codec: AudioCodec;
54
+ channels: number;
55
+ sampleRate: number;
56
+ }
57
+ declare type VideoRendition = VideoRenditionProps & RenditionProps;
58
+ declare type AudioRendition = AudioRenditionProps & RenditionProps;
59
+ declare type Rendition = VideoRendition | AudioRendition;
60
+ interface Size {
61
+ width: number;
62
+ height: number;
123
63
  }
64
+ interface VideoConstraint {
65
+ width: number;
66
+ height: number;
67
+ bitRate: number;
68
+ codec?: VideoCodec;
69
+ codecString?: string;
70
+ }
71
+ /**
72
+ * Channel
73
+ */
74
+ export interface Channel {
75
+ /**
76
+ * The ID of the channel
77
+ */
78
+ channelId: string;
79
+ /**
80
+ * Display name
81
+ */
82
+ name: string;
83
+ /**
84
+ * Indicates whether there is an incoming source feed for the channel
85
+ */
86
+ isLive: boolean;
87
+ /**
88
+ * URLs to fetch thumbnails from
89
+ */
90
+ thumbnailUrls: string[];
91
+ }
92
+ interface ClientOverrides {
93
+ maxVideoBitRate?: number;
94
+ minBufferTime?: number;
95
+ maxBufferTime?: number;
96
+ burstEnabled?: boolean;
97
+ }
98
+ interface ChannelWithRenditionsAndOverrides extends Channel {
99
+ renditions: Rendition[];
100
+ overrides?: ClientOverrides;
101
+ }
102
+ interface ConnectOptions {
103
+ channelGroupId?: string;
104
+ channelId: string;
105
+ }
106
+ interface Telemetry {
107
+ url: string;
108
+ probability?: number;
109
+ includeErrors?: boolean;
110
+ includeEvents?: boolean;
111
+ includeStats?: boolean;
112
+ maxRetries?: number;
113
+ maxErrorReports?: number;
114
+ interval?: number;
115
+ }
116
+ export interface ConnectResponse {
117
+ logsUrl?: string;
118
+ statsUrl?: string;
119
+ telemetry?: Telemetry;
120
+ channels: ChannelWithRenditionsAndOverrides[];
121
+ edges: string[];
122
+ }
123
+ /**
124
+ * ApiClientOptions
125
+ */
126
+ export interface ApiClientOptions {
127
+ /**
128
+ * String representing the URL to the public CDN API.
129
+ */
130
+ publicEndpoint: string;
131
+ /**
132
+ * Function that should return a string containing a signed authentication token.
133
+ */
134
+ tokenFactory?: AuthorizationTokenFactory;
135
+ }
136
+ /**
137
+ * Represents the authorization that will be requested.
138
+ */
139
+ export interface AuthorizationContext {
140
+ /**
141
+ * The channelGroupId that might need authorization.
142
+ */
143
+ channelGroupId?: string;
144
+ /**
145
+ * The channelId that might need authorization.
146
+ */
147
+ channelId?: string;
148
+ }
149
+ /**
150
+ * AuthorizationTokenFactory
151
+ */
152
+ export declare type AuthorizationTokenFactory = (context: AuthorizationContext) => string | undefined;
153
+ /**
154
+ * Convenience class for calling the publicly available endpoints of the Vindral Live CDN.
155
+ */
156
+ export declare class ApiClient {
157
+ private baseUrl;
158
+ private tokenFactory?;
159
+ constructor(options: ApiClientOptions);
160
+ /**
161
+ * Returns everything needed to set up the connection of a Vindral instance.
162
+ */
163
+ connect(options: ConnectOptions): Promise<ConnectResponse>;
164
+ /**
165
+ * Fetches information regarding a single channel.
166
+ *
167
+ * @param channelId the channel to fetch
168
+ * @returns a [[Channel]] containing information about the requested channel.
169
+ */
170
+ getChannel(channelId: string): Promise<Channel>;
171
+ /**
172
+ * Fetches channels within a channel group
173
+ *
174
+ * Note: The returned list includes inactive channels - check isLive to keep only the active ones
175
+ *
176
+ * @param channelGroupId the channel group to fetch channels from
177
+ * @returns an array of [[Channel]] that belong to the channel group
178
+ */
179
+ getChannels(channelGroupId: string): Promise<Channel[]>;
180
+ private getHeaders;
181
+ private getAuthToken;
182
+ private toChannels;
183
+ private toChannel;
184
+ }
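
For context, here is a hedged usage sketch of the `ApiClient` surface above. The endpoint URL, the channel group ID, and the `lookUpSignedToken` helper are placeholders for illustration, not values defined by the SDK.

```typescript
// Hypothetical helper that returns a signed token for the given ID, or undefined if none is needed.
declare function lookUpSignedToken(id: string | undefined): string | undefined;

const api = new ApiClient({
  publicEndpoint: "https://api.example-cdn.invalid", // placeholder URL
  // Called with the authorization context of each request; may return undefined.
  tokenFactory: ({ channelId, channelGroupId }) => lookUpSignedToken(channelId ?? channelGroupId),
});

// List the channels of a group and keep only those with an incoming source feed.
const channels = await api.getChannels("your-channel-group-id");
const live = channels.filter((channel) => channel.isLive);
console.log(live.map((channel) => `${channel.name} (${channel.channelId})`));
```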
185
+ /**
186
+ * Represents a timed metadata event
187
+ */
124
188
  export interface Metadata {
189
+ /**
190
+ * The raw string content as it was ingested (if using JSON, it needs to be parsed on your end)
191
+ */
125
192
  content: string;
193
+ /**
194
+ * Timestamp in ms
195
+ */
126
196
  timestamp: number;
127
197
  }
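
Since the metadata `content` is delivered as the raw ingested string, JSON payloads have to be parsed by the application. A minimal sketch, assuming (hypothetically) that your ingest sends JSON:

```typescript
function onMetadata(metadata: Metadata): void {
  try {
    // content is the raw ingested string; parse it only if your ingest actually sends JSON.
    const payload = JSON.parse(metadata.content) as Record<string, unknown>;
    console.log(`metadata at ${metadata.timestamp} ms`, payload);
  } catch {
    console.log(`non-JSON metadata at ${metadata.timestamp} ms`, metadata.content);
  }
}
```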
198
+ /**
199
+ * Available events to listen to
200
+ */
201
+ export interface CastSenderEvents {
202
+ /**
203
+ * When a connection has been established with a CastReceiver
204
+ */
205
+ ["connected"]: void;
206
+ /**
207
+ * When a previous session has been resumed
208
+ */
209
+ ["resumed"]: void;
210
+ /**
211
+ * When a CastReceiver has lost or stopped a connection
212
+ */
213
+ ["disconnected"]: void;
214
+ /**
215
+ * When a connection attempt fails
216
+ */
217
+ ["failed"]: void;
218
+ /**
219
+ * When the remote connection emits a metadata event
220
+ */
221
+ ["metadata"]: Metadata;
222
+ /**
223
+ * When the remote connection receives a server wallclock time event
224
+ */
225
+ ["server wallclock time"]: number;
226
+ }
227
+ /**
228
+ * Used for initializing the CastSender
229
+ */
230
+ export interface CastConfig {
231
+ /**
232
+ * The [Vindral Options](./Options) to use for the Cast Receiver
233
+ */
234
+ options: Options;
235
+ /**
236
+ * URL to a background image.
237
+ * Example: "https://via.placeholder.com/256x144"
238
+ */
239
+ background?: string;
240
+ /**
241
+ * Override this if you have your own custom receiver
242
+ */
243
+ receiverApplicationId?: string;
244
+ }
245
+ /**
246
+ * CastSender handles initiation of and communication with the Google Cast Receiver
247
+ */
248
+ export declare class CastSender extends Emitter<CastSenderEvents> {
249
+ private state;
250
+ private config;
251
+ private unloaded;
252
+ constructor(config: CastConfig);
253
+ /**
254
+ * True if the instance is casting right now
255
+ */
256
+ get casting(): boolean;
257
+ /**
258
+ * The current volume
259
+ */
260
+ get volume(): number;
261
+ /**
262
+ * Set the current volume. Setting this to zero is equivalent to muting the video
263
+ */
264
+ set volume(volume: number);
265
+ /**
266
+ * The current language
267
+ */
268
+ get language(): string | undefined;
269
+ /**
270
+ * Set the current language
271
+ */
272
+ set language(language: string | undefined);
273
+ /**
274
+ * The current channelId
275
+ */
276
+ get channelId(): string;
277
+ /**
278
+ * Set the current channelId
279
+ */
280
+ set channelId(channelId: string);
281
+ /**
282
+ * Update the authentication token on an already established and authenticated connection
283
+ */
284
+ updateAuthenticationToken: (token: string) => void;
285
+ /**
286
+ * Fully unloads the instance. This disconnects the current listener but lets the
287
+ * cast session continue on the receiving device
288
+ */
289
+ unload: () => void;
290
+ /**
291
+ * Initiates the CastSender.
292
+ * Will reject if Cast is not available on the device or the network.
293
+ */
294
+ init: () => Promise<void>;
295
+ /**
296
+ * Requests a session. It will open the native cast receiver chooser dialog
297
+ */
298
+ start: () => Promise<void>;
299
+ /**
300
+ * Stops a session. It will stop playback on device as well.
301
+ */
302
+ stop: () => void;
303
+ /**
304
+ * Returns a string representing the name of the Cast receiver device or undefined if no receiver exists
305
+ */
306
+ getReceiverName: () => string | undefined;
307
+ private onGCastApiAvailable;
308
+ private send;
309
+ private onMessage;
310
+ private onSessionStarted;
311
+ private onSessionStateChanged;
312
+ private getInstance;
313
+ private getSession;
314
+ private castLibrariesAdded;
315
+ private verifyCastLibraries;
316
+ }
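
A hedged sketch of driving the `CastSender` described above. The `Options` value is abbreviated with placeholder fields (the full shape is defined by the SDK's `Options` type), and the event handlers are illustrative only.

```typescript
const sender = new CastSender({
  // Placeholder playback options; see the Options type for the full set of fields.
  options: { url: "https://lb.cdn.vindral.com", channelId: "your-channel-id" } as Options,
  background: "https://via.placeholder.com/256x144",
  // receiverApplicationId is only needed if you host a custom receiver.
});

sender.on("connected", () => console.log("casting to", sender.getReceiverName()));
sender.on("disconnected", () => console.log("cast session ended"));
sender.on("failed", () => console.warn("cast connection attempt failed"));

await sender.init();  // rejects if Cast is not available on the device or network
await sender.start(); // opens the native cast receiver chooser dialog
```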
128
317
  export interface TimeRange {
129
318
  start: number;
130
319
  end: number;
131
320
  }
132
- declare type Timestamp = number;
133
- export interface PerLevelStats {
134
- upgradesFromLevel: Timestamp[];
135
- downgradesFromLevel: Timestamp[];
136
- bufferingRanges: PartialBy<TimeRange, "end">[];
137
- activeRanges: PartialBy<TimeRange, "end">[];
138
- decodeRate: number;
139
- }
140
- export interface Metrics {
141
- levels: {
142
- [key: string]: PerLevelStats;
143
- };
144
- general: PerLevelStats;
145
- bufferFullness: number;
146
- }
147
321
  export declare enum Level {
148
322
  TRACE = "trace",
149
323
  DEBUG = "debug",
@@ -152,204 +326,28 @@ export declare enum Level {
152
326
  ERROR = "error",
153
327
  CRITICAL = "critical"
154
328
  }
155
- declare type Meta = Record<string, unknown>;
156
- interface Logger {
157
- getLogger(category: string): Logger;
158
- getFullCategory(): string[];
159
- category(): string;
160
- trace(message: string, meta?: Meta): void;
161
- debug(message: string, meta?: Meta): void;
162
- info(message: string, meta?: Meta): void;
163
- warn(message: string, meta?: Meta): void;
164
- error(message: string, meta?: Meta): void;
165
- critical(message: string, meta?: Meta): void;
166
- }
167
- interface LoggerOptions {
168
- category: string;
169
- onLog: (instance: LoggerInstance, level: Level, message: string, meta?: Meta) => void;
170
- parent?: Logger;
171
- }
172
- declare class LoggerInstance implements Logger {
173
- private options;
174
- constructor(options: LoggerOptions);
175
- getLogger: (category: string) => Logger;
176
- getFullCategory: () => string[];
177
- category: () => string;
178
- trace: (message: string, meta?: Meta | undefined) => void;
179
- debug: (message: string, meta?: Meta | undefined) => void;
180
- info: (message: string, meta?: Meta | undefined) => void;
181
- warn: (message: string, meta?: Meta | undefined) => void;
182
- error: (message: string, meta?: Meta | undefined) => void;
183
- critical: (message: string, meta?: Meta | undefined) => void;
184
- }
185
- export declare type PlayInitiator = "user input" | "programatically";
186
- export interface PlaybackSource {
187
- volume: number;
188
- muted: boolean;
189
- currentTime: number;
190
- playbackRate?: number;
191
- isActivated: boolean;
192
- readonly seekTime: number;
193
- readonly paused: boolean;
194
- readonly isSeeking: boolean;
195
- play(initiator: PlayInitiator): Promise<void>;
196
- load?: () => void;
197
- unload?: () => void;
198
- }
199
- interface MediaElementOptions {
200
- autoplay: boolean;
201
- muted: boolean;
202
- type: "audio" | "video";
203
- logger: Logger;
204
- poster?: string;
205
- }
206
329
  interface NeedsUserInputContext {
330
+ /**
331
+ * True if user input is needed for audio
332
+ */
207
333
  forAudio: boolean;
334
+ /**
335
+ * True if user input is needed for video
336
+ */
208
337
  forVideo: boolean;
209
338
  }
210
- interface MediaElementEvents {
211
- ["needs user input"]: NeedsUserInputContext;
212
- ["buffer state"]: Readonly<BufferState>;
213
- }
214
- declare class MediaElement extends Emitter<MediaElementEvents> {
215
- readonly element: HTMLMediaElement;
216
- private logger;
217
- private seekTimes;
218
- private seekStartTime?;
219
- private _userProvidedMuted;
220
- private timers;
221
- private _userHasProvidedInput;
222
- isActivated: boolean;
223
- constructor({ type, autoplay, muted, logger, poster }: MediaElementOptions);
224
- attach: (container: HTMLElement) => void;
225
- get seekTime(): number;
226
- get isSeeking(): boolean;
227
- get currentTime(): number;
228
- set currentTime(value: number);
229
- get playbackRate(): number;
230
- set playbackRate(rate: number);
231
- get volume(): number;
232
- set volume(volume: number);
233
- get muted(): boolean;
234
- set muted(muted: boolean);
235
- get userHasProvidedInput(): boolean;
236
- get paused(): boolean;
237
- load: () => void;
238
- unload: () => void;
239
- getPlaybackRate: () => number;
240
- getPlaybackState: () => PlaybackState;
241
- setPlaybackRate: (rate: number) => void;
242
- getBuffer: () => TimeRange[];
243
- play: (initiator: PlayInitiator) => Promise<void>;
244
- private _play;
245
- private onEvent;
246
- private onBufferStateChange;
247
- private onSeekStart;
248
- private onSeekEnd;
249
- }
250
- declare type PlaybackState = "buffering" | "playing";
339
+ declare type PlaybackState = "buffering" | "playing" | "paused";
251
340
  declare type BufferStateEvent = "filled" | "drained";
252
- declare type BufferState = {
253
- buffered: ReadonlyArray<TimeRange>;
254
- currentTime: number;
255
- playbackState: PlaybackState;
256
- };
257
- interface PlaybackModuleListeners {
258
- ["buffer state"]: Readonly<BufferState>;
259
- ["needs user input"]: NeedsUserInputContext;
260
- }
261
- interface PlaybackModuleEvents {
262
- ["buffer state event"]: Readonly<BufferStateEvent>;
263
- ["playback state"]: Readonly<PlaybackState>;
264
- ["buffer fullness"]: number;
265
- }
266
- interface ClockSource {
267
- readonly currentTime: number;
268
- }
269
341
  interface PlaybackModuleStatistics {
270
342
  bufferTime: number;
271
343
  needsInputForAudioCount: number;
272
344
  needsInputForVideoCount: number;
273
345
  }
274
- declare class PlaybackModule {
275
- private emitter;
276
- private logger;
277
- private clockSource;
278
- private state;
279
- private bufferFullness;
280
- private targetBufferTime;
281
- private lastBufferStateEvent;
282
- private firstFrameTime?;
283
- private needsInputForAudioCount;
284
- private needsInputForVideoCount;
285
- constructor(emitter: Emitter<PlaybackModuleListeners, PlaybackModuleEvents>, logger: Logger, clockSource: ClockSource, targetBufferTime: number);
286
- static create: (emitter: Emitter<PlaybackModuleListeners, PlaybackModuleEvents>, logger: Logger, clockSource: ClockSource, targetBufferTime: number) => PlaybackModule;
287
- unload: () => void;
288
- setTargetBufferTime: (bufferTime: number) => void;
289
- getTargetBufferTime: () => number;
290
- getBufferFullness: () => number;
291
- getLastBufferStateEvent: () => BufferStateEvent;
292
- getState: () => PlaybackState;
293
- getFirstFrameTime: () => number | undefined;
294
- getStatistics: () => PlaybackModuleStatistics;
295
- private onBufferedStateChanged;
296
- private onNeedsUserInput;
297
- }
298
- declare type Direction = "upgrade" | "downgrade" | "double downgrade" | "reconnect";
299
- interface QualityOfServiceConfig {
300
- cooldownTime: number;
301
- maxBufferingEvents: {
302
- last10Seconds: number;
303
- };
304
- maxBufferingRatio: {
305
- last10Seconds: number;
306
- };
307
- maxRecentDowngradesCount: number;
308
- maxDowngradeLookbackMs: number;
309
- minTimeSpentPlaying: {
310
- factor: number;
311
- ratio: number;
312
- };
313
- maxCooldownRatio: number;
314
- }
315
- interface AdaptivityEvents {
316
- ["adapt level"]: Readonly<Direction>;
317
- }
318
346
  interface AdaptivityStatistics {
319
347
  isAbrEnabled: boolean;
320
348
  }
321
- interface QualityOfServiceSource {
322
- getMetrics(): Metrics;
323
- getLevelStats(level: RenditionLevel): PerLevelStats | undefined;
324
- getBufferFullnessRegression(): Regression | undefined;
325
- timeSpentBuffering(): number;
326
- timeActive(): number;
327
- timeSpentBufferingLast(last: number): number;
328
- timeSpentActiveLast(last: number): number;
329
- timeSpentPlayingInAtLeastLevelRatio(level: RenditionLevel): number;
330
- }
331
- declare class AdaptivityModule {
332
- private qualityOfServiceSource;
333
- private config;
334
- private emitter;
335
- private logger;
336
- private isSuspended;
337
- private lastAdaptTime;
338
- isEnabled: boolean;
339
- constructor(emitter: Emitter<AdaptivityEvents>, logger: Logger, qualityOfServiceSource: QualityOfServiceSource, config: Partial<QualityOfServiceConfig>);
340
- static create: (emitter: Emitter<AdaptivityEvents>, logger: Logger, qualityOfServiceSource: QualityOfServiceSource, config?: Partial<QualityOfServiceConfig>) => AdaptivityModule;
341
- load: () => void;
342
- unload: () => void;
343
- suspend: () => void;
344
- unsuspend: () => void;
345
- reset: (extraGraceTimeMs?: number) => void;
346
- getStatistics: () => AdaptivityStatistics;
347
- isQoSOk: (renditionLevel: RenditionLevel) => boolean;
348
- private onBufferState;
349
- private onAdaptedLevel;
350
- private adaptLevel;
351
- private tooMuchTimeBuffering;
352
- private tooManyBufferingEvents;
349
+ interface BufferTimeStatistics {
350
+ bufferTimeAdjustmentCount: number;
353
351
  }
354
352
  interface VindralErrorProps {
355
353
  isFatal: boolean;
@@ -364,158 +362,38 @@ export declare const AUTHENTICATION_EXPIRED_CODE = "authentication_expired";
364
362
  export declare const CHANNEL_NOT_FOUND_CODE = "channel_not_found";
365
363
  export declare const NO_INCOMING_DATA = "no_incoming_data_error";
366
364
  export declare const INACTIVITY_CODE = "connection_inactivity";
367
- export declare const MISSING_INIT_SEGMENT = "missing_init_segment";
368
- export declare const NO_TRACK_CONTEXT = "no_track_context";
369
365
  export declare const DISCONNECTED_BY_EDGE = "disconnected_by_edge";
370
- export declare const DECODER_ERROR = "decoder_error";
371
- export declare const AUDIO_PLAYER_ERROR = "audio_player_error";
372
- export declare const MEDIA_SOURCE_ERROR = "media_source_error";
373
- export declare const WEBGL_CONTEXT_LOST_ERROR = "webgl_context_lost_error";
374
- export declare type ErrorType = "internal" | "external";
366
+ declare type ErrorType = "internal" | "external";
367
+ /**
368
+ * Represents a Vindral error - all errors emitted from the Vindral instance inherit from this class.
369
+ */
375
370
  export declare class VindralError extends Error {
376
371
  private props;
377
372
  private extra;
378
373
  constructor(message: string, props: VindralErrorProps, extra?: {});
374
+ /**
375
+ * The error code is a stable string that represents the error type - this should be treated as an
376
+ * opaque string that can be used as a key for looking up localized strings for displaying error messages.
377
+ * @returns the error code
378
+ */
379
379
  code: () => string;
380
+ /**
381
+ * Indicates whether the error is fatal - if it is, the Vindral instance will be unloaded because of this error.
382
+ */
380
383
  isFatal: () => boolean;
384
+ /**
385
+ * The underlying error that caused the Vindral error
386
+ * @returns the underlying error
387
+ */
381
388
  source: () => Error | MediaError | undefined;
382
389
  type: () => ErrorType;
390
+ /**
391
+ * @returns a stringifiable representation of the error
392
+ */
383
393
  toStringifiable: () => Record<string, unknown>;
384
394
  }
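
A small sketch of how the `VindralError` accessors above are meant to be read; the `describeError` helper is hypothetical and only illustrates the contract.

```typescript
// Hypothetical helper: turns a VindralError into a log-friendly line.
function describeError(error: VindralError): string {
  // code() is a stable, opaque key - suitable for looking up a localized message.
  const severity = error.isFatal() ? "fatal (instance will be unloaded)" : "recoverable";
  return `${severity} ${error.type()} error: ${error.code()}`;
}

declare const error: VindralError;
console.log(describeError(error), error.source(), error.toStringifiable());
```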
385
- export declare const mediaElementError: (isFatal: boolean, source: MediaError) => VindralError;
386
- export declare const decoderError: (isFatal: boolean, source: Error) => VindralError;
387
- export declare const audioPlayerError: (isFatal: boolean, source: Error) => VindralError;
388
- export declare const missingInitSegmentError: (renditionId: number, channelId: string) => VindralError;
389
- export declare const mediaSourceError: (isFatal: boolean, source: Error) => VindralError;
390
- export declare const noTrackContextError: (type: "audio" | "video", renditionId: number, channelId: string) => VindralError;
391
- export declare const audioContextTimeoutError: () => VindralError;
392
- export declare const disconnectedFromEdgeServerError: (source?: VindralError | undefined) => VindralError;
393
- export declare const authenticationError: (source?: Error | undefined) => VindralError;
394
- export declare const authenticationExpiredError: (source?: Error | undefined) => VindralError;
395
- export declare const inactivityError: (type: ErrorType, source?: Error | undefined) => VindralError;
396
- export declare const channelNotFoundError: (type: ErrorType, source?: Error | undefined) => VindralError;
397
- export declare const noIncomingDataError: (source?: Error | undefined) => VindralError;
398
- export declare const unableToConnectError: (source?: Error | undefined) => VindralError;
399
- export declare const unableToConnectAfterRetriesError: () => VindralError;
400
- export declare const webglContextError: (source: Error) => VindralError;
401
- interface AudioPlayerModuleListeners {
402
- ["decoded frame"]: Readonly<DecodedSample>;
403
- }
404
- interface AudioPlayerModuleEvents {
405
- ["needs user input"]: NeedsUserInputContext;
406
- ["error"]: VindralError;
407
- }
408
- interface ClockSource {
409
- readonly currentTime: number;
410
- }
411
- declare class AudioPlayerModule {
412
- private logger;
413
- private emitter;
414
- private unmuter;
415
- private audio?;
416
- private gainNode?;
417
- private _volume;
418
- private _userProvidedMuted;
419
- private _muted;
420
- private startTime;
421
- private samples;
422
- private preInitSampleQueue;
423
- private sampleRate;
424
- private channels;
425
- private index;
426
- private clockSource;
427
- private clockDelta?;
428
- private startTimeIsInvalidated;
429
- private lastSampleTimestamp;
430
- isActivated: boolean;
431
- get volume(): number;
432
- set volume(volume: number);
433
- get seekTime(): number;
434
- get isSeeking(): boolean;
435
- get muted(): boolean;
436
- set muted(muted: boolean);
437
- get paused(): boolean;
438
- get currentTime(): number;
439
- set currentTime(currentTime: number);
440
- constructor(emitter: Emitter<AudioPlayerModuleListeners, AudioPlayerModuleEvents>, logger: Logger, clockSource: ClockSource, muted: boolean);
441
- unload: () => Promise<void>;
442
- suspend: () => void;
443
- unsuspend: () => void;
444
- static create: (emitter: Emitter<AudioPlayerModuleListeners, AudioPlayerModuleEvents>, logger: Logger, clockSource: ClockSource, muted: boolean) => AudioPlayerModule;
445
- private flush;
446
- private onDecodedFrame;
447
- play: () => Promise<void>;
448
- private resume;
449
- private getAudioContext;
450
- private setupContext;
451
- private setGain;
452
- }
453
- interface BufferTimeConfig {
454
- minBufferTime: number;
455
- maxBufferTime: number;
456
- cooldownTime: number;
457
- maxBufferingEvents: {
458
- last30Seconds: number;
459
- };
460
- maxBufferingRatio: {
461
- last30Seconds: number;
462
- };
463
- }
464
- interface BufferTimeListeners {
465
- ["buffer state"]: Readonly<BufferState>;
466
- }
467
- interface BufferTimeStatistics {
468
- bufferTimeAdjustmentCount: number;
469
- }
470
- interface TargetBufferTimeTarget {
471
- targetBufferTime: number;
472
- }
473
- interface QualityOfServiceSource {
474
- bufferingEventsLast(last: number): number;
475
- timeSpentBufferingLast(last: number): number;
476
- timeSpentActiveLast(last: number): number;
477
- }
478
- declare class BufferTimeModule {
479
- private static BUFFER_TIME_STEP_SIZE;
480
- private qualityOfServiceSource;
481
- private targetBufferTimeTarget;
482
- private config;
483
- private emitter;
484
- private logger;
485
- private isSuspended;
486
- private lastIncreaseTime;
487
- private bufferTimeAdjustmentCount;
488
- constructor(emitter: Emitter<BufferTimeListeners, unknown>, logger: Logger, qualityOfServiceSource: QualityOfServiceSource, targetBufferTimeTarget: TargetBufferTimeTarget, config: Partial<BufferTimeConfig>);
489
- static create: (emitter: Emitter<BufferTimeListeners, unknown>, logger: Logger, qualityOfServiceSource: QualityOfServiceSource, targetBufferTimeTarget: TargetBufferTimeTarget, config?: Partial<BufferTimeConfig>) => BufferTimeModule;
490
- updateConfig(config: Partial<BufferTimeConfig>): void;
491
- load: () => void;
492
- unload: () => void;
493
- suspend: () => void;
494
- unsuspend: () => void;
495
- reset: () => void;
496
- getStatistics: () => BufferTimeStatistics;
497
- private onBufferState;
498
- }
499
395
  declare type State = "connected" | "disconnected" | "connecting";
500
396
  declare type ContextSwitchState = "completed" | "started";
501
- interface ConnectionModuleListeners {
502
- ["send signal"]: Readonly<string>;
503
- ["disconnect"]: void;
504
- ["reconnect"]: string;
505
- }
506
- interface ConnectionModuleEvents {
507
- ["received signal"]: Readonly<Signal>;
508
- ["received data"]: Readonly<ArrayBuffer>;
509
- ["connection state"]: Readonly<State>;
510
- ["rtt"]: number;
511
- ["error"]: Readonly<VindralError>;
512
- ["context switch complete"]: Readonly<void>;
513
- ["context switch started"]: Readonly<void>;
514
- }
515
- interface ConnectOptions {
516
- connectHandler: () => Promise<string>;
517
- reconnectHandler: (state: ReconnectState) => Promise<boolean> | boolean;
518
- }
519
397
  interface ConnectionStatistics {
520
398
  rtt: MinMaxAverage;
521
399
  estimatedBandwidth: number;
@@ -523,55 +401,6 @@ interface ConnectionStatistics {
523
401
  connectCount: number;
524
402
  connectionAttemptCount: number;
525
403
  }
526
- declare class ConnectionModule {
527
- private static PING_INTERVAL;
528
- private static MAX_MISSED_PINGS;
529
- private static TLS_ROUNDTRIPS;
530
- private timers;
531
- private emitter;
532
- private transport?;
533
- private logger;
534
- private options;
535
- private rtts;
536
- private lastPingSentTime;
537
- private isPingInFlight;
538
- private connectCount;
539
- private _firstConnectionTime?;
540
- private _lastConnectionTime?;
541
- private missedPings;
542
- private contextSwitchesInProgress;
543
- private contextSwitchesCompleted;
544
- private buffer;
545
- private constructor();
546
- static create: (emitter: Emitter<ConnectionModuleListeners, ConnectionModuleEvents>, logger: Logger, options: ConnectOptions) => ConnectionModule;
547
- unload: () => void;
548
- suspend: () => void;
549
- unsuspend: () => void;
550
- get rtt(): number;
551
- get estimatedBandwidth(): number | undefined;
552
- get connectTime(): number | undefined;
553
- get firstConnectionTime(): number | undefined;
554
- get lastConnectionTime(): number | undefined;
555
- getState: () => State;
556
- sendSignal: (signal: Readonly<string>) => void;
557
- getStatistics: () => ConnectionStatistics;
558
- private onMessage;
559
- private handleMessage;
560
- connect: (connectionsCount: 1 | 2) => void;
561
- private onTransportChange;
562
- disconnect: (reason?: string) => void;
563
- reconnect: (reason: string) => void;
564
- private resetPingState;
565
- private pingCooldownExpired;
566
- private sendPing;
567
- }
568
- interface Size {
569
- width: number;
570
- height: number;
571
- }
572
- interface PictureInPictureSizeSource {
573
- getPictureInPictureSize(): Size | undefined;
574
- }
575
404
  export interface RenditionLevel {
576
405
  audio?: AudioRendition;
577
406
  video?: VideoRendition;
@@ -582,29 +411,10 @@ export interface RenditionLevelChanged {
582
411
  to?: RenditionLevel;
583
412
  reason: RenditionLevelChangedReason;
584
413
  }
585
- export interface SubscriptionChange {
586
- readonly to: Readonly<Subscription>;
587
- readonly from: Readonly<Subscription>;
588
- }
589
- interface RenditionsModuleEvents {
590
- ["rendition level changed"]: Readonly<RenditionLevelChanged>;
591
- }
592
- interface RenditionsModuleListeners {
593
- ["subscription changed"]: Readonly<SubscriptionChange>;
594
- ["renditions"]: Readonly<RenditionsArgs>;
595
- }
596
- interface RenditionsArgs {
597
- renditions: ReadonlyArray<Rendition>;
598
- channelId: string;
599
- }
600
414
  interface RenditionLevel {
601
415
  audio?: AudioRendition;
602
416
  video?: VideoRendition;
603
417
  }
604
- interface SubscriptionSource {
605
- getTargetSubscription(): Readonly<Subscription>;
606
- getCurrentSubscription(): Readonly<Subscription>;
607
- }
608
418
  interface RenditionsModuleStatistics {
609
419
  videoRenditionId?: number;
610
420
  audioRenditionId?: number;
@@ -621,42 +431,6 @@ interface RenditionsModuleStatistics {
621
431
  ];
622
432
  renditionLevelChangeCount: number;
623
433
  }
624
- declare type ChannelId = string;
625
- declare class RenditionsModule {
626
- private renditions;
627
- private renditionLevels;
628
- private languages;
629
- private emitter;
630
- private subscriptionSource;
631
- private renditionLevelChangeCount;
632
- constructor(emitter: Emitter<RenditionsModuleListeners, RenditionsModuleEvents>, subscriptionSource: SubscriptionSource);
633
- unload: () => void;
634
- static create: (emitter: Emitter<RenditionsModuleListeners, RenditionsModuleEvents>, subscriptionSource: SubscriptionSource) => RenditionsModule;
635
- getRenditionLevels: (subscription?: Readonly<Subscription> | undefined) => ReadonlyArray<RenditionLevel>;
636
- getRenditionLevel: (subscription?: Readonly<Subscription> | undefined) => RenditionLevel | undefined;
637
- setRenditions: (channelId: ChannelId, renditions: Rendition[]) => void;
638
- getLanguages: () => ReadonlyArray<string>;
639
- getVideoRendition: (renditionId: number, channelId?: string) => Readonly<VideoRendition> | undefined;
640
- getAudioRenditions: (channelId: ChannelId) => Readonly<AudioRendition[]> | undefined;
641
- getAudioRendition: (renditionId: number, channelId?: string) => Readonly<AudioRendition> | undefined;
642
- getRendition: (renditionId: number, channelId?: string) => Readonly<Rendition> | undefined;
643
- getStatistics: () => RenditionsModuleStatistics;
644
- private onRenditions;
645
- private onSubscriptionChanged;
646
- private updateRenditionLevels;
647
- private createRenditionLevels;
648
- private getCurrentSubscription;
649
- }
650
- declare type DeepPartial<T> = {
651
- [P in keyof T]?: DeepPartial<T[P]>;
652
- };
653
- interface ConstraintCapModuleEvents {
654
- ["constraint cap changed"]: Readonly<ConstraintCap>;
655
- }
656
- interface ConstraintCapModuleListeners {
657
- ["enter picture in picture"]: Readonly<PictureInPictureSizeSource>;
658
- ["exit picture in picture"]: void;
659
- }
660
434
  interface VideoConstraintCap {
661
435
  width: number;
662
436
  height: number;
@@ -677,120 +451,13 @@ interface ConstraintCapStatistics {
677
451
  elementHeight: number;
678
452
  pixelRatio: number;
679
453
  }
680
- interface RenditionLevelSource {
681
- getRenditionLevels(): ReadonlyArray<RenditionLevel>;
682
- }
683
- declare class ConstraintCapModule {
684
- private static CHECK_SIZE_INTERVAL;
685
- private emitter;
686
- private timers;
687
- private pictureInPictureSource?;
688
- private element;
689
- private currentCap?;
690
- private userSpecifiedCap?;
691
- private renditionLevelSource;
692
- private resizeObserver?;
693
- private elementSize;
694
- private _sizeBasedResolutionCapEnabled;
695
- constructor(emitter: Emitter<ConstraintCapModuleListeners, ConstraintCapModuleEvents>, element: HTMLElement, renditionLevelSource: RenditionLevelSource, sizeBasedResolutionCapEnabled: boolean);
696
- unload: () => void;
697
- load: () => void;
698
- static create: (emitter: Emitter<ConstraintCapModuleListeners, ConstraintCapModuleEvents>, element: HTMLElement, renditionLevelSource: RenditionLevelSource, sizeBasedResolutionCapEnabled: boolean) => ConstraintCapModule;
699
- unsuspend: () => void;
700
- suspend: () => void;
701
- get sizeBasedResolutionCapEnabled(): boolean;
702
- set sizeBasedResolutionCapEnabled(enabled: boolean);
703
- setUserSpecifiedCap: (cap: DeepPartial<ConstraintCap>) => void;
704
- getUserSpecifiedCap: () => ConstraintCap | undefined;
705
- getCurrentConstraintCap: () => ConstraintCap | undefined;
706
- getStatistics: () => ConstraintCapStatistics;
707
- constrainSubscription: (subscription: Subscription) => Subscription;
708
- private onResize;
709
- private evaluateConstraintCap;
710
- private enterPictureInPicture;
711
- private exitPictureInPicture;
712
- }
713
- declare class ArrayBufferPool {
714
- private maxSize;
715
- private size;
716
- private buffers;
717
- private _borrowedBuffers;
718
- constructor(maxSize: number);
719
- get bufferSize(): number;
720
- get poolSize(): number;
721
- get borrowedBuffers(): number;
722
- get: (size: number) => ArrayBuffer;
723
- reclaim: (buffer: ArrayBuffer) => void;
724
- }
725
- declare type DecoderParamType<Type extends "audio" | "video"> = Type extends "video" ? CodedVideoSample : CodedAudioSample;
726
- declare type DecoderReturnType<Type extends "audio" | "video"> = Type extends "video" ? DecodedVideoSample : DecodedAudioSample;
727
- interface DecoderContext<Type extends "audio" | "video"> {
728
- unload(): void;
729
- initSegment(initSegment: Readonly<InitSegment>): void;
730
- enqueue(sample: Readonly<DecoderParamType<Type>>): void;
731
- take(): DecoderReturnType<Type> | undefined;
732
- error?: () => Error | undefined;
733
- }
734
- interface DecoderModuleEvents {
735
- ["video decode rate"]: number;
736
- ["decoded frame"]: Readonly<DecodedSample>;
737
- ["buffer state"]: Readonly<BufferState>;
738
- ["error"]: Readonly<VindralError>;
739
- }
740
- interface DecoderModuleListeners {
741
- ["init segment"]: Readonly<InitSegment>;
742
- ["coded sample"]: Readonly<CodedSample>;
743
- }
744
454
  interface DecoderStatistics {
745
455
  videoDecodeRate: number;
746
456
  videoDecodeTime: MinMaxAverage;
747
457
  audioDecodeTime: MinMaxAverage;
748
458
  videoTransportTime: MinMaxAverage;
749
459
  }
750
- interface ClockSource {
751
- readonly currentTime: number;
752
- }
753
- interface TrackContext {
754
- buffer: TimeRange[];
755
- decoderContext: DecoderContext<Type>;
756
- decodeTime: Fifo<number>;
757
- transportTime: Fifo<number>;
758
- sampleDuration: number;
759
- }
760
- declare class DecoderModule {
761
- private static BUFFER_GAP_THRESHOLD;
762
- private static DECODED_FRAMES_CHECK_INTERVAL;
763
- private static DECODE_RATE_INTERVAL;
764
- private timers;
765
- private logger;
766
- private emitter;
767
- private trackContexts;
768
- private clockSource;
769
- private isSuspended;
770
- constructor(emitter: Emitter<DecoderModuleListeners, DecoderModuleEvents>, logger: Logger, trackContexts: Map<Type, TrackContext>, clockSource: ClockSource);
771
- load: () => void;
772
- unload: () => void;
773
- suspend: () => void;
774
- unsuspend: () => void;
775
- static create: (emitter: Emitter<DecoderModuleListeners, DecoderModuleEvents>, logger: Logger, tracks: Track[], clockSource: ClockSource, pool: ArrayBufferPool) => Promise<DecoderModule>;
776
- getBuffer: (type: Type) => TimeRange[] | undefined;
777
- videoDecodeRate: () => number;
778
- getStatistics: () => DecoderStatistics;
779
- private onInitSegment;
780
- private decode;
781
- private emitDecodedFrames;
782
- private emitDecodeRate;
783
- private updateBufferState;
784
- }
785
460
  type EffectiveConnectionType = "2g" | "3g" | "4g" | "slow-2g";
786
- interface DocumentState {
787
- readonly isVisible: boolean;
788
- readonly isOnline: boolean;
789
- }
790
- interface DocumentStateEvents {
791
- ["page active"]: boolean;
792
- ["pagehide"]: PageTransitionEvent;
793
- }
794
461
  interface DocumentStateModulesStatistics {
795
462
  isVisible: boolean;
796
463
  isOnline: boolean;
@@ -804,407 +471,45 @@ interface DocumentStateModulesStatistics {
804
471
  navigatorSaveData?: boolean;
805
472
  navigatorDownlink?: number;
806
473
  }
807
- declare class DocumentStateModule implements DocumentState {
808
- private emitter;
809
- private isVisibleCount;
810
- private isHiddenCount;
811
- private isOnlineCount;
812
- private isOfflineCount;
813
- private isActive;
814
- constructor(emitter: Emitter<unknown, DocumentStateEvents>);
815
- static create: (emitter: Emitter<unknown, DocumentStateEvents>) => DocumentStateModule;
816
- unload: () => void;
817
- load: () => void;
818
- unsuspend: () => void;
819
- getStatistics: () => DocumentStateModulesStatistics;
820
- get isOnline(): boolean;
821
- get isVisible(): boolean;
822
- private onOnline;
823
- private onOffline;
824
- private onPageHide;
825
- private onVisibilityChanged;
826
- private setIsActive;
827
- }
828
- interface EventModuleEvents {
829
- ["event"]: Readonly<StoredEvent>;
830
- }
831
- interface TimeSource {
832
- readonly drift: number | undefined;
833
- readonly serverCurrentTime: number;
834
- }
835
- interface BaseEvent {
836
- timestamp: number;
837
- }
838
- declare type TimestampAdded = {
839
- timestampAdded: number;
840
- };
841
- interface Metadata extends BaseEvent {
842
- type: "metadata";
843
- content: string;
844
- }
845
- interface ChannelSwitch extends BaseEvent {
846
- type: "channel switch";
847
- channelId: string;
848
- }
849
- interface LanguageSwitch extends BaseEvent {
850
- type: "language switch";
851
- language: string;
852
- }
853
- declare type Event = Metadata | ChannelSwitch | LanguageSwitch;
854
- declare type StoredEvent = TimestampAdded & Event;
855
- declare class EventModule {
856
- private static EVENT_TIMEOUT;
857
- private static EVENT_CHECK_INTERVAL;
858
- private logger;
859
- private emitter;
860
- private waitingEvents;
861
- private isTriggered;
862
- private timeSource;
863
- private timers;
864
- constructor(emitter: Emitter<unknown, EventModuleEvents>, logger: Logger, timeSource: TimeSource);
865
- load: () => void;
866
- unload: () => void;
867
- static create: (emitter: Emitter<unknown, EventModuleEvents>, logger: Logger, timeSource: TimeSource) => EventModule;
868
- addEvent: (event: Event) => void;
869
- extractEvent: (previousSample: CodedSample, sample: CodedSample) => void;
870
- private onBufferedStateChanged;
871
- }
872
- interface IncomingDataModuleListeners {
873
- ["connection state"]: Readonly<State>;
874
- }
875
- interface IncomingDataModuleEvents {
876
- ["no data timeout"]: number;
877
- ["reconnect"]: string;
878
- ["error"]: Readonly<VindralError>;
879
- }
880
- interface IncomingDataModuleStatistics {
881
- videoBitRate: number;
882
- audioBitRate: number;
883
- bytesReceived: number;
884
- }
885
- declare class IncomingDataModule {
886
- static NO_DATA_ERROR_TIMEOUT: number;
887
- static NO_DATA_TIMEOUT: number;
888
- static UPDATE_RECEIVED_BYTES_INTERVAL: number;
889
- private emitter;
890
- private timers;
891
- private bytesReceived;
892
- private timeoutInterval?;
893
- private lastBytesUpdated;
894
- constructor(emitter: Emitter<IncomingDataModuleListeners, IncomingDataModuleEvents>);
895
- static create: (emitter: Emitter<IncomingDataModuleListeners, IncomingDataModuleEvents>) => IncomingDataModule;
896
- load: () => void;
897
- unload: () => void;
898
- averageBitRate: (type: Type) => number;
899
- totalBytesReceived: () => number;
900
- add: (type: Type, bytes: number) => void;
901
- getStatistics: () => IncomingDataModuleStatistics;
902
- private onConnectionState;
903
- private checkTimeout;
904
- private clearTimeoutInterval;
905
- }
906
- declare class LoggerModule {
907
- private level;
908
- private categoryLevels;
909
- static create: () => LoggerModule;
910
- getLogger: (category: string) => LoggerInstance;
911
- setLevel: (level: Level, category?: string | undefined) => void;
912
- unload: () => void;
913
- private onLog;
914
- }
915
- interface RecoveredContext {
916
- error: Error;
917
- count: number;
918
- }
919
- interface MseModuleEvents {
920
- ["error"]: Readonly<VindralError>;
921
- ["recovered from media error"]: Readonly<RecoveredContext>;
922
- }
923
- interface MseModuleListeners {
924
- ["init segment"]: Readonly<InitSegment>;
925
- ["coded sample"]: Readonly<CodedSample>;
926
- }
927
- interface MseModuleStatistics {
928
- quotaErrorCount: number;
929
- totalVideoFrames?: number;
930
- droppedVideoFrames?: number;
931
- }
932
- declare class MseModule {
933
- private readonly maxChunkSize;
934
- private logger;
935
- private timers;
936
- private emitter;
937
- private mediaElement;
938
- private tracks;
939
- private mediaSource;
940
- private trackContexts;
941
- private autoRecoverFromMediaErrors;
942
- private quotaErrorCount;
943
- private recoveredFromErrorCount;
944
- constructor(logger: Logger, emitter: Emitter<MseModuleListeners, MseModuleEvents>, mediaElement: HTMLMediaElement, tracks: Track[]);
945
- static create: (logger: Logger, emitter: Emitter<MseModuleListeners, MseModuleEvents>, mediaElement: HTMLMediaElement, tracks: Track[]) => Promise<MseModule>;
946
- load: () => void;
947
- unload: () => void;
948
- getStatistics: () => MseModuleStatistics;
949
- private open;
950
- getBuffer: (type: Type) => TimeRange[];
951
- init: (initSegment: Readonly<InitSegment>) => void;
952
- append: (samples: Readonly<CodedSample>[], needsInitSegment: boolean) => Promise<void>;
953
- private onCodedSample;
954
- private onSourceEnded;
955
- }
956
- interface PictureInPictureListeners {
957
- ["add picture in picture listener"]: {
958
- element: Readonly<HTMLMediaElement>;
959
- };
960
- }
961
- interface PictureInPictureEvents {
962
- ["enter picture in picture"]: Readonly<PictureInPictureSizeSource>;
963
- ["exit picture in picture"]: void;
964
- }
965
- declare class PictureInPictureModule {
966
- private logger;
967
- private emitter;
968
- private element?;
969
- private pictureInPictureWindow?;
970
- private constructor();
971
- static create: (emitter: Emitter<PictureInPictureListeners, PictureInPictureEvents>, logger: Logger) => PictureInPictureModule;
972
- unload: () => void;
973
- onEnablePictureInPicture: (params: PictureInPictureListeners["add picture in picture listener"]) => void;
974
- getPictureInPictureSize: () => Size | undefined;
975
- isPictureInPictureActive: () => boolean;
976
- isPictureInPictureSupported: () => boolean;
977
- requestPictureInPicture: () => Promise<void>;
978
- exitPictureInPicture: () => Promise<void>;
979
- private requestStandardPictureInPicture;
980
- private exitStandardPictureInPicture;
981
- private isStandardPictureInPictureSupported;
982
- private isStandardPictureInPictureActive;
983
- private isWebkitPresentationModeActive;
984
- private requestWebkitPresentationMode;
985
- private isWebkitPresentationModeSupported;
986
- }
987
- interface QualityOfServiceModuleEvents {
988
- ["fatal quality of service"]: void;
989
- }
990
- interface QualityOfServiceModuleStatistics {
991
- timeSpentBuffering: number;
992
- bufferingEventsCount: number;
993
- fatalQosCount: number;
994
- timeSpentRatio: {
995
- [bitRate: string]: number;
996
- };
997
- }
998
- declare class QualityOfServiceModule {
999
- private readonly minBufferFullnessLengthForRegression;
1000
- private logger;
1001
- private emitter;
1002
- private timers;
1003
- private metrics;
1004
- private bufferFullness;
1005
- private bufferFullnessRegression?;
1006
- private currentLevel?;
1007
- private isSuspended;
1008
- private _fatalQosCount;
1009
- private hasAsserted;
1010
- constructor(emitter: Emitter<QualityOfServiceModuleEvents>, logger: Logger);
1011
- static create: (emitter: Emitter<QualityOfServiceModuleEvents>, logger: Logger) => QualityOfServiceModule;
1012
- load: () => void;
1013
- unload: () => void;
1014
- suspend: () => void;
1015
- unsuspend: () => void;
1016
- fatalQosGraceTime: () => number;
1017
- get fatalQosCount(): number;
1018
- incrementFatalQosCount: () => void;
1019
- getMetrics: () => Metrics;
1020
- getLevelStats: (renditionLevel: RenditionLevel) => PerLevelStats | undefined;
1021
- getBufferFullnessRegression: () => Regression | undefined;
1022
- activeRatios: () => Map<string, number>;
1023
- bufferingRatios: () => Map<string, number>;
1024
- bufferingEventsLast: (last: number) => number;
1025
- timeSpentBufferingLast: (last: number) => number;
1026
- timeSpentActiveLast: (last: number) => number;
1027
- timeSpentPlayingInAtLeastLevelRatio: (renditionLevel: RenditionLevel) => number;
1028
- timeSpentBuffering: () => number;
1029
- timeActive: () => number;
1030
- getStatistics: () => QualityOfServiceModuleStatistics;
1031
- private onRenditionLevelChanged;
1032
- private onBufferFullness;
1033
- private onPlaybackState;
1034
- private onVideoDecodeRate;
1035
- private calculateLinearRegression;
1036
- private stopBuffering;
1037
- private startBuffering;
1038
- private setActive;
1039
- private unsetActive;
1040
- }
1041
- interface SubscriptionModuleListeners {
1042
- ["subscription changed"]: Readonly<SubscriptionChange>;
1043
- }
1044
- declare class SubscriptionModule {
1045
- private logger;
1046
- private timers;
1047
- private emitter;
1048
- private targetSubscription;
1049
- private currentSubscription;
1050
- private _isSwitchingSubscription;
1051
- private pendingSubscriptionTimeoutId?;
1052
- private burstMs;
1053
- private constructor();
1054
- unload: () => void;
1055
- static create: (logger: Logger, emitter: Emitter<SubscriptionModuleListeners>, subscription: Subscription) => SubscriptionModule;
1056
- isSwitchingSubscription: () => boolean;
1057
- getTargetSubscription: () => Subscription;
1058
- getCurrentSubscription: () => Subscription;
1059
- enableBurst: (bufferTimeMs: number) => void;
1060
- setSize: (size: Readonly<Size>) => void;
1061
- setVideoConstraint: (constraint: VideoConstraint) => void;
1062
- setAudioConstraint: (constraint: AudioConstraint) => void;
1063
- setVideoBitRate: (bitRate: number) => void;
1064
- setAudioBitRate: (bitRate: number) => void;
1065
- setChannelId: (channelId: string) => void;
1066
- setLanguage: (language: string | undefined) => void;
1067
- setVideoCodec: (videoCodec: VideoCodec | undefined) => void;
1068
- setAudioCodec: (audioCodec: AudioCodec | undefined) => void;
1069
- private setBurst;
1070
- private isNewSubscription;
1071
- private onSubscriptionChanged;
1072
- private scheduleSubscriptionChange;
1073
- }
1074
- interface SyncSample {
1075
- type: Type;
1076
- isSync: boolean;
1077
- channelId: string;
1078
- timestamp: number;
1079
- timescale: number;
1080
- }
1081
- interface SyncModuleListeners {
1082
- ["rtt"]: number;
1083
- }
1084
- interface SyncModuleEvents {
1085
- ["channel reset"]: void;
1086
- }
1087
- interface SyncInfo {
1088
- wallclockTime: number;
1089
- timestamp: number;
1090
- }
1091
- interface BufferSource {
1092
- getTargetBufferTime(): number;
1093
- }
1094
- interface SyncModuleStatistics {
1095
- drift: number | undefined;
1096
- driftAdjustmentCount: number;
1097
- timeshiftDriftAdjustmentCount: number;
1098
- discardedTimeInfoCount: number;
1099
- seekTime: number;
1100
- }
1101
- declare class SyncModule {
1102
- private emitter;
1103
- private logger;
1104
- private playbackSource;
1105
- private bufferSource;
1106
- private lastSeekTime;
1107
- readonly seekCooldownTime = 1000;
1108
- readonly seekTimeoutTime = 5000;
1109
- readonly syncMaxBehind: number;
1110
- readonly syncMaxBehindMultiplierStep = 1;
1111
- readonly syncMaxBehindIncreaseEvery = 3;
1112
- readonly syncMaxBehindMaximumAllowed = 2000;
1113
- syncMaxBehindMultiplier: number;
1114
- timeshiftOnAudio: boolean;
1115
- readonly syncMaxAhead = 150;
1116
- timeshiftSync: {
1117
- enabled: boolean;
1118
- maxBehind: number;
1119
- multiplier: number;
1120
- maxBehindAllowed: number;
1121
- overshoot: number;
1122
- minOvershootAllowed: number;
1123
- maxOvershootAllowed: number;
1124
- };
1125
- readonly maxTimeSyncDifferenceTolerance = 150;
1126
- private timers;
1127
- private rtt;
1128
- private channelSyncInfo;
1129
- private driftAdjustmentsCount;
1130
- private timeshiftDriftAdjustmentCount;
1131
- private timestampOffset?;
1132
- private currentChannelId?;
1133
- private highestSeenTimestamps;
1134
- private discardedTimeInfoCount;
1135
- private isSuspended;
1136
- private isSyncAdjustmentActivated;
1137
- private constructor();
1138
- load: () => void;
1139
- unload: () => void;
1140
- suspend: () => void;
1141
- unsuspend: () => void;
1142
- activateSyncAdjustments: () => void;
1143
- static create: (emitter: Emitter<SyncModuleListeners, SyncModuleEvents>, logger: Logger, playbackSource: PlaybackSource, bufferSource: BufferSource, isTimeshiftSyncEnabled: boolean) => SyncModule;
1144
- reset: () => void;
1145
- getTimeshiftOffset: () => number;
1146
- getCurrentChannelId: () => string | undefined;
1147
- updateChannelSyncInfo(channelId: string, syncInfo: SyncInfo): void;
1148
- private tryApplySyncInfo;
1149
- getLiveEdgeTime: (channelId: string) => number | undefined;
1150
- getLiveEdgeTimeLatencyAdjusted: (channelId: string) => number | undefined;
1151
- getWallclockTime: (channelId: string) => number | undefined;
1152
- getWallclockTimeLatencyAdjusted: (channelId: string) => number | undefined;
1153
- get serverCurrentTime(): number;
1154
- get channelCurrentTime(): number;
1155
- processSample: <T extends SyncSample>(sample: T) => T;
1156
- getStatistics: () => SyncModuleStatistics;
1157
- get drift(): number | undefined;
1158
- private updateRtt;
1159
- isPlaybackSourceReadyToSeek: () => boolean;
1160
- private isSeeking;
1161
- private isAllowedToSync;
1162
- private isSeekCooldownExpired;
1163
- private isSeekTimeoutExpired;
1164
- private currentTimeshiftMaxBehind;
1165
- private currentSyncMaxBehind;
1166
- private currentSyncMaxAhead;
1167
- private tryTimeshiftSync;
1168
- private onSync;
1169
- }
1170
- declare const defaultOptions: {
1171
- sizeBasedResolutionCapEnabled: boolean;
1172
- pictureInPictureEnabled: boolean;
1173
- abrEnabled: boolean;
1174
- burstEnabled: boolean;
1175
- mseEnabled: boolean;
1176
- mseOpusEnabled: boolean;
1177
- muted: boolean;
1178
- minBufferTime: number;
1179
- maxBufferTime: number;
1180
- logLevel: Level;
1181
- maxSize: Size;
1182
- maxVideoBitRate: number;
1183
- maxAudioBitRate: number;
1184
- tags: string[];
1185
- media: Media;
1186
- poster: string | boolean;
1187
- reconnectHandler: (state: ReconnectState) => Promise<boolean> | boolean;
1188
- iosWakeLockEnabled: boolean;
1189
- telemetryEnabled: boolean;
1190
- advanced: {
1191
- wasmDecodingConstraint: Partial<VideoConstraint>;
474
+ interface IncomingDataModuleStatistics {
475
+ videoBitRate: number;
476
+ audioBitRate: number;
477
+ bytesReceived: number;
478
+ }
479
+ interface VideoPlayerStatistics {
480
+ renderedFrameCount: number;
481
+ rendererDroppedFrameCount: number;
482
+ contextLostCount: number;
483
+ contextRestoredCount: number;
484
+ }
485
+ interface MseModuleStatistics {
486
+ quotaErrorCount: number;
487
+ mediaSourceOpenTime: number;
488
+ totalVideoFrames?: number;
489
+ droppedVideoFrames?: number;
490
+ }
491
+ interface QualityOfServiceModuleStatistics {
492
+ timeSpentBuffering: number;
493
+ bufferingEventsCount: number;
494
+ fatalQosCount: number;
495
+ timeSpentRatio: {
496
+ [bitRate: string]: number;
1192
497
  };
1193
- };
498
+ }
499
+ interface SyncModuleStatistics {
500
+ drift: number | undefined;
501
+ driftAdjustmentCount: number;
502
+ timeshiftDriftAdjustmentCount: number;
503
+ seekTime: number;
504
+ }
1194
505
  interface UserAgentInformation {
1195
506
  userAgent: string;
1196
507
  locationOrigin: string;
1197
508
  locationPath: string;
1198
509
  ancestorOrigins?: string[];
1199
510
  }
1200
- declare type StatisticsType<K> = K extends {
1201
- getStatistics: () => unknown;
1202
- } ? ReturnType<K["getStatistics"]> : never;
1203
- declare type StatisticsTypes = {
1204
- [Property in keyof Modules]: StatisticsType<Modules[Property]>;
1205
- };
1206
- export declare type ModuleStatistics = Flatten<StatisticsTypes>;
1207
- export declare type Statistics = ModuleStatistics & UserAgentInformation & {
511
+ declare type ModuleStatistics = AdaptivityStatistics & BufferTimeStatistics & ConnectionStatistics & ConstraintCapStatistics & DecoderStatistics & DocumentStateModulesStatistics & IncomingDataModuleStatistics & MseModuleStatistics & PlaybackModuleStatistics & QualityOfServiceModuleStatistics & RenditionsModuleStatistics & SyncModuleStatistics & TelemetryModuleStatistics & VideoPlayerStatistics;
512
+ declare type Statistics = ModuleStatistics & UserAgentInformation & {
1208
513
  version: string;
1209
514
  ip?: string;
1210
515
  url: string;
@@ -1217,15 +522,70 @@ export declare type Statistics = ModuleStatistics & UserAgentInformation & {
1217
522
  channelId: string;
1218
523
  channelGroupId?: string;
1219
524
  timeToFirstFrame?: number;
525
+ iosMediaElementEnabled?: boolean;
1220
526
  };
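For orientation, a minimal sketch of reading a few of the fields declared in the Statistics intersection above from a running client; the 10-second interval is arbitrary and the URL/channel values are the demo ones reused from the class documentation below.

```typescript
import { Vindral } from "@vindral/web-sdk"

const instance = new Vindral({
  url: "https://lb.cdn.vindral.com",
  channelId: "vindral_demo1_ci_099ee1fa-80f3-455e-aa23-3d184e93e04f",
})
void instance.play()

// Poll getStatistics() and log a few of the fields from the Statistics type above
const statsTimer = setInterval(() => {
  const stats = instance.getStatistics()
  console.log("video bit rate (bits/s):", stats.videoBitRate)
  console.log("time spent buffering (ms):", stats.timeSpentBuffering)
  console.log("time to first frame (ms):", stats.timeToFirstFrame)
}, 10_000)

// Stop polling before unloading the instance:
// clearInterval(statsTimer); await instance.unload()
```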
527
+ /**
528
+ * Represents a Vindral client instance
529
+ *
530
+ * The most essential methods when using the Vindral class are:
531
+ *
532
+ * - connect() - this has to be called to actually start connecting
533
+ * - attach() - to attach the Vindral video view to the DOM so that users can see it
534
+ * - userInput() - to activate audio on browsers that require a user gesture to play audio
535
+ * - unload() - unloads the instance; it is very important that this is called when cleaning up the Vindral instance, otherwise background timers may leak.
536
+ *
537
+ * The Vindral instance will emit a variety of events during its lifetime. Use .on("event-name", callback) to listen to these events.
538
+ * See [[PublicVindralEvents]] for the event types that can be emitted.
539
+ *
540
+ * ```typescript
541
+ * // minimal configuration of a Vindral client instance
542
+ * const instance = new Vindral({
543
+ * url: "https://lb.cdn.vindral.com",
544
+ * channelId: "vindral_demo1_ci_099ee1fa-80f3-455e-aa23-3d184e93e04f",
545
+ * })
546
+ *
547
+ * // Will be called when timed metadata is received
548
+ * instance.on("metadata", console.log)
549
+ *
550
+ * // Will be called when a user interaction is needed to activate audio
551
+ * instance.on("needs user input", console.log)
552
+ *
553
+ * // Start connecting to the cdn
554
+ * instance.connect()
555
+ *
556
+ * // Attach the video view to the DOM
557
+ * instance.attach(document.getElementById("root"))
558
+ *
559
+ * // When done with the instance
560
+ * instance.unload()
561
+ * ```
562
+ */
1221
563
  export declare class Vindral extends Emitter<PublicVindralEvents> {
1222
564
  private static MAX_POOL_SIZE;
1223
565
  private static INITIAL_MAX_BIT_RATE;
1224
566
  private static PING_TIMEOUT;
567
+ private static DISCONNECT_TIMEOUT;
568
+ /**
569
+ * Picture in picture
570
+ */
1225
571
  readonly pictureInPicture: {
572
+ /**
573
+ * Enters picture in picture
574
+ * @returns a promise that resolves if successful
575
+ */
1226
576
  enter: () => Promise<void>;
577
+ /**
578
+ * Exits picture in picture
579
+ * @returns a promise that resolves if successful
580
+ */
1227
581
  exit: () => Promise<void>;
582
+ /**
583
+ * returns whether picture in picture is currently active
584
+ */
1228
585
  isActive: () => boolean;
586
+ /**
587
+ * returns whether picture in picture is supported
588
+ */
1229
589
  isSupported: () => boolean;
1230
590
  };
1231
591
  private browser;
@@ -1249,47 +609,200 @@ export declare class Vindral extends Emitter<PublicVindralEvents> {
1249
609
  private pool;
1250
610
  private durationSessions;
1251
611
  private sizes;
612
+ private isSuspended;
613
+ private disconnectTimeout;
1252
614
  constructor(options: Options);
615
+ /**
616
+ * Attaches the video view to a DOM element. The Vindral video view will be sized to fill this element while
617
+ * maintaining the correct aspect ratio.
618
+ * @param container the container element to append the video view to. Often a div element.
619
+ * @returns
620
+ */
1253
621
  attach: (container: HTMLElement) => void;
622
+ /**
623
+ * Set the current volume.
624
+ * Setting this to 0 is not equivalent to muting the audio.
625
+ * Setting this to >0 is not equivalent to unmuting the audio.
626
+ */
1254
627
  set volume(volume: number);
628
+ /**
629
+ * The current volume. Note that the volume can still be set while playback is muted.
630
+ */
1255
631
  get volume(): number;
632
+ /**
633
+ * Set playback to muted/unmuted
634
+ */
1256
635
  set muted(muted: boolean);
636
+ /**
637
+ * Whether the playback is muted or not
638
+ */
1257
639
  get muted(): boolean;
640
+ /**
641
+ * Media (audio | video | audio+video)
642
+ */
1258
643
  get media(): Media;
644
+ /**
645
+ * The current average video bit rate in bits/s
646
+ */
1259
647
  get videoBitRate(): number;
648
+ /**
649
+ * The current average audio bit rate in bits/s
650
+ */
1260
651
  get audioBitRate(): number;
652
+ /**
653
+ * The current connection state
654
+ */
1261
655
  get connectionState(): Readonly<State>;
656
+ /**
657
+ * The current playback state
658
+ */
1262
659
  get playbackState(): Readonly<PlaybackState>;
660
+ /**
661
+ * The current buffer fullness as a floating point value between 0 and 1, where 1 is full and 0 is empty.
662
+ */
1263
663
  get bufferFullness(): number;
664
+ /**
665
+ * Whether user bandwidth savings by capping the video resolution to the size of the video element is enabled
666
+ */
1264
667
  get sizeBasedResolutionCapEnabled(): boolean;
668
+ /**
669
+ * Enables or disables user bandwidth savings by capping the video resolution to the size of the video element.
670
+ */
1265
671
  set sizeBasedResolutionCapEnabled(enabled: boolean);
672
+ /**
673
+ * Whether ABR is currently enabled
674
+ */
1266
675
  get abrEnabled(): boolean;
676
+ /**
677
+ * Enable or disable ABR
678
+ *
679
+ * When disabled, the client will immediately stop changing rendition level based on QoS metrics
680
+ *
681
+ * Note: It is strongly recommended to keep this enabled, as disabling it can severely increase
682
+ * the number of buffering events for viewers.
683
+ */
1267
684
  set abrEnabled(enabled: boolean);
685
+ /**
686
+ * Estimated live edge time for the current channel
687
+ */
1268
688
  get serverEdgeTime(): number | undefined;
689
+ /**
690
+ * @returns Estimated wallclock time on the edge server in milliseconds
691
+ */
1269
692
  get serverWallclockTime(): number | undefined;
693
+ /**
694
+ * Local current time normalized between all channels in the channel group
695
+ */
1270
696
  get currentTime(): number;
697
+ /**
698
+ * Current time for the channel. This is the actual stream time, passed on from your ingress.
699
+ * Integer overflow could make this value differ from your encoder timestamps if it has been rolling for more
700
+ * than 42 days with RTMP as target.
701
+ *
702
+ * Note: This is not normalized between channels, thus it can make jumps when switching channels
703
+ */
1271
704
  get channelCurrentTime(): number;
705
+ /**
706
+ * The current target buffer time in milliseconds
707
+ */
1272
708
  get targetBufferTime(): number;
709
+ /**
710
+ * Set the current target buffer time in milliseconds
711
+ */
1273
712
  set targetBufferTime(bufferTimeMs: number);
713
+ /**
714
+ * The estimated playback latency based on target buffer time, the connection RTT and local playback drift
715
+ */
1274
716
  get playbackLatency(): number | undefined;
717
+ /**
718
+ * The estimated UTC timestamp (in ms) for the playhead.
719
+ */
1275
720
  get playbackWallclockTime(): number | undefined;
721
+ /**
722
+ * Channels that can be switched between
723
+ */
1276
724
  get channels(): ReadonlyArray<Channel>;
725
+ /**
726
+ * Languages available
727
+ */
1277
728
  get languages(): ReadonlyArray<string>;
729
+ /**
730
+ * The current language
731
+ */
1278
732
  get language(): string | undefined;
733
+ /**
734
+ * Set the current language
735
+ */
1279
736
  set language(language: string | undefined);
737
+ /**
738
+ * The current channelId
739
+ */
1280
740
  get channelId(): string;
741
+ /**
742
+ * Set the current channelId
743
+ *
744
+ * Possible channels to set are available from [[channels]]
745
+ */
1281
746
  set channelId(channelId: string);
747
+ /**
748
+ * Max size that will be subscribed to
749
+ */
1282
750
  get maxSize(): Size;
751
+ /**
752
+ * Set max size that will be subscribed to
753
+ *
754
+ * Note: If ABR is disabled, setting this will make the client instantly subscribe to this size
755
+ */
1283
756
  set maxSize(size: Size);
757
+ /**
758
+ * The max video bit rate that will be subscribed to
759
+ *
760
+ * Note: Returns Number.MAX_SAFE_INTEGER if no limits have been set
761
+ */
1284
762
  get maxVideoBitRate(): number;
763
+ /**
764
+ * Set max video bit rate that will be subscribed to
765
+ *
766
+ * Note: If ABR is disabled, setting this will make the client instantly subscribe to this bitrate
767
+ */
1285
768
  set maxVideoBitRate(bitRate: number);
769
+ /**
770
+ * The max audio bit rate that will be subscribed to
771
+ *
772
+ * Note: Returns Number.MAX_SAFE_INTEGER if no limits have been set
773
+ */
1286
774
  get maxAudioBitRate(): number;
775
+ /**
776
+ * Set max audio bit rate that will be subscribed to
777
+ *
778
+ * Note: If ABR is disabled, setting this will make the client instantly subscribe to this bit rate
779
+ */
1287
780
  set maxAudioBitRate(bitRate: number);
781
+ /**
782
+ * The rendition levels available.
783
+ */
1288
784
  get renditionLevels(): ReadonlyArray<RenditionLevel>;
785
+ /**
786
+ * The current rendition level
787
+ */
1289
788
  get currentRenditionLevel(): Readonly<RenditionLevel> | undefined;
789
+ /**
790
+ * The target rendition level that the client is currently switching to
791
+ */
1290
792
  get targetRenditionLevel(): Readonly<RenditionLevel> | undefined;
793
+ /**
794
+ * True if the client is currently switching from one rendition level to another
795
+ */
1291
796
  get isSwitchingRenditionLevel(): boolean;
797
+ /**
798
+ * The time ranges buffered for video.
799
+ * The ranges are specified in milliseconds.
800
+ */
1292
801
  get videoBufferedRanges(): ReadonlyArray<TimeRange>;
802
+ /**
803
+ * The time ranges buffered for audio.
804
+ * The ranges are specified in milliseconds.
805
+ */
1293
806
  get audioBufferedRanges(): ReadonlyArray<TimeRange>;
1294
807
  get lastBufferEvent(): Readonly<BufferStateEvent>;
1295
808
  get activeRatios(): Map<string, number>;
@@ -1297,482 +810,498 @@ export declare class Vindral extends Emitter<PublicVindralEvents> {
1297
810
  get timeSpentBuffering(): number;
1298
811
  get timeActive(): number;
1299
812
  get mediaElement(): HTMLMediaElement | HTMLCanvasElement;
813
+ /**
814
+ * Get active Vindral Options
815
+ */
1300
816
  getOptions: () => Options & typeof defaultOptions;
817
+ /**
818
+ * Get the URL for fetching a thumbnail. Note that fetching thumbnails only works for an active channel.
819
+ */
1301
820
  getThumbnailUrl: () => string;
821
+ /**
822
+ * Update authentication token on an already established and authenticated connection
823
+ */
1302
824
  updateAuthenticationToken: (token: string) => void;
825
+ /**
826
+ * @deprecated since 3.0.0. Use play() instead.
827
+ * Connects to the configured channel and starts streaming
828
+ */
1303
829
  connect: () => void;
830
+ private _connect;
831
+ /**
832
+ * Get options that can be used for CastSender
833
+ */
1304
834
  getCastOptions: () => Options;
1305
835
  private connectionInfo;
1306
836
  private estimateRTT;
1307
837
  private connectHandler;
1308
838
  private emitLanguagesIfChanged;
1309
839
  private filterRenditions;
840
+ /**
841
+ * Patch the subscription with properties from the channel that aren't known until connection
842
+ * @param channel Channel with the renditions to patch the subscription based on
843
+ */
1310
844
  private patchSubscription;
1311
845
  private isSupportedVideoCodecProfile;
1312
846
  private supportedAudioCodecs;
1313
847
  private initializeDecodingModule;
848
+ /**
849
+ * Fully unloads the instance. This disconnects the client and stops any background tasks.
850
+ * This client instance cannot be used after this has been called.
851
+ */
1314
852
  unload: () => Promise<void>;
853
+ /**
854
+ * @deprecated since 3.0.0. Use play() instead.
855
+ *
856
+ * Activates audio or video on web browsers that require a user gesture to enable media playback.
857
+ * The Vindral instance will emit a "needs user input" event to indicate when this is needed.
858
+ * But it is also safe to pre-emptively call this if it is more convenient - such as in cases where
859
+ * the Vindral instance itself is created in a user input event.
860
+ *
861
+ * Requirements: This method needs to be called within a user-input event handler to function properly, such as
862
+ * an onclick handler.
863
+ *
864
+ * Note: Even if you pre-emptively call this, it is still recommended to listen to "needs user input"
865
+ * and handle that event gracefully.
866
+ */
1315
867
  userInput: () => void;
1316
- private play;
868
+ pause: () => void;
869
+ /**
870
+ *
871
+ * Start playing the stream.
872
+ *
873
+ * This method also activates audio or video on web browsers that require a user gesture to enable media playback.
874
+ * The Vindral instance will emit a "needs user input" event to indicate when this is needed.
875
+ * But it is also safe to pre-emptively call this if it is more convenient - such as in cases where
876
+ * the Vindral instance itself is created in a user input event.
877
+ *
878
+ * Note: In most browsers this method needs to be called within a user-input event handler, such as
879
+ * an onclick handler in order to activate audio. Most implementations call this directly after constructing the Vindral
880
+ * instance once in order to start playing, and then listen to a user event in order to allow audio to be activated.
881
+ *
882
+ * Note 2: Even if you pre-emptively call this, it is still recommended to listen to "needs user input"
883
+ * and handle that event gracefully.
884
+ */
885
+ play: () => Promise<void>;
886
+ /**
887
+ * The time in milliseconds since the instance was created
888
+ */
1317
889
  get uptime(): number;
1318
890
  getStatistics: () => Statistics;
891
+ private resetModules;
892
+ private suspend;
893
+ private unsuspend;
1319
894
  private getRuntimeInfo;
895
+ private onMediaElementState;
1320
896
  private onBufferEvent;
897
+ /**
898
+ * Aligns size and bitrate to match a rendition level correctly
899
+ */
1321
900
  private alignSizeAndBitRate;
1322
901
  private get currentSubscription();
1323
902
  private get targetSubscription();
1324
903
  private timeToFirstFrame;
1325
904
  private willUseMediaSource;
1326
905
  }
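Since connect() and userInput() are deprecated above in favour of play(), a 3.x lifecycle sketch could look as follows; the "root" and "unmute-button" element ids are illustrative assumptions, and the URL/channel are the demo values from the class documentation.

```typescript
import { Vindral } from "@vindral/web-sdk"

const instance = new Vindral({
  url: "https://lb.cdn.vindral.com",
  channelId: "vindral_demo1_ci_099ee1fa-80f3-455e-aa23-3d184e93e04f",
})

// Attach the video view and start playback; play() replaces connect() + userInput()
instance.attach(document.getElementById("root")!)
await instance.play()

// If audio could not be activated automatically, call play() again from a click handler
instance.on("needs user input", () => {
  const button = document.getElementById("unmute-button")
  button?.addEventListener("click", () => void instance.play(), { once: true })
})

// When pausing and resuming:
// instance.pause()
// await instance.play()

// Always unload when done, otherwise background timers may leak:
// await instance.unload()
```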
1327
- interface TelemetryModuleOptions {
1328
- url: string;
1329
- enabled?: boolean;
1330
- interval?: number;
1331
- includeErrors?: boolean;
1332
- includeEvents?: boolean;
1333
- includeStats?: boolean;
1334
- maxRetries?: number;
1335
- maxErrorReports?: number;
1336
- maxEvents?: number;
1337
- }
1338
- interface ShiftedEdgeEvent {
1339
- code: "shifted_edge";
1340
- edgeUrl: string;
1341
- }
1342
- declare type TelemetryEvent = ShiftedEdgeEvent;
1343
- interface FlushOptions {
1344
- isFinal?: boolean;
1345
- initiator: StatsInitiator;
1346
- }
1347
- interface ParentContext {
1348
- getStatistics(): Statistics;
1349
- readonly connectionState: Readonly<State>;
1350
- }
1351
- interface TelemetryModuleListeners {
1352
- ["visibilitystate"]: VisibilityState;
1353
- ["pagehide"]: PageTransitionEvent;
1354
- ["error"]: Readonly<VindralError>;
1355
- }
1356
- interface TelemetryModuleEvents {
1357
- ["send signal"]: Readonly<string>;
1358
- }
1359
906
  interface TelemetryModuleStatistics {
1360
907
  errorCount: number;
1361
908
  }
1362
- declare type StatsInitiator = "interval" | "visibilitychange" | "pagehide" | "unload";
1363
- declare class TelemetryModule {
1364
- private logger;
1365
- private timers;
1366
- private emitter;
1367
- private options;
1368
- private parentContext;
1369
- private unsentLines;
1370
- private retries;
1371
- private errorCount;
1372
- private eventCount;
1373
- private statsCount;
1374
- private constructor();
1375
- unload: () => void;
1376
- static create: (logger: Logger, emitter: Emitter<TelemetryModuleListeners, TelemetryModuleEvents>, options: TelemetryModuleOptions, parentContext: ParentContext) => TelemetryModule;
1377
- load: () => void;
1378
- addStats: ({ isFinal, initiator }?: FlushOptions, extra?: Record<string, unknown>) => void;
1379
- addEvent: (event: TelemetryEvent) => void;
1380
- getStatistics: () => TelemetryModuleStatistics;
1381
- private send;
1382
- private onVisibilityChange;
1383
- private onPageHide;
1384
- private onRejection;
1385
- private onError;
1386
- private addError;
1387
- }
1388
- declare class TimerModule {
1389
- private pendingTimeouts;
1390
- private pendingIntervals;
1391
- static create: () => TimerModule;
1392
- clearTimeout: (handle: number) => void;
1393
- clearInterval: (handle: number) => void;
1394
- setTimeout: (callback: (...args: unknown[]) => void, timeout: number, ...args: unknown[]) => number;
1395
- setInterval: (callback: (...args: unknown[]) => void, interval: number, ...args: unknown[]) => number;
1396
- unload: () => void;
1397
- }
1398
- declare class UnpauseModule {
1399
- private logger;
1400
- private element;
1401
- private documentState;
1402
- private timers;
1403
- private constructor();
1404
- static create: (logger: Logger, element: MediaElement, documentState: DocumentState) => UnpauseModule;
1405
- unload: () => void;
1406
- private unpause;
1407
- }
1408
- interface VideoPlayerModuleListeners {
1409
- ["decoded frame"]: Readonly<DecodedSample>;
1410
- ["error"]: Readonly<VindralError>;
1411
- }
1412
- interface ClockSource {
1413
- readonly currentTime: number;
1414
- }
1415
- interface VideoPlayerStatistics {
1416
- renderedFrameCount: number;
1417
- rendererDroppedFrameCount: number;
1418
- contextLostCount: number;
1419
- contextRestoredCount: number;
1420
- }
1421
- declare class VideoPlayerModule {
1422
- private readonly oldTimestampLimit;
1423
- private pool;
1424
- private logger;
1425
- private emitter;
1426
- private videoRenderer;
1427
- private renderQueue;
1428
- private clockSource;
1429
- private isFirstFrame;
1430
- private animationFrameRequest?;
1431
- private renderedFrameCount;
1432
- private rendererDroppedFrameCount;
1433
- constructor(emitter: Emitter<VideoPlayerModuleListeners, unknown>, logger: Logger, clockSource: ClockSource, pool: ArrayBufferPool);
1434
- load: () => void;
1435
- unload: () => Promise<void>;
1436
- suspend: () => void;
1437
- unsuspend: () => void;
1438
- element: () => HTMLCanvasElement;
1439
- getStatistics: () => VideoPlayerStatistics;
1440
- static create: (emitter: Emitter<VideoPlayerModuleListeners, unknown>, logger: Logger, clockSource: ClockSource, pool: ArrayBufferPool) => VideoPlayerModule;
1441
- private onDecodedFrame;
1442
- private render;
1443
- private renderSample;
1444
- private stopRender;
1445
- }
1446
- export interface Modules {
1447
- audioPlayer?: AudioPlayerModule;
1448
- videoPlayer?: VideoPlayerModule;
1449
- decoder?: DecoderModule;
1450
- mseModule?: MseModule;
1451
- constraintCap: ConstraintCapModule;
1452
- adaptivity: AdaptivityModule;
1453
- connection: ConnectionModule;
1454
- playback: PlaybackModule;
1455
- pictureInPicture: PictureInPictureModule;
1456
- logger: LoggerModule;
1457
- timer: TimerModule;
1458
- subscription: SubscriptionModule;
1459
- renditions: RenditionsModule;
1460
- qualityOfService: QualityOfServiceModule;
1461
- event: EventModule;
1462
- sync: SyncModule;
1463
- telemetry?: TelemetryModule;
1464
- documentState: DocumentStateModule;
1465
- incomingData: IncomingDataModule;
1466
- bufferTime: BufferTimeModule;
1467
- unpause?: UnpauseModule;
1468
- }
909
+ /**
910
+ * The current reconnect state to use to decide whether to keep reconnecting or not
911
+ */
1469
912
  export interface ReconnectState {
913
+ /**
914
+ * The number of retry attempts so far.
915
+ * This gets reset on every successful connect, so it will start from zero every
916
+ * time the client instance gets disconnected and will increment until the
917
+ * client instance makes a successful connection attempt.
918
+ */
1470
919
  reconnectRetries: number;
1471
920
  }
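As an illustration of how ReconnectState is consumed, a possible reconnectHandler (see the option further down) that gives up after 10 consecutive failed attempts; the cap of 10 is arbitrary, and the SDK default allows 30.

```typescript
import type { ReconnectState } from "@vindral/web-sdk"

// Allow at most 10 consecutive failed attempts (the default handler allows 30)
const reconnectHandler = (state: ReconnectState): boolean => state.reconnectRetries < 10
```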
921
+ /**
922
+ * Advanced options to override default behaviour.
923
+ */
1472
924
  export interface AdvancedOptions {
925
+ /**
926
+ * Constrains wasm decoding to this resolution.
927
+ * By default it is set to 1280 in width and height.
928
+ * This guarantees better performance on older devices and reduces battery drain in general.
929
+ */
1473
930
  wasmDecodingConstraint: Partial<VideoConstraint>;
1474
931
  }
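A sketch of overriding the wasm decoding constraint through these advanced options; the 1920x1080 values are illustrative, and the width/height fields come from the VideoConstraint shape declared elsewhere in this file.

```typescript
import { Vindral } from "@vindral/web-sdk"

// Allow wasm decoding up to 1080p instead of the default 1280x1280 constraint
const instance = new Vindral({
  url: "https://lb.cdn.vindral.com",
  channelId: "vindral_demo1_ci_099ee1fa-80f3-455e-aa23-3d184e93e04f",
  advanced: {
    wasmDecodingConstraint: { width: 1920, height: 1080 },
  },
})
```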
1475
- export declare type Media = "audio" | "video" | "audio+video";
932
+ declare type Media = "audio" | "video" | "audio+video";
933
+ /**
934
+ * Options for the Vindral instance
935
+ *
936
+ */
1476
937
  export interface Options {
938
+ /**
939
+ * URL to use when connecting to the stream
940
+ */
1477
941
  url: string;
942
+ /**
943
+ * Channel ID to connect to initially - can be changed later mid-stream when connected to a channel group.
944
+ */
1478
945
  channelId: string;
946
+ /**
947
+ * Channel group to connect to
948
+ * Note: Only needed for fast channel switching
949
+ */
1479
950
  channelGroupId?: string;
951
+ /**
952
+ * A container to attach the video view to - can be provided later with .attach() on the vindral core instance
953
+ */
1480
954
  container?: HTMLElement;
955
+ /**
956
+ * An authentication token to provide to the server when connecting - only needed for channels with authentication enabled
957
+ * Note: If not supplied when needed, an "Authentication Failed" error will be raised.
958
+ */
1481
959
  authenticationToken?: string;
960
+ /**
961
+ * Language to use initially - can be changed during runtime on the vindral instance
962
+ * Note: Only needed when multiple languages are provided - if no language is specified, one will be automatically selected.
963
+ */
1482
964
  language?: string;
965
+ /**
966
+ * Sets the log level - defaults to info
967
+ */
1483
968
  logLevel?: Level;
969
+ /**
970
+ * Sets the minimum and initial buffer time
971
+ */
1484
972
  minBufferTime?: number;
973
+ /**
974
+ * Sets the maximum buffer time allowed. The vindral instance will automatically slowly increase
975
+ * the buffer time if the user experiences too much buffering with the initial buffer time.
976
+ *
977
+ * Note: This is not yet implemented
978
+ */
1485
979
  maxBufferTime?: number;
980
+ /**
981
+ * Enables or disables user bandwidth savings by capping the video resolution to the size of the video element.
982
+ *
983
+ * Is enabled by default.
984
+ *
985
+ * Note: This is automatically set to false when abrEnabled is set to false.
986
+ */
1486
987
  sizeBasedResolutionCapEnabled?: boolean;
988
+ /**
989
+ * Enables or disables picture in picture support.
990
+ */
1487
991
  pictureInPictureEnabled?: boolean;
992
+ /**
993
+ * Enable bursting for initial connection and channel switches. This makes time to first frame faster at the
994
+ * cost of stability (more demanding due to the sudden burst of live content)
995
+ *
996
+ * Is disabled by default.
997
+ *
998
+ */
1488
999
  burstEnabled?: boolean;
1000
+ /**
1001
+ * Enable usage of the MediaSource API on supported browsers.
1002
+ *
1003
+ * Is enabled by default.
1004
+ *
1005
+ * Note: We recommend keeping this at the default value unless you have very specific needs.
1006
+ */
1489
1007
  mseEnabled?: boolean;
1008
+ /**
1009
+ * Enable Opus with the MediaSource API on supported browsers.
1010
+ *
1011
+ * Is enabled by default.
1012
+ *
1013
+ * Note: Opus generally provides better audio quality, so it is recommended to keep this enabled.
1014
+ */
1490
1015
  mseOpusEnabled?: boolean;
1016
+ /**
1017
+ * Enable or disable support for playing audio in the background for iOS devices.
1018
+ *
1019
+ * Is false (disabled) by default.
1020
+ *
1021
+ * Note: This may be enabled by default in a future (major) release
1022
+ */
1491
1023
  iosBackgroundPlayEnabled?: boolean;
1024
+ /**
1025
+ * Enable or disable Adaptive Bit Rate. This allows for automatically adapting the incoming bit rate based on
1026
+ * the viewer's bandwidth, thus avoiding buffering events. Disabling this also disables the
1027
+ * sizeBasedResolutionCapEnabled option.
1028
+ *
1029
+ * Is enabled by default.
1030
+ *
1031
+ * Note: It is strongly recommended to keep this enabled as user experience can greatly suffer without ABR.
1032
+ */
1492
1033
  abrEnabled?: boolean;
1034
+ /**
1035
+ * Enable or disable telemetry. This allows telemetry and errors to be collected.
1036
+ *
1037
+ * Is enabled by default.
1038
+ *
1039
+ * We appreciate you turning it off during development/staging so as not to bloat real telemetry data.
1040
+ *
1041
+ * Note: It is strongly recommended to keep this enabled in production as it is required for insights and KPIs.
1042
+ */
1493
1043
  telemetryEnabled?: boolean;
1044
+ /**
1045
+ * Set a cap on the maximum video size.
1046
+ * This can be used to provide user options to limit the video bandwidth usage.
1047
+ *
1048
+ * Note: This takes precedence over any size-based resolution caps.
1049
+ */
1494
1050
  maxSize?: Size;
1051
+ /**
1052
+ * Maximum audio bit rate allowed.
1053
+ * This can be used to provide user options to limit the audio bandwidth usage.
1054
+ */
1495
1055
  maxAudioBitRate?: number;
1056
+ /**
1057
+ * Maximum video bit rate allowed.
1058
+ * This can be used to provide user options to limit the video bandwidth usage.
1059
+ */
1496
1060
  maxVideoBitRate?: number;
1061
+ /**
1062
+ * Controls video element background behaviour while loading.
1063
+ * - If `false`, a black background will be shown.
1064
+ * - If undefined or `true`, a live thumbnail will be shown.
1065
+ * - If set to a string containing a URL (https://urltoimage), that image will be shown.
1066
+ * Default `true` - meaning a live thumbnail is shown
1067
+ */
1497
1068
  poster?: boolean | string;
1069
+ /**
1070
+ * Whether to start the player muted or to try to start playing audio automatically.
1071
+ */
1498
1072
  muted?: boolean;
1073
+ /**
1074
+ * Provide a custom reconnect handler to control when the instance should stop trying to
1075
+ * reconnect. The reconnect handler should either return true to allow the reconnect or
1076
+ * false to stop reconnecting. It can also return a promise with true or false if it needs
1077
+ * to make any async calls before determining whether to reconnect.
1078
+ *
1079
+ * The default reconnect handler allows 30 reconnects before stopping.
1080
+ *
1081
+ * Note: the ReconnectState gets reset every time the client instance makes a successful connection.
1082
+ * This means the default reconnect handler will only stop reconnecting after 30 _consecutive_ failed connections.
1083
+ *
1084
+ * ```typescript
1085
+ * // An example reconnect handler that will reconnect forever
1086
+ * const reconnectHandler = (state: ReconnectState) => true
1087
+ *
1088
+ * // An example reconnect handler that will fetch an url and determine whether to reconnect
1089
+ * const reconnectHandler = async (state: ReconnectState) => {
1090
+ * const result = await fetch("https://should-i-reconnect-now.com")
1091
+ * return result.ok
1092
+ * },
1093
+ * ```
1094
+ */
1499
1095
  reconnectHandler?: (state: ReconnectState) => Promise<boolean> | boolean;
1500
1096
  tags?: string[];
1501
1097
  ownerSessionId?: string;
1502
1098
  edgeUrl?: string;
1503
1099
  logShippingEnabled?: boolean;
1504
1100
  statsShippingEnabled?: boolean;
1101
+ /**
1102
+ * Enable wake lock for iOS devices.
1103
+ * The wake lock requires that the audio has been activated at least once for the instance, otherwise it will not work.
1104
+ * Other devices already provide wake lock by default.
1105
+ *
1106
+ * This option is redundant and has no effect if iosMediaElementEnabled is enabled since that automatically enables wake lock.
1107
+ *
1108
+ * Disabled by default.
1109
+ */
1505
1110
  iosWakeLockEnabled?: boolean;
1111
+ /**
1112
+ * Disabling this will revert to legacy behaviour where Vindral will try to always keep the video element playing.
1113
+ */
1114
+ pauseSupportEnabled?: boolean;
1115
+ /**
1116
+ * Enables iOS devices to use a media element for playback. This enables fullscreen and picture in picture support on iOS.
1117
+ */
1118
+ iosMediaElementEnabled?: boolean;
1119
+ /**
1120
+ * Advanced options to override default behaviour.
1121
+ */
1506
1122
  advanced?: AdvancedOptions;
1507
1123
  media?: Media;
1508
1124
  }
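Pulling several of the options above together, a sketch of a fairly typical configuration; only url and channelId are required, and every concrete number shown here is illustrative rather than a recommendation.

```typescript
import { Vindral } from "@vindral/web-sdk"

const instance = new Vindral({
  url: "https://lb.cdn.vindral.com",
  channelId: "vindral_demo1_ci_099ee1fa-80f3-455e-aa23-3d184e93e04f",
  muted: true,                // start muted; activate audio later via play() in a click handler
  minBufferTime: 1000,        // minimum and initial buffer time
  maxVideoBitRate: 2_500_000, // cap the subscribed video bit rate (bits/s)
  poster: true,               // show a live thumbnail while loading
  pauseSupportEnabled: true,  // opt in to the pause()/play() behaviour
  reconnectHandler: (state) => state.reconnectRetries < 10, // give up after 10 consecutive failures
})
```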
1125
+ /**
1126
+ * Contextual information about the language switch
1127
+ */
1509
1128
  export interface LanguageSwitchContext {
1129
+ /**
1130
+ * The new language that was switched to
1131
+ */
1510
1132
  language: string;
1511
1133
  }
1134
+ /**
1135
+ * Contextual information about the channel switch
1136
+ */
1512
1137
  export interface ChannelSwitchContext {
1138
+ /**
1139
+ * The new channel id that was switched to
1140
+ */
1513
1141
  channelId: string;
1514
1142
  }
1143
+ interface VolumeState {
1144
+ /**
1145
+ * Whether the audio is muted
1146
+ */
1147
+ isMuted: boolean;
1148
+ /**
1149
+ * The volume level
1150
+ */
1151
+ volume: number;
1152
+ }
1153
+ /**
1154
+ * The events that can be emitted from the Vindral instance
1155
+ */
1515
1156
  export interface PublicVindralEvents {
1157
+ /**
1158
+ * When an error that requires action has occurred
1159
+ *
1160
+ * Can be a fatal error that will unload the Vindral instance - this is indicated by `isFatal()` on the error object returning true.
1161
+ *
1162
+ * In case of a fatal error it is appropriate to indicate what the error was to the user, either by displaying the error.message or
1163
+ * by using the error.code() as a key to look up a localization string. To resume streaming it is required to create a new Vindral instance.
1164
+ */
1516
1165
  ["error"]: Readonly<VindralError>;
1166
+ /**
1167
+ * When the instance needs user input to activate audio or sometimes video playback.
1168
+ * Is called with an object
1169
+ * ```
1170
+ * {
1171
+ * forAudio: boolean // true if user input is needed for audio playback
1172
+ * forVideo: boolean // true if user input is needed for video playback
1173
+ * }
1174
+ * ```
1175
+ */
1517
1176
  ["needs user input"]: NeedsUserInputContext;
1177
+ /**
1178
+ * When a timed metadata event has been triggered
1179
+ */
1518
1180
  ["metadata"]: Readonly<Metadata>;
1181
+ /**
1182
+ * When the playback state changes
1183
+ */
1519
1184
  ["playback state"]: Readonly<PlaybackState>;
1185
+ /**
1186
+ * When the connection state changes
1187
+ */
1520
1188
  ["connection state"]: Readonly<State>;
1189
+ /**
1190
+ * When the available rendition levels change
1191
+ */
1521
1192
  ["rendition levels"]: ReadonlyArray<RenditionLevel>;
1193
+ /**
1194
+ * When the rendition level is changed
1195
+ */
1522
1196
  ["rendition level"]: Readonly<RenditionLevel>;
1197
+ /**
1198
+ * When the available languages change
1199
+ */
1523
1200
  ["languages"]: ReadonlyArray<string>;
1201
+ /**
1202
+ * When the available channels change
1203
+ */
1524
1204
  ["channels"]: ReadonlyArray<Channel>;
1205
+ /**
1206
+ * When a context switch state change has occurred.
1207
+ * E.g. when a channel change has been requested, or quality is changed.
1208
+ */
1525
1209
  ["context switch"]: Readonly<ContextSwitchState>;
1210
+ /**
1211
+ * Emitted when a wallclock time message has been received from the server.
1212
+ *
1213
+ * Note: This is the edge server wallclock time and thus may differ slightly
1214
+ * between two viewers if they are connected to different edge servers.
1215
+ */
1526
1216
  ["server wallclock time"]: Readonly<number>;
1217
+ /**
1218
+ * Is emitted during connection to indicate whether the channel is live or not.
1219
+ *
1220
+ * If the channel is not live, the Vindral instance will try to reconnect until the `reconnectHandler`
1221
+ * determines that no more retries should be made.
1222
+ *
1223
+ * Note: If the web-sdk is instantiated at the same time as you are starting the stream it is possible
1224
+ * that this emits false until the started state has propagated through the system.
1225
+ */
1527
1226
  ["is live"]: boolean;
1227
+ /**
1228
+ * Emitted when a channel switch has been completed and the first frame of the new channel is rendered.
1229
+ * A string containing the channel id of the new channel is provided as an argument.
1230
+ */
1528
1231
  ["channel switch"]: Readonly<ChannelSwitchContext>;
1232
+ /**
1233
+ * Emitted when a language switch has been completed and the new language starts playing.
1234
+ */
1529
1235
  ["language switch"]: Readonly<LanguageSwitchContext>;
1236
+ /**
1237
+ * Emitted when the volume state changes.
1238
+ *
1239
+ * This is triggered when the user changes the volume through the Vindral instance, but also
1240
+ * from external sources such as OS media shortcuts or other native UI outside of the browser.
1241
+ */
1242
+ ["volume state"]: Readonly<VolumeState>;
1530
1243
  ["buffer state event"]: Readonly<BufferStateEvent>;
1531
1244
  ["initialized media"]: void;
1532
1245
  }
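A sketch of wiring up a few of the listeners documented above; the handlers only log, and the forAudio/forVideo and isMuted/volume fields are the ones described in the comments for "needs user input" and "volume state".

```typescript
import { Vindral } from "@vindral/web-sdk"

const instance = new Vindral({
  url: "https://lb.cdn.vindral.com",
  channelId: "vindral_demo1_ci_099ee1fa-80f3-455e-aa23-3d184e93e04f",
})

instance.on("error", (error) => {
  if (error.isFatal()) {
    // A fatal error unloads the instance; show error.message (or map error.code() to a localized string)
    console.error("fatal playback error:", error.message)
  }
})

instance.on("needs user input", ({ forAudio, forVideo }) => {
  // Show a "tap to play / tap to unmute" control and call instance.play() from its click handler
  console.log("user input needed", { forAudio, forVideo })
})

instance.on("volume state", ({ isMuted, volume }) => {
  console.log("volume changed", { isMuted, volume })
})

void instance.play()
```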
1533
- export interface Track {
1534
- type: Type;
1535
- mimeType: string;
1536
- }
1537
- declare type ValuesOf<T> = T[keyof T];
1538
- declare type UndefinedKeys<T> = {
1539
- [K in keyof T]: undefined extends T[K] ? K : never;
1540
- }[keyof T];
1541
- declare type ExtractOptional<T> = Pick<T, Exclude<UndefinedKeys<T>, undefined>>;
1542
- declare type ExtractRequired<T> = Omit<T, Exclude<UndefinedKeys<T>, undefined>>;
1543
- declare type NullableObjectValuesOf<T> = Exclude<Partial<Extract<ValuesOf<ExtractOptional<T>>, object>>, Array<unknown>>;
1544
- declare type ObjectValuesOf<T> = Exclude<Extract<ValuesOf<ExtractRequired<T>>, object>, Array<unknown>>;
1545
- declare type UnionToIntersection<U> = (U extends any ? (k: U) => void : never) extends (k: infer I) => void ? I : never;
1546
- declare type Flatten<T> = UnionToIntersection<ObjectValuesOf<T> | NullableObjectValuesOf<T>>;
1547
- interface RenditionProps {
1548
- id: number;
1549
- bitRate: number;
1550
- codecString?: string;
1551
- language?: string;
1552
- meta?: Record<string, string>;
1553
- }
1554
- interface VideoRenditionProps {
1555
- codec: VideoCodec;
1556
- frameRate: [
1557
- number,
1558
- number
1559
- ];
1560
- width: number;
1561
- height: number;
1562
- }
1563
- interface AudioRenditionProps {
1564
- codec: AudioCodec;
1565
- channels: number;
1566
- sampleRate: number;
1567
- }
1568
- interface CodecProps {
1569
- codec: Codec;
1570
- codecString?: string;
1571
- }
1572
- export declare type VideoRendition = VideoRenditionProps & RenditionProps;
1573
- export declare type AudioRendition = AudioRenditionProps & RenditionProps;
1574
- export declare type Rendition = VideoRendition | AudioRendition;
1575
- export declare const isVideoRendition: (rendition: Readonly<Rendition>) => rendition is VideoRendition;
1576
- export declare const isAudioRendition: (rendition: Readonly<Rendition>) => rendition is AudioRendition;
1577
- export declare const getMimeType: (rendition: Readonly<CodecProps>) => string;
1578
- interface AudioConstraint {
1579
- bitRate: number;
1580
- codec?: AudioCodec;
1581
- codecString?: string;
1582
- language?: string;
1583
- }
1584
- interface VideoConstraint {
1585
- width: number;
1586
- height: number;
1587
- bitRate: number;
1588
- codec?: VideoCodec;
1589
- codecString?: string;
1590
- }
1591
- declare type SubscriptionInitiator = "manual" | "edge";
1592
- interface Subscription {
1593
- channelId: string;
1594
- video: VideoConstraint;
1595
- audio: AudioConstraint;
1596
- burstMs?: number;
1597
- meta?: Record<string, string>;
1598
- initiator?: SubscriptionInitiator;
1599
- }
1600
- interface ContextSwitch {
1601
- type: "context switch";
1602
- }
1603
- interface ContextSwitchComplete {
1604
- type: "context switch complete";
1605
- }
1606
- interface RenditionsSignal {
1607
- type: "renditions";
1608
- renditions: Rendition[];
1609
- }
1610
- interface SubscribeSignal {
1611
- type: "subscribe";
1612
- subscription: Subscription;
1613
- }
1614
- interface ClientIpSignal {
1615
- type: "client ip";
1616
- ip: string;
1617
- }
1618
- interface SubscriptionChangedSignal {
1619
- type: "subscription changed";
1620
- subscription: Subscription;
1621
- }
1622
- interface TimingInfoSignal {
1623
- type: "timing info";
1624
- timingInfo: {
1625
- channelId: string;
1626
- timestamp: number;
1627
- wallclockTime: number;
1246
+ declare const defaultOptions: {
1247
+ sizeBasedResolutionCapEnabled: boolean;
1248
+ pictureInPictureEnabled: boolean;
1249
+ abrEnabled: boolean;
1250
+ burstEnabled: boolean;
1251
+ mseEnabled: boolean;
1252
+ mseOpusEnabled: boolean;
1253
+ muted: boolean;
1254
+ minBufferTime: number;
1255
+ maxBufferTime: number;
1256
+ logLevel: Level;
1257
+ maxSize: Size;
1258
+ maxVideoBitRate: number;
1259
+ maxAudioBitRate: number;
1260
+ tags: string[];
1261
+ media: Media;
1262
+ poster: string | boolean;
1263
+ reconnectHandler: (state: ReconnectState) => Promise<boolean> | boolean;
1264
+ iosWakeLockEnabled: boolean;
1265
+ telemetryEnabled: boolean;
1266
+ iosMediaElementEnabled: boolean;
1267
+ pauseSupportEnabled: boolean;
1268
+ advanced: {
1269
+ wasmDecodingConstraint: Partial<VideoConstraint>;
1628
1270
  };
1629
- }
1630
- interface PingSignal {
1631
- type: "ping";
1632
- }
1633
- interface PongSignal {
1634
- type: "pong";
1635
- }
1636
- interface RefreshAuthSignal {
1637
- type: "refresh auth";
1638
- token: string;
1639
- }
1640
- declare type Signal = RenditionsSignal | SubscribeSignal | SubscriptionChangedSignal | TimingInfoSignal | PingSignal | PongSignal | ClientIpSignal | ContextSwitch | ContextSwitchComplete | RefreshAuthSignal;
1641
- export interface Channel {
1642
- channelId: string;
1643
- name: string;
1644
- isLive: boolean;
1645
- thumbnailUrls: string[];
1646
- }
1647
- interface ClientOverrides {
1648
- maxVideoBitRate?: number;
1649
- minBufferTime?: number;
1650
- maxBufferTime?: number;
1651
- burstEnabled?: boolean;
1652
- }
1653
- interface ChannelWithRenditionsAndOverrides extends Channel {
1654
- renditions: Rendition[];
1655
- overrides?: ClientOverrides;
1656
- }
1657
- interface ConnectOptions {
1658
- channelGroupId?: string;
1659
- channelId: string;
1660
- }
1661
- interface Telemetry {
1662
- url: string;
1663
- probability?: number;
1664
- includeErrors?: boolean;
1665
- includeEvents?: boolean;
1666
- includeStats?: boolean;
1667
- maxRetries?: number;
1668
- maxErrorReports?: number;
1669
- interval?: number;
1670
- }
1671
- export interface ConnectResponse {
1672
- logsUrl?: string;
1673
- statsUrl?: string;
1674
- telemetry?: Telemetry;
1675
- channels: ChannelWithRenditionsAndOverrides[];
1676
- edges: string[];
1677
- }
1678
- export interface ApiClientOptions {
1679
- publicEndpoint: string;
1680
- tokenFactory?: AuthorizationTokenFactory;
1681
- }
1682
- export interface AuthorizationContext {
1683
- channelGroupId?: string;
1684
- channelId?: string;
1685
- }
1686
- export declare type AuthorizationTokenFactory = (context: AuthorizationContext) => string | undefined;
1687
- export declare class ApiClient {
1688
- private baseUrl;
1689
- private tokenFactory?;
1690
- constructor(options: ApiClientOptions);
1691
- connect(options: ConnectOptions): Promise<ConnectResponse>;
1692
- getChannel(channelId: string): Promise<Channel>;
1693
- getChannels(channelGroupId: string): Promise<Channel[]>;
1694
- private getHeaders;
1695
- private getAuthToken;
1696
- private toChannels;
1697
- private toChannel;
1698
- }
1699
- export declare type CastState = "casting" | "not casting";
1700
- export interface CastSenderEvents {
1701
- ["connected"]: void;
1702
- ["resumed"]: void;
1703
- ["disconnected"]: void;
1704
- ["failed"]: void;
1705
- ["metadata"]: Metadata;
1706
- ["server wallclock time"]: number;
1707
- }
1708
- export interface CastConfig {
1709
- options: Options;
1710
- background?: string;
1711
- receiverApplicationId?: string;
1712
- }
1713
- export declare type CustomCastMessageType = "stop" | "start" | "updateAuthToken" | "serverWallclockTime" | "metadata" | "setChannelId" | "setLanguage";
1714
- export interface CastCustomMessage {
1715
- type: CustomCastMessageType;
1716
- channelId?: string;
1717
- language?: string;
1718
- config?: CastConfig;
1719
- token?: string;
1720
- serverWallclockTime?: number;
1721
- metadata?: Metadata;
1722
- }
1723
- export declare class CastSender extends Emitter<CastSenderEvents> {
1724
- private state;
1725
- private config;
1726
- private unloaded;
1727
- constructor(config: CastConfig);
1728
- get casting(): boolean;
1729
- get volume(): number;
1730
- set volume(volume: number);
1731
- get language(): string | undefined;
1732
- set language(language: string | undefined);
1733
- get channelId(): string;
1734
- set channelId(channelId: string);
1735
- updateAuthenticationToken: (token: string) => void;
1736
- unload: () => void;
1737
- init: () => Promise<void>;
1738
- start: () => Promise<void>;
1739
- stop: () => void;
1740
- getReceiverName: () => string | undefined;
1741
- private onGCastApiAvailable;
1742
- private send;
1743
- private onMessage;
1744
- private onSessionStarted;
1745
- private onSessionStateChanged;
1746
- private getInstance;
1747
- private getSession;
1748
- private castLibrariesAdded;
1749
- private verifyCastLibraries;
1750
- }
1751
- export declare const isValidOptions: (options: unknown) => options is Options;
1752
- export declare const validateOptions: (options: Options) => Options;
1753
- interface FullscreenEvents {
1754
- ["on fullscreen change"]: boolean;
1755
- }
1756
- declare class Fullscreen extends Emitter<FullscreenEvents> {
1757
- private container;
1758
- constructor(container: HTMLElement);
1759
- unload: () => void;
1760
- request: () => Promise<void>;
1761
- exit: () => Promise<void>;
1762
- private onChange;
1763
- isFullscreen: () => boolean;
1764
- isSupported: () => boolean;
1765
- private isFullscreenApiSupported;
1766
- private isInIframe;
1767
- private get requestFn();
1768
- private get exitFn();
1769
- }
1271
+ };
1272
+ /**
1273
+ * Available options when initializing the Player. Used for enabling/disabling features
1274
+ * and hiding/showing buttons in the control pane
1275
+ */
1770
1276
  export interface PlayerOptions {
1277
+ /**
1278
+ * Enable or disable controls
1279
+ */
1771
1280
  controlsEnabled?: boolean;
1281
+ /**
1282
+ * Enable or disable Google Cast (button and functionality). Will be disabled if
1283
+ * device is unable to use Cast Framework
1284
+ */
1772
1285
  castEnabled?: boolean;
1286
+ /**
1287
+ * Enable or disable fullscreen button
1288
+ */
1773
1289
  fullscreenButtonEnabled?: boolean;
1290
+ /**
1291
+ * Enable or disable Picture-in-picture button. Will be disabled if
1292
+ * device is unable to manually enter PiP
1293
+ */
1774
1294
  pipButtonEnabled?: boolean;
1295
+ /**
1296
+ @deprecated since 2.0.10. Use channelSelectionOptions.
1297
+
1298
+ Enable or disable channel selector (only available for channel groups with more than one channel)
1299
+ */
1775
1300
  channelSelectionEnabled?: boolean;
1301
+ /**
1302
+ * Customize and enable channel selection interfaces.
1303
+ * Will be disabled if not using a channel group with more than one channel.
1304
+ */
1776
1305
  channelSelectionOptions?: {
1777
1306
  barButton?: {
1778
1307
  enabled?: boolean;
@@ -1782,21 +1311,58 @@ export interface PlayerOptions {
1782
1311
  enabled?: boolean;
1783
1312
  };
1784
1313
  };
1314
+ /**
1315
+ * Enable or disable rendition (bitrate) selector
1316
+ */
1785
1317
  renditionLevelsEnabled?: boolean;
1318
+ /**
1319
+ * Enable or disable language selector
1320
+ */
1786
1321
  languagesButtonEnabled?: boolean;
1322
+ /**
1323
+ * Enable or disable one-to-one (real size, not filling the entire container)
1324
+ */
1787
1325
  oneToOneButtonEnabled?: boolean;
1326
+ /**
1327
+ * Hide controls after this time in ms
1328
+ */
1788
1329
  hideTimeout?: number;
1330
+ /**
1331
+ * Background image for Cast Receiver
1332
+ */
1789
1333
  castBackground?: string;
1334
+ /**
1335
+ * For custom Cast Receivers, enter your Application Id here
1336
+ */
1790
1337
  castReceiverApplicationId?: string;
1338
+ /**
1339
+ * How often thumbnails should be refreshed (if used), in milliseconds. Default is 60000.
1340
+ */
1791
1341
  thumbnailUpdateInterval?: number;
1792
- }
1793
- export interface PlayerState {
1794
- isBuffering: boolean;
1795
- fullscreen: Fullscreen;
1796
- lastInteractionTime: number;
1797
- }
1342
+ /**
1343
+ * Enable or disable the pause and play button
1344
+ */
1345
+ pauseButtonEnabled?: boolean;
1346
+ }
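A sketch of passing these options as the second constructor argument of the Player declared below; the specific toggles and the "root" element id are illustrative.

```typescript
import { Player } from "@vindral/web-sdk"

const player = new Player(
  {
    url: "https://lb.cdn.vindral.com",
    channelId: "vindral_demo1_ci_099ee1fa-80f3-455e-aa23-3d184e93e04f",
  },
  {
    controlsEnabled: true,
    pipButtonEnabled: false,
    pauseButtonEnabled: true,
    hideTimeout: 3000, // hide the controls after 3 s of inactivity
  }
)

player.attach(document.getElementById("root")!)

// player.core exposes the underlying Vindral instance, e.g. player.core.muted = false,
// and player.unload() fully unloads both the Player and the Vindral instance.
```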
1347
+ /**
1348
+ * Represents a Vindral player
1349
+ *
1350
+ * ```typescript
1351
+ * // minimal configuration of a Vindral web player
1352
+ * const instance = new Player({
1353
+ * url: "https://lb.cdn.vindral.com",
1354
+ * channelId: "vindral_demo1_ci_099ee1fa-80f3-455e-aa23-3d184e93e04f",
1355
+ * })
1356
+ * ```
1357
+ */
1798
1358
  export declare class Player {
1359
+ /**
1360
+ * The Vindral instance
1361
+ */
1799
1362
  readonly core: Vindral;
1363
+ /**
1364
+ * The CastSender instance
1365
+ */
1800
1366
  readonly castSender: CastSender;
1801
1367
  private options;
1802
1368
  private state;
@@ -1806,16 +1372,31 @@ export declare class Player {
1806
1372
  private channelSelectionList?;
1807
1373
  private bar;
1808
1374
  private stateInterval?;
1809
- private showBufferingTimeout?;
1810
1375
  private thumbnailModule?;
1376
+ private browser;
1811
1377
  constructor(optionsOrInstance: Options | Vindral, playerOptions?: PlayerOptions);
1378
+ /**
1379
+ * Fully unloads the Player and the Vindral instance.
1380
+ */
1812
1381
  unload: () => void;
1382
+ /**
1383
+ * Attaches the Player root node to a container
1384
+ */
1813
1385
  attach: (container: HTMLElement) => void;
1814
1386
  private setupCastSender;
1815
1387
  private onMouseMove;
1816
1388
  private onClick;
1817
1389
  private togglePip;
1818
1390
  private toggleFullscreen;
1391
+ /**
1392
+ * This method is used to trigger play permissions without activating audio.
1393
+ * Setting muted to false will implicitly activate play permissions, and then we instantly revert the mute state to whatever it was.
1394
+ * The reason we need this method is that there are edge cases on iOS where users can enter fullscreen and then try to activate audio
1395
+ * from there. But that click (through a native UI control) to activate the audio only gives the media element permission to play audio, not the audio context.
1396
+ * By triggering this on any button that can lead to fullscreen mode we can ensure that the audio context already has permissions to play before the user might decide
1397
+ * to activate it through the native ui.
1398
+ */
1399
+ private triggerUserInput;
1819
1400
  private toggleOneToOne;
1820
1401
  private enterFullscreen;
1821
1402
  private exitFullscreen;