@vindral/web-sdk 2.3.0 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.d.ts CHANGED
@@ -1,83 +1,5 @@
1
- declare type Type = "audio" | "video";
2
- interface BaseCodedSample {
3
- channelId: string;
4
- isSync: boolean;
5
- renditionId: number;
6
- timestamp: number;
7
- timescale: number;
8
- duration: number;
9
- data: ArrayBuffer;
10
- compositionTimeOffset?: number;
11
- }
12
- interface CodedAudioSample extends BaseCodedSample {
13
- type: "audio";
14
- codec: AudioCodec;
15
- channels: number;
16
- sampleRate: number;
17
- language?: string;
18
- }
19
- interface CodedVideoSample extends BaseCodedSample {
20
- type: "video";
21
- codec: VideoCodec;
22
- width: number;
23
- height: number;
24
- buffer?: ArrayBuffer;
25
- }
26
- declare type CodedSample = CodedAudioSample | CodedVideoSample;
27
1
  declare type AudioCodec = "aac" | "opus" | "mp3";
28
2
  declare type VideoCodec = "h264" | "av1";
29
- declare type Codec = VideoCodec | AudioCodec;
30
- interface DecodedSampleStatistics {
31
- decodeTime: number;
32
- transportTimeFromWorker: number;
33
- transportTimeToWorker: number;
34
- samplesInBatch: number;
35
- }
36
- interface DecodedVideoSample {
37
- type: "video";
38
- colorPrimaries: "bt709";
39
- channelId: string;
40
- isSync: boolean;
41
- renditionId: number;
42
- timestamp: number;
43
- timescale: number;
44
- duration: number;
45
- width: number;
46
- height: number;
47
- codedWidth: number;
48
- codedHeight: number;
49
- format: "yuv420";
50
- data: [
51
- Uint8Array,
52
- Uint8Array,
53
- Uint8Array
54
- ];
55
- buffer: ArrayBuffer;
56
- statistics?: DecodedSampleStatistics;
57
- }
58
- interface DecodedAudioSample {
59
- type: "audio";
60
- channels: number;
61
- channelId: string;
62
- isSync: boolean;
63
- renditionId: number;
64
- timestamp: number;
65
- timescale: number;
66
- duration: number;
67
- sampleRate: number;
68
- format: "f32";
69
- data: Float32Array;
70
- statistics?: DecodedSampleStatistics;
71
- }
72
- declare type DecodedSample = DecodedVideoSample | DecodedAudioSample;
73
- interface InitSegment {
74
- type: Type;
75
- codec: Codec;
76
- channelId: string;
77
- renditionId: number;
78
- timescale: number;
79
- data: ArrayBuffer;
80
- }
81
3
  declare type MatchingKeys<TRecord, TMatch, K extends keyof TRecord = keyof TRecord> = K extends (TRecord[K] extends TMatch ? K : never) ? K : never;
82
4
  declare type VoidKeys<Record> = MatchingKeys<Record, void>;
83
5
  declare type EventListenerReturnType = (() => void) | void;
@@ -87,62 +9,314 @@ declare class Emitter<TEvents, TEmits = TEvents, ArgLessEvents extends VoidKeys<
87
9
  emit<T extends ArgEmits>(eventName: T, args: TEmits[T]): void;
88
10
  off<T extends ArgLessEvents>(eventName: T, fn: () => EventListenerReturnType): void;
89
11
  off<T extends ArgEvents>(eventName: T, fn: (args: TEvents[T]) => EventListenerReturnType): void;
12
+ /**
13
+ * Add an event listener to `eventName`
14
+ *
15
+ * Event listeners may optionally return a "defer function" that will be called once all other listeners have been called.
16
+ * This is useful when one listener may want everone to have reacted to an event before calling something.
17
+ */
90
18
  on<T extends ArgLessEvents>(eventName: T, fn: () => void): void;
91
19
  on<T extends ArgEvents>(eventName: T, fn: (args: TEvents[T]) => void): void;
20
+ /**
21
+ * Add an event listener to `eventName` that will be called once only
22
+ *
23
+ * Event listeners may optionally return a "defer function" that will be called once all other listeners have been called.
24
+ * This is useful when one listener may want everone to have reacted to an event before calling something.
25
+ */
92
26
  once<T extends ArgLessEvents>(eventName: T, fn: () => void): void;
93
27
  once<T extends ArgEvents>(eventName: T, fn: (args: TEvents[T]) => void): void;
94
28
  reset(): void;
95
29
  private add;
96
30
  }
97
- declare type FilterFunc<T> = (item: T) => boolean;
98
- declare class Fifo<T> {
99
- readonly maxSize: number;
100
- private values;
101
- constructor(maxSize?: number);
102
- push: (value: T) => void;
103
- clear: () => void;
104
- pop: () => T | undefined;
105
- peekFirst: () => T | undefined;
106
- peekLast: () => T | undefined;
107
- isFull: () => boolean;
108
- isEmpty: () => boolean;
109
- items: () => ReadonlyArray<T>;
110
- filterPop: (filter: FilterFunc<T>) => void;
111
- }
112
- declare type PartialBy<T, K extends keyof T> = Omit<T, K> & Partial<T>;
113
31
  interface MinMaxAverage {
114
32
  average: number;
115
33
  max: number;
116
34
  min: number;
117
35
  }
118
- interface Regression {
119
- slope: number;
120
- intercept: number;
121
- r2: number;
122
- predict(x: number): number;
36
+ interface RenditionProps {
37
+ id: number;
38
+ bitRate: number;
39
+ codecString?: string;
40
+ language?: string;
41
+ meta?: Record<string, string>;
42
+ }
43
+ interface VideoRenditionProps {
44
+ codec: VideoCodec;
45
+ frameRate: [
46
+ number,
47
+ number
48
+ ];
49
+ width: number;
50
+ height: number;
51
+ }
52
+ interface AudioRenditionProps {
53
+ codec: AudioCodec;
54
+ channels: number;
55
+ sampleRate: number;
56
+ }
57
+ declare type VideoRendition = VideoRenditionProps & RenditionProps;
58
+ declare type AudioRendition = AudioRenditionProps & RenditionProps;
59
+ declare type Rendition = VideoRendition | AudioRendition;
60
+ interface Size {
61
+ width: number;
62
+ height: number;
63
+ }
64
+ interface VideoConstraint {
65
+ width: number;
66
+ height: number;
67
+ bitRate: number;
68
+ codec?: VideoCodec;
69
+ codecString?: string;
70
+ }
71
+ /**
72
+ * Channel
73
+ */
74
+ export interface Channel {
75
+ /**
76
+ * Channel ID for the channel
77
+ */
78
+ channelId: string;
79
+ /**
80
+ * Display name
81
+ */
82
+ name: string;
83
+ /**
84
+ * Indicates whether there is an incoming source feed for the channel
85
+ */
86
+ isLive: boolean;
87
+ /**
88
+ * URLs to fetch thumbnail from
89
+ */
90
+ thumbnailUrls: string[];
91
+ }
92
+ interface ClientOverrides {
93
+ maxVideoBitRate?: number;
94
+ minBufferTime?: number;
95
+ maxBufferTime?: number;
96
+ burstEnabled?: boolean;
97
+ }
98
+ interface ChannelWithRenditionsAndOverrides extends Channel {
99
+ renditions: Rendition[];
100
+ overrides?: ClientOverrides;
101
+ }
102
+ interface ConnectOptions {
103
+ channelGroupId?: string;
104
+ channelId: string;
105
+ }
106
+ interface Telemetry {
107
+ url: string;
108
+ probability?: number;
109
+ includeErrors?: boolean;
110
+ includeEvents?: boolean;
111
+ includeStats?: boolean;
112
+ maxRetries?: number;
113
+ maxErrorReports?: number;
114
+ interval?: number;
115
+ }
116
+ interface ConnectResponse {
117
+ logsUrl?: string;
118
+ statsUrl?: string;
119
+ telemetry?: Telemetry;
120
+ channels: ChannelWithRenditionsAndOverrides[];
121
+ edges: string[];
122
+ }
123
+ /**
124
+ * ApiClientOptions
125
+ */
126
+ export interface ApiClientOptions {
127
+ /**
128
+ * String representing the URL to the public CDN API.
129
+ */
130
+ publicEndpoint: string;
131
+ /**
132
+ * Function that should return a string containing a signed authentication token.
133
+ */
134
+ tokenFactory?: AuthorizationTokenFactory;
135
+ }
136
+ /**
137
+ * Represents what authorization that will be requested.
138
+ */
139
+ export interface AuthorizationContext {
140
+ /**
141
+ * The channelGroupId that might need authorization.
142
+ */
143
+ channelGroupId?: string;
144
+ /**
145
+ * The channelId that might need authorization.
146
+ */
147
+ channelId?: string;
148
+ }
149
+ /**
150
+ * AuthorizationTokenFactory
151
+ */
152
+ export declare type AuthorizationTokenFactory = (context: AuthorizationContext) => string | undefined;
153
+ /**
154
+ * Convenience class to call the public available endpoints of the Vindral Live CDN.
155
+ */
156
+ export declare class ApiClient {
157
+ private baseUrl;
158
+ private tokenFactory?;
159
+ constructor(options: ApiClientOptions);
160
+ /**
161
+ * Returns everything needed to setup the connection of Vindral instance.
162
+ */
163
+ connect(options: ConnectOptions): Promise<ConnectResponse>;
164
+ /**
165
+ * Fetches information regarding a single channel.
166
+ *
167
+ * @param channelId the channel to fetch
168
+ * @returns a [[Channel]] containing information about the requested channel.
169
+ */
170
+ getChannel(channelId: string): Promise<Channel>;
171
+ /**
172
+ * Fetches channels within a channel group
173
+ *
174
+ * Note: The returned list includes inactive channels - check isLive to filter out only active channels
175
+ *
176
+ * @param channelGroup the channel group to fetch channels from
177
+ * @returns an array of [[Channel]] that belong to the channel group
178
+ */
179
+ getChannels(channelGroupId: string): Promise<Channel[]>;
180
+ private getHeaders;
181
+ private getAuthToken;
182
+ private toChannels;
183
+ private toChannel;
123
184
  }
185
+ /**
186
+ * Represents a timed metadata event
187
+ */
124
188
  export interface Metadata {
189
+ /**
190
+ * The raw string content as it was ingested (if using JSON, it needs to be parsed on your end)
191
+ */
125
192
  content: string;
193
+ /**
194
+ * Timestamp in ms
195
+ */
126
196
  timestamp: number;
127
197
  }
128
- export interface TimeRange {
129
- start: number;
130
- end: number;
198
+ /**
199
+ * Available events to listen to
200
+ */
201
+ export interface CastSenderEvents {
202
+ /**
203
+ * When a connection has been established with a CastReceiver
204
+ */
205
+ ["connected"]: void;
206
+ /**
207
+ * When a previous session has been resumed
208
+ */
209
+ ["resumed"]: void;
210
+ /**
211
+ * When a CastReceiver has lost or stopped a connection
212
+ */
213
+ ["disconnected"]: void;
214
+ /**
215
+ * When a connection attempt was initiated unsuccessfully
216
+ */
217
+ ["failed"]: void;
218
+ /**
219
+ * When the remote connection emits a metadata event
220
+ */
221
+ ["metadata"]: Metadata;
222
+ /**
223
+ * When the remote connection receives a server wallclock time event
224
+ */
225
+ ["server wallclock time"]: number;
131
226
  }
132
- declare type Timestamp = number;
133
- export interface PerLevelStats {
134
- upgradesFromLevel: Timestamp[];
135
- downgradesFromLevel: Timestamp[];
136
- bufferingRanges: PartialBy<TimeRange, "end">[];
137
- activeRanges: PartialBy<TimeRange, "end">[];
138
- decodeRate: number;
227
+ /**
228
+ * Used for initializing the CastSender
229
+ */
230
+ export interface CastConfig {
231
+ /**
232
+ * The [Vindral Options](./Options) to use for the Cast Receiver
233
+ */
234
+ options: Options;
235
+ /**
236
+ * URL to a background image.
237
+ * Example: "https://via.placeholder.com/256x144"
238
+ */
239
+ background?: string;
240
+ /**
241
+ * Override this if you have your own custom receiver
242
+ */
243
+ receiverApplicationId?: string;
139
244
  }
140
- export interface Metrics {
141
- levels: {
142
- [key: string]: PerLevelStats;
143
- };
144
- general: PerLevelStats;
145
- bufferFullness: number;
245
+ /**
246
+ * CastSender handles initiation of and communication with the Google Cast Receiver
247
+ */
248
+ export declare class CastSender extends Emitter<CastSenderEvents> {
249
+ private state;
250
+ private config;
251
+ private unloaded;
252
+ constructor(config: CastConfig);
253
+ /**
254
+ * True if the instance is casting right now
255
+ */
256
+ get casting(): boolean;
257
+ /**
258
+ * The current volume
259
+ */
260
+ get volume(): number;
261
+ /**
262
+ * Set the current volume. Setting this to zero is equivalent to muting the video
263
+ */
264
+ set volume(volume: number);
265
+ /**
266
+ * The current language
267
+ */
268
+ get language(): string | undefined;
269
+ /**
270
+ * Set the current language
271
+ */
272
+ set language(language: string | undefined);
273
+ /**
274
+ * The current channelId
275
+ */
276
+ get channelId(): string;
277
+ /**
278
+ * Set the current channelId
279
+ */
280
+ set channelId(channelId: string);
281
+ /**
282
+ * Update authentication token on an already established and authenticated connection
283
+ */
284
+ updateAuthenticationToken: (token: string) => void;
285
+ /**
286
+ * Fully unloads the instance. This disconnects the current listener but lets the
287
+ * cast session continue on the receiving device
288
+ */
289
+ unload: () => void;
290
+ /**
291
+ * Initiates the CastSender.
292
+ * Will reject if Cast is not available on the device or the network.
293
+ */
294
+ init: () => Promise<void>;
295
+ /**
296
+ * Requests a session. It will open the native cast receiver chooser dialog
297
+ */
298
+ start: () => Promise<void>;
299
+ /**
300
+ * Stops a session. It will stop playback on device as well.
301
+ */
302
+ stop: () => void;
303
+ /**
304
+ * Returns a string representing the name of the Cast receiver device or undefined if no receiver exists
305
+ */
306
+ getReceiverName: () => string | undefined;
307
+ private onGCastApiAvailable;
308
+ private send;
309
+ private onMessage;
310
+ private onSessionStarted;
311
+ private onSessionStateChanged;
312
+ private getInstance;
313
+ private getSession;
314
+ private castLibrariesAdded;
315
+ private verifyCastLibraries;
316
+ }
317
+ interface TimeRange {
318
+ start: number;
319
+ end: number;
146
320
  }
147
321
  export declare enum Level {
148
322
  TRACE = "trace",
@@ -152,204 +326,28 @@ export declare enum Level {
152
326
  ERROR = "error",
153
327
  CRITICAL = "critical"
154
328
  }
155
- declare type Meta = Record<string, unknown>;
156
- interface Logger {
157
- getLogger(category: string): Logger;
158
- getFullCategory(): string[];
159
- category(): string;
160
- trace(message: string, meta?: Meta): void;
161
- debug(message: string, meta?: Meta): void;
162
- info(message: string, meta?: Meta): void;
163
- warn(message: string, meta?: Meta): void;
164
- error(message: string, meta?: Meta): void;
165
- critical(message: string, meta?: Meta): void;
166
- }
167
- interface LoggerOptions {
168
- category: string;
169
- onLog: (instance: LoggerInstance, level: Level, message: string, meta?: Meta) => void;
170
- parent?: Logger;
171
- }
172
- declare class LoggerInstance implements Logger {
173
- private options;
174
- constructor(options: LoggerOptions);
175
- getLogger: (category: string) => Logger;
176
- getFullCategory: () => string[];
177
- category: () => string;
178
- trace: (message: string, meta?: Meta | undefined) => void;
179
- debug: (message: string, meta?: Meta | undefined) => void;
180
- info: (message: string, meta?: Meta | undefined) => void;
181
- warn: (message: string, meta?: Meta | undefined) => void;
182
- error: (message: string, meta?: Meta | undefined) => void;
183
- critical: (message: string, meta?: Meta | undefined) => void;
184
- }
185
- export declare type PlayInitiator = "user input" | "programatically";
186
- export interface PlaybackSource {
187
- volume: number;
188
- muted: boolean;
189
- currentTime: number;
190
- playbackRate?: number;
191
- isActivated: boolean;
192
- readonly seekTime: number;
193
- readonly paused: boolean;
194
- readonly isSeeking: boolean;
195
- play(initiator: PlayInitiator): Promise<void>;
196
- load?: () => void;
197
- unload?: () => void;
198
- }
199
- interface MediaElementOptions {
200
- autoplay: boolean;
201
- muted: boolean;
202
- type: "audio" | "video";
203
- logger: Logger;
204
- poster?: string;
205
- }
206
329
  interface NeedsUserInputContext {
330
+ /**
331
+ * True if user input is needed for audio
332
+ */
207
333
  forAudio: boolean;
334
+ /**
335
+ * True if user input is needed for video
336
+ */
208
337
  forVideo: boolean;
209
338
  }
210
- interface MediaElementEvents {
211
- ["needs user input"]: NeedsUserInputContext;
212
- ["buffer state"]: Readonly<BufferState>;
213
- }
214
- declare class MediaElement extends Emitter<MediaElementEvents> {
215
- readonly element: HTMLMediaElement;
216
- private logger;
217
- private seekTimes;
218
- private seekStartTime?;
219
- private _userProvidedMuted;
220
- private timers;
221
- private _userHasProvidedInput;
222
- isActivated: boolean;
223
- constructor({ type, autoplay, muted, logger, poster }: MediaElementOptions);
224
- attach: (container: HTMLElement) => void;
225
- get seekTime(): number;
226
- get isSeeking(): boolean;
227
- get currentTime(): number;
228
- set currentTime(value: number);
229
- get playbackRate(): number;
230
- set playbackRate(rate: number);
231
- get volume(): number;
232
- set volume(volume: number);
233
- get muted(): boolean;
234
- set muted(muted: boolean);
235
- get userHasProvidedInput(): boolean;
236
- get paused(): boolean;
237
- load: () => void;
238
- unload: () => void;
239
- getPlaybackRate: () => number;
240
- getPlaybackState: () => PlaybackState;
241
- setPlaybackRate: (rate: number) => void;
242
- getBuffer: () => TimeRange[];
243
- play: (initiator: PlayInitiator) => Promise<void>;
244
- private _play;
245
- private onEvent;
246
- private onBufferStateChange;
247
- private onSeekStart;
248
- private onSeekEnd;
249
- }
250
- declare type PlaybackState = "buffering" | "playing";
339
+ declare type PlaybackState = "buffering" | "playing" | "paused";
251
340
  declare type BufferStateEvent = "filled" | "drained";
252
- declare type BufferState = {
253
- buffered: ReadonlyArray<TimeRange>;
254
- currentTime: number;
255
- playbackState: PlaybackState;
256
- };
257
- interface PlaybackModuleListeners {
258
- ["buffer state"]: Readonly<BufferState>;
259
- ["needs user input"]: NeedsUserInputContext;
260
- }
261
- interface PlaybackModuleEvents {
262
- ["buffer state event"]: Readonly<BufferStateEvent>;
263
- ["playback state"]: Readonly<PlaybackState>;
264
- ["buffer fullness"]: number;
265
- }
266
- interface ClockSource {
267
- readonly currentTime: number;
268
- }
269
341
  interface PlaybackModuleStatistics {
270
342
  bufferTime: number;
271
343
  needsInputForAudioCount: number;
272
344
  needsInputForVideoCount: number;
273
345
  }
274
- declare class PlaybackModule {
275
- private emitter;
276
- private logger;
277
- private clockSource;
278
- private state;
279
- private bufferFullness;
280
- private targetBufferTime;
281
- private lastBufferStateEvent;
282
- private firstFrameTime?;
283
- private needsInputForAudioCount;
284
- private needsInputForVideoCount;
285
- constructor(emitter: Emitter<PlaybackModuleListeners, PlaybackModuleEvents>, logger: Logger, clockSource: ClockSource, targetBufferTime: number);
286
- static create: (emitter: Emitter<PlaybackModuleListeners, PlaybackModuleEvents>, logger: Logger, clockSource: ClockSource, targetBufferTime: number) => PlaybackModule;
287
- unload: () => void;
288
- setTargetBufferTime: (bufferTime: number) => void;
289
- getTargetBufferTime: () => number;
290
- getBufferFullness: () => number;
291
- getLastBufferStateEvent: () => BufferStateEvent;
292
- getState: () => PlaybackState;
293
- getFirstFrameTime: () => number | undefined;
294
- getStatistics: () => PlaybackModuleStatistics;
295
- private onBufferedStateChanged;
296
- private onNeedsUserInput;
297
- }
298
- declare type Direction = "upgrade" | "downgrade" | "double downgrade" | "reconnect";
299
- interface QualityOfServiceConfig {
300
- cooldownTime: number;
301
- maxBufferingEvents: {
302
- last10Seconds: number;
303
- };
304
- maxBufferingRatio: {
305
- last10Seconds: number;
306
- };
307
- maxRecentDowngradesCount: number;
308
- maxDowngradeLookbackMs: number;
309
- minTimeSpentPlaying: {
310
- factor: number;
311
- ratio: number;
312
- };
313
- maxCooldownRatio: number;
314
- }
315
- interface AdaptivityEvents {
316
- ["adapt level"]: Readonly<Direction>;
317
- }
318
346
  interface AdaptivityStatistics {
319
347
  isAbrEnabled: boolean;
320
348
  }
321
- interface QualityOfServiceSource {
322
- getMetrics(): Metrics;
323
- getLevelStats(level: RenditionLevel): PerLevelStats | undefined;
324
- getBufferFullnessRegression(): Regression | undefined;
325
- timeSpentBuffering(): number;
326
- timeActive(): number;
327
- timeSpentBufferingLast(last: number): number;
328
- timeSpentActiveLast(last: number): number;
329
- timeSpentPlayingInAtLeastLevelRatio(level: RenditionLevel): number;
330
- }
331
- declare class AdaptivityModule {
332
- private qualityOfServiceSource;
333
- private config;
334
- private emitter;
335
- private logger;
336
- private isSuspended;
337
- private lastAdaptTime;
338
- isEnabled: boolean;
339
- constructor(emitter: Emitter<AdaptivityEvents>, logger: Logger, qualityOfServiceSource: QualityOfServiceSource, config: Partial<QualityOfServiceConfig>);
340
- static create: (emitter: Emitter<AdaptivityEvents>, logger: Logger, qualityOfServiceSource: QualityOfServiceSource, config?: Partial<QualityOfServiceConfig>) => AdaptivityModule;
341
- load: () => void;
342
- unload: () => void;
343
- suspend: () => void;
344
- unsuspend: () => void;
345
- reset: (extraGraceTimeMs?: number) => void;
346
- getStatistics: () => AdaptivityStatistics;
347
- isQoSOk: (renditionLevel: RenditionLevel) => boolean;
348
- private onBufferState;
349
- private onAdaptedLevel;
350
- private adaptLevel;
351
- private tooMuchTimeBuffering;
352
- private tooManyBufferingEvents;
349
+ interface BufferTimeStatistics {
350
+ bufferTimeAdjustmentCount: number;
353
351
  }
354
352
  interface VindralErrorProps {
355
353
  isFatal: boolean;
@@ -357,165 +355,37 @@ interface VindralErrorProps {
357
355
  code: string;
358
356
  source?: Error | MediaError;
359
357
  }
360
- export declare const CONNECTION_FAILED_CODE = "connection_failed";
361
- export declare const CONNECTION_FAILED_AFTER_RETRIES_CODE = "connection_failed_will_not_attempt_again";
362
- export declare const AUTHENTICATION_FAILED_CODE = "authentication_error";
363
- export declare const AUTHENTICATION_EXPIRED_CODE = "authentication_expired";
364
- export declare const CHANNEL_NOT_FOUND_CODE = "channel_not_found";
365
- export declare const NO_INCOMING_DATA = "no_incoming_data_error";
366
- export declare const INACTIVITY_CODE = "connection_inactivity";
367
- export declare const MISSING_INIT_SEGMENT = "missing_init_segment";
368
- export declare const NO_TRACK_CONTEXT = "no_track_context";
369
- export declare const DISCONNECTED_BY_EDGE = "disconnected_by_edge";
370
- export declare const DECODER_ERROR = "decoder_error";
371
- export declare const AUDIO_PLAYER_ERROR = "audio_player_error";
372
- export declare const MEDIA_SOURCE_ERROR = "media_source_error";
373
- export declare const WEBGL_CONTEXT_LOST_ERROR = "webgl_context_lost_error";
374
- export declare type ErrorType = "internal" | "external";
358
+ declare type ErrorType = "internal" | "external";
359
+ /**
360
+ * Represents a vindral error - all errors emitted from the Vindral instance inherit from this class.
361
+ */
375
362
  export declare class VindralError extends Error {
376
363
  private props;
377
364
  private extra;
378
365
  constructor(message: string, props: VindralErrorProps, extra?: {});
366
+ /**
367
+ * The error code is a stable string that represents the error type - this should be treated as an
368
+ * opaque string that can be used as a key for looking up localized strings for displaying error messages.
369
+ * @returns the error code
370
+ */
379
371
  code: () => string;
372
+ /**
373
+ * Indicates whether the error is fatal - if it is that means the Vindral instance will be unloaded because of this error.
374
+ */
380
375
  isFatal: () => boolean;
376
+ /**
377
+ * The underlying error that caused the Vindral error
378
+ * @returns the underlying error
379
+ */
381
380
  source: () => Error | MediaError | undefined;
382
381
  type: () => ErrorType;
382
+ /**
383
+ * @returns a stringifiable represenation of the error
384
+ */
383
385
  toStringifiable: () => Record<string, unknown>;
384
386
  }
385
- export declare const mediaElementError: (isFatal: boolean, source: MediaError) => VindralError;
386
- export declare const decoderError: (isFatal: boolean, source: Error) => VindralError;
387
- export declare const audioPlayerError: (isFatal: boolean, source: Error) => VindralError;
388
- export declare const missingInitSegmentError: (renditionId: number, channelId: string) => VindralError;
389
- export declare const mediaSourceError: (isFatal: boolean, source: Error) => VindralError;
390
- export declare const noTrackContextError: (type: "audio" | "video", renditionId: number, channelId: string) => VindralError;
391
- export declare const audioContextTimeoutError: () => VindralError;
392
- export declare const disconnectedFromEdgeServerError: (source?: VindralError | undefined) => VindralError;
393
- export declare const authenticationError: (source?: Error | undefined) => VindralError;
394
- export declare const authenticationExpiredError: (source?: Error | undefined) => VindralError;
395
- export declare const inactivityError: (type: ErrorType, source?: Error | undefined) => VindralError;
396
- export declare const channelNotFoundError: (type: ErrorType, source?: Error | undefined) => VindralError;
397
- export declare const noIncomingDataError: (source?: Error | undefined) => VindralError;
398
- export declare const unableToConnectError: (source?: Error | undefined) => VindralError;
399
- export declare const unableToConnectAfterRetriesError: () => VindralError;
400
- export declare const webglContextError: (source: Error) => VindralError;
401
- interface AudioPlayerModuleListeners {
402
- ["decoded frame"]: Readonly<DecodedSample>;
403
- }
404
- interface AudioPlayerModuleEvents {
405
- ["needs user input"]: NeedsUserInputContext;
406
- ["error"]: VindralError;
407
- }
408
- interface ClockSource {
409
- readonly currentTime: number;
410
- }
411
- declare class AudioPlayerModule {
412
- private logger;
413
- private emitter;
414
- private unmuter;
415
- private audio?;
416
- private gainNode?;
417
- private _volume;
418
- private _userProvidedMuted;
419
- private _muted;
420
- private startTime;
421
- private samples;
422
- private preInitSampleQueue;
423
- private sampleRate;
424
- private channels;
425
- private index;
426
- private clockSource;
427
- private clockDelta?;
428
- private startTimeIsInvalidated;
429
- private lastSampleTimestamp;
430
- isActivated: boolean;
431
- get volume(): number;
432
- set volume(volume: number);
433
- get seekTime(): number;
434
- get isSeeking(): boolean;
435
- get muted(): boolean;
436
- set muted(muted: boolean);
437
- get paused(): boolean;
438
- get currentTime(): number;
439
- set currentTime(currentTime: number);
440
- constructor(emitter: Emitter<AudioPlayerModuleListeners, AudioPlayerModuleEvents>, logger: Logger, clockSource: ClockSource, muted: boolean);
441
- unload: () => Promise<void>;
442
- suspend: () => void;
443
- unsuspend: () => void;
444
- static create: (emitter: Emitter<AudioPlayerModuleListeners, AudioPlayerModuleEvents>, logger: Logger, clockSource: ClockSource, muted: boolean) => AudioPlayerModule;
445
- private flush;
446
- private onDecodedFrame;
447
- play: () => Promise<void>;
448
- private resume;
449
- private getAudioContext;
450
- private setupContext;
451
- private setGain;
452
- }
453
- interface BufferTimeConfig {
454
- minBufferTime: number;
455
- maxBufferTime: number;
456
- cooldownTime: number;
457
- maxBufferingEvents: {
458
- last30Seconds: number;
459
- };
460
- maxBufferingRatio: {
461
- last30Seconds: number;
462
- };
463
- }
464
- interface BufferTimeListeners {
465
- ["buffer state"]: Readonly<BufferState>;
466
- }
467
- interface BufferTimeStatistics {
468
- bufferTimeAdjustmentCount: number;
469
- }
470
- interface TargetBufferTimeTarget {
471
- targetBufferTime: number;
472
- }
473
- interface QualityOfServiceSource {
474
- bufferingEventsLast(last: number): number;
475
- timeSpentBufferingLast(last: number): number;
476
- timeSpentActiveLast(last: number): number;
477
- }
478
- declare class BufferTimeModule {
479
- private static BUFFER_TIME_STEP_SIZE;
480
- private qualityOfServiceSource;
481
- private targetBufferTimeTarget;
482
- private config;
483
- private emitter;
484
- private logger;
485
- private isSuspended;
486
- private lastIncreaseTime;
487
- private bufferTimeAdjustmentCount;
488
- constructor(emitter: Emitter<BufferTimeListeners, unknown>, logger: Logger, qualityOfServiceSource: QualityOfServiceSource, targetBufferTimeTarget: TargetBufferTimeTarget, config: Partial<BufferTimeConfig>);
489
- static create: (emitter: Emitter<BufferTimeListeners, unknown>, logger: Logger, qualityOfServiceSource: QualityOfServiceSource, targetBufferTimeTarget: TargetBufferTimeTarget, config?: Partial<BufferTimeConfig>) => BufferTimeModule;
490
- updateConfig(config: Partial<BufferTimeConfig>): void;
491
- load: () => void;
492
- unload: () => void;
493
- suspend: () => void;
494
- unsuspend: () => void;
495
- reset: () => void;
496
- getStatistics: () => BufferTimeStatistics;
497
- private onBufferState;
498
- }
499
387
  declare type State = "connected" | "disconnected" | "connecting";
500
388
  declare type ContextSwitchState = "completed" | "started";
501
- interface ConnectionModuleListeners {
502
- ["send signal"]: Readonly<string>;
503
- ["disconnect"]: void;
504
- ["reconnect"]: string;
505
- }
506
- interface ConnectionModuleEvents {
507
- ["received signal"]: Readonly<Signal>;
508
- ["received data"]: Readonly<ArrayBuffer>;
509
- ["connection state"]: Readonly<State>;
510
- ["rtt"]: number;
511
- ["error"]: Readonly<VindralError>;
512
- ["context switch complete"]: Readonly<void>;
513
- ["context switch started"]: Readonly<void>;
514
- }
515
- interface ConnectOptions {
516
- connectHandler: () => Promise<string>;
517
- reconnectHandler: (state: ReconnectState) => Promise<boolean> | boolean;
518
- }
519
389
  interface ConnectionStatistics {
520
390
  rtt: MinMaxAverage;
521
391
  estimatedBandwidth: number;
@@ -523,88 +393,14 @@ interface ConnectionStatistics {
523
393
  connectCount: number;
524
394
  connectionAttemptCount: number;
525
395
  }
526
- declare class ConnectionModule {
527
- private static PING_INTERVAL;
528
- private static MAX_MISSED_PINGS;
529
- private static TLS_ROUNDTRIPS;
530
- private timers;
531
- private emitter;
532
- private transport?;
533
- private logger;
534
- private options;
535
- private rtts;
536
- private lastPingSentTime;
537
- private isPingInFlight;
538
- private connectCount;
539
- private _firstConnectionTime?;
540
- private _lastConnectionTime?;
541
- private missedPings;
542
- private contextSwitchesInProgress;
543
- private contextSwitchesCompleted;
544
- private buffer;
545
- private constructor();
546
- static create: (emitter: Emitter<ConnectionModuleListeners, ConnectionModuleEvents>, logger: Logger, options: ConnectOptions) => ConnectionModule;
547
- unload: () => void;
548
- suspend: () => void;
549
- unsuspend: () => void;
550
- get rtt(): number;
551
- get estimatedBandwidth(): number | undefined;
552
- get connectTime(): number | undefined;
553
- get firstConnectionTime(): number | undefined;
554
- get lastConnectionTime(): number | undefined;
555
- getState: () => State;
556
- sendSignal: (signal: Readonly<string>) => void;
557
- getStatistics: () => ConnectionStatistics;
558
- private onMessage;
559
- private handleMessage;
560
- connect: (connectionsCount: 1 | 2) => void;
561
- private onTransportChange;
562
- disconnect: (reason?: string) => void;
563
- reconnect: (reason: string) => void;
564
- private resetPingState;
565
- private pingCooldownExpired;
566
- private sendPing;
567
- }
568
- interface Size {
569
- width: number;
570
- height: number;
571
- }
572
- interface PictureInPictureSizeSource {
573
- getPictureInPictureSize(): Size | undefined;
574
- }
575
- export interface RenditionLevel {
396
+ interface RenditionLevel {
576
397
  audio?: AudioRendition;
577
398
  video?: VideoRendition;
578
399
  }
579
- declare type RenditionLevelChangedReason = "abr" | "manual";
580
- export interface RenditionLevelChanged {
581
- from?: RenditionLevel;
582
- to?: RenditionLevel;
583
- reason: RenditionLevelChangedReason;
584
- }
585
- export interface SubscriptionChange {
586
- readonly to: Readonly<Subscription>;
587
- readonly from: Readonly<Subscription>;
588
- }
589
- interface RenditionsModuleEvents {
590
- ["rendition level changed"]: Readonly<RenditionLevelChanged>;
591
- }
592
- interface RenditionsModuleListeners {
593
- ["subscription changed"]: Readonly<SubscriptionChange>;
594
- ["renditions"]: Readonly<RenditionsArgs>;
595
- }
596
- interface RenditionsArgs {
597
- renditions: ReadonlyArray<Rendition>;
598
- channelId: string;
599
- }
600
400
  interface RenditionLevel {
601
401
  audio?: AudioRendition;
602
402
  video?: VideoRendition;
603
403
  }
604
- interface SubscriptionSource {
605
- getTargetSubscription(): Readonly<Subscription>;
606
- getCurrentSubscription(): Readonly<Subscription>;
607
- }
608
404
  interface RenditionsModuleStatistics {
609
405
  videoRenditionId?: number;
610
406
  audioRenditionId?: number;
@@ -621,42 +417,6 @@ interface RenditionsModuleStatistics {
621
417
  ];
622
418
  renditionLevelChangeCount: number;
623
419
  }
624
- declare type ChannelId = string;
625
- declare class RenditionsModule {
626
- private renditions;
627
- private renditionLevels;
628
- private languages;
629
- private emitter;
630
- private subscriptionSource;
631
- private renditionLevelChangeCount;
632
- constructor(emitter: Emitter<RenditionsModuleListeners, RenditionsModuleEvents>, subscriptionSource: SubscriptionSource);
633
- unload: () => void;
634
- static create: (emitter: Emitter<RenditionsModuleListeners, RenditionsModuleEvents>, subscriptionSource: SubscriptionSource) => RenditionsModule;
635
- getRenditionLevels: (subscription?: Readonly<Subscription> | undefined) => ReadonlyArray<RenditionLevel>;
636
- getRenditionLevel: (subscription?: Readonly<Subscription> | undefined) => RenditionLevel | undefined;
637
- setRenditions: (channelId: ChannelId, renditions: Rendition[]) => void;
638
- getLanguages: () => ReadonlyArray<string>;
639
- getVideoRendition: (renditionId: number, channelId?: string) => Readonly<VideoRendition> | undefined;
640
- getAudioRenditions: (channelId: ChannelId) => Readonly<AudioRendition[]> | undefined;
641
- getAudioRendition: (renditionId: number, channelId?: string) => Readonly<AudioRendition> | undefined;
642
- getRendition: (renditionId: number, channelId?: string) => Readonly<Rendition> | undefined;
643
- getStatistics: () => RenditionsModuleStatistics;
644
- private onRenditions;
645
- private onSubscriptionChanged;
646
- private updateRenditionLevels;
647
- private createRenditionLevels;
648
- private getCurrentSubscription;
649
- }
650
- declare type DeepPartial<T> = {
651
- [P in keyof T]?: DeepPartial<T[P]>;
652
- };
653
- interface ConstraintCapModuleEvents {
654
- ["constraint cap changed"]: Readonly<ConstraintCap>;
655
- }
656
- interface ConstraintCapModuleListeners {
657
- ["enter picture in picture"]: Readonly<PictureInPictureSizeSource>;
658
- ["exit picture in picture"]: void;
659
- }
660
420
  interface VideoConstraintCap {
661
421
  width: number;
662
422
  height: number;
@@ -677,120 +437,13 @@ interface ConstraintCapStatistics {
677
437
  elementHeight: number;
678
438
  pixelRatio: number;
679
439
  }
680
- interface RenditionLevelSource {
681
- getRenditionLevels(): ReadonlyArray<RenditionLevel>;
682
- }
683
- declare class ConstraintCapModule {
684
- private static CHECK_SIZE_INTERVAL;
685
- private emitter;
686
- private timers;
687
- private pictureInPictureSource?;
688
- private element;
689
- private currentCap?;
690
- private userSpecifiedCap?;
691
- private renditionLevelSource;
692
- private resizeObserver?;
693
- private elementSize;
694
- private _sizeBasedResolutionCapEnabled;
695
- constructor(emitter: Emitter<ConstraintCapModuleListeners, ConstraintCapModuleEvents>, element: HTMLElement, renditionLevelSource: RenditionLevelSource, sizeBasedResolutionCapEnabled: boolean);
696
- unload: () => void;
697
- load: () => void;
698
- static create: (emitter: Emitter<ConstraintCapModuleListeners, ConstraintCapModuleEvents>, element: HTMLElement, renditionLevelSource: RenditionLevelSource, sizeBasedResolutionCapEnabled: boolean) => ConstraintCapModule;
699
- unsuspend: () => void;
700
- suspend: () => void;
701
- get sizeBasedResolutionCapEnabled(): boolean;
702
- set sizeBasedResolutionCapEnabled(enabled: boolean);
703
- setUserSpecifiedCap: (cap: DeepPartial<ConstraintCap>) => void;
704
- getUserSpecifiedCap: () => ConstraintCap | undefined;
705
- getCurrentConstraintCap: () => ConstraintCap | undefined;
706
- getStatistics: () => ConstraintCapStatistics;
707
- constrainSubscription: (subscription: Subscription) => Subscription;
708
- private onResize;
709
- private evaluateConstraintCap;
710
- private enterPictureInPicture;
711
- private exitPictureInPicture;
712
- }
713
- declare class ArrayBufferPool {
714
- private maxSize;
715
- private size;
716
- private buffers;
717
- private _borrowedBuffers;
718
- constructor(maxSize: number);
719
- get bufferSize(): number;
720
- get poolSize(): number;
721
- get borrowedBuffers(): number;
722
- get: (size: number) => ArrayBuffer;
723
- reclaim: (buffer: ArrayBuffer) => void;
724
- }
725
- declare type DecoderParamType<Type extends "audio" | "video"> = Type extends "video" ? CodedVideoSample : CodedAudioSample;
726
- declare type DecoderReturnType<Type extends "audio" | "video"> = Type extends "video" ? DecodedVideoSample : DecodedAudioSample;
727
- interface DecoderContext<Type extends "audio" | "video"> {
728
- unload(): void;
729
- initSegment(initSegment: Readonly<InitSegment>): void;
730
- enqueue(sample: Readonly<DecoderParamType<Type>>): void;
731
- take(): DecoderReturnType<Type> | undefined;
732
- error?: () => Error | undefined;
733
- }
734
- interface DecoderModuleEvents {
735
- ["video decode rate"]: number;
736
- ["decoded frame"]: Readonly<DecodedSample>;
737
- ["buffer state"]: Readonly<BufferState>;
738
- ["error"]: Readonly<VindralError>;
739
- }
740
- interface DecoderModuleListeners {
741
- ["init segment"]: Readonly<InitSegment>;
742
- ["coded sample"]: Readonly<CodedSample>;
743
- }
744
440
  interface DecoderStatistics {
745
441
  videoDecodeRate: number;
746
442
  videoDecodeTime: MinMaxAverage;
747
443
  audioDecodeTime: MinMaxAverage;
748
444
  videoTransportTime: MinMaxAverage;
749
445
  }
750
- interface ClockSource {
751
- readonly currentTime: number;
752
- }
753
- interface TrackContext {
754
- buffer: TimeRange[];
755
- decoderContext: DecoderContext<Type>;
756
- decodeTime: Fifo<number>;
757
- transportTime: Fifo<number>;
758
- sampleDuration: number;
759
- }
760
- declare class DecoderModule {
761
- private static BUFFER_GAP_THRESHOLD;
762
- private static DECODED_FRAMES_CHECK_INTERVAL;
763
- private static DECODE_RATE_INTERVAL;
764
- private timers;
765
- private logger;
766
- private emitter;
767
- private trackContexts;
768
- private clockSource;
769
- private isSuspended;
770
- constructor(emitter: Emitter<DecoderModuleListeners, DecoderModuleEvents>, logger: Logger, trackContexts: Map<Type, TrackContext>, clockSource: ClockSource);
771
- load: () => void;
772
- unload: () => void;
773
- suspend: () => void;
774
- unsuspend: () => void;
775
- static create: (emitter: Emitter<DecoderModuleListeners, DecoderModuleEvents>, logger: Logger, tracks: Track[], clockSource: ClockSource, pool: ArrayBufferPool) => Promise<DecoderModule>;
776
- getBuffer: (type: Type) => TimeRange[] | undefined;
777
- videoDecodeRate: () => number;
778
- getStatistics: () => DecoderStatistics;
779
- private onInitSegment;
780
- private decode;
781
- private emitDecodedFrames;
782
- private emitDecodeRate;
783
- private updateBufferState;
784
- }
785
446
  type EffectiveConnectionType = "2g" | "3g" | "4g" | "slow-2g";
786
- interface DocumentState {
787
- readonly isVisible: boolean;
788
- readonly isOnline: boolean;
789
- }
790
- interface DocumentStateEvents {
791
- ["page active"]: boolean;
792
- ["pagehide"]: PageTransitionEvent;
793
- }
794
447
  interface DocumentStateModulesStatistics {
795
448
  isVisible: boolean;
796
449
  isOnline: boolean;
@@ -803,408 +456,46 @@ interface DocumentStateModulesStatistics {
803
456
  navigatorConnectionType?: ConnectionType;
804
457
  navigatorSaveData?: boolean;
805
458
  navigatorDownlink?: number;
806
- }
807
- declare class DocumentStateModule implements DocumentState {
808
- private emitter;
809
- private isVisibleCount;
810
- private isHiddenCount;
811
- private isOnlineCount;
812
- private isOfflineCount;
813
- private isActive;
814
- constructor(emitter: Emitter<unknown, DocumentStateEvents>);
815
- static create: (emitter: Emitter<unknown, DocumentStateEvents>) => DocumentStateModule;
816
- unload: () => void;
817
- load: () => void;
818
- unsuspend: () => void;
819
- getStatistics: () => DocumentStateModulesStatistics;
820
- get isOnline(): boolean;
821
- get isVisible(): boolean;
822
- private onOnline;
823
- private onOffline;
824
- private onPageHide;
825
- private onVisibilityChanged;
826
- private setIsActive;
827
- }
828
- interface EventModuleEvents {
829
- ["event"]: Readonly<StoredEvent>;
830
- }
831
- interface TimeSource {
832
- readonly drift: number | undefined;
833
- readonly serverCurrentTime: number;
834
- }
835
- interface BaseEvent {
836
- timestamp: number;
837
- }
838
- declare type TimestampAdded = {
839
- timestampAdded: number;
840
- };
841
- interface Metadata extends BaseEvent {
842
- type: "metadata";
843
- content: string;
844
- }
845
- interface ChannelSwitch extends BaseEvent {
846
- type: "channel switch";
847
- channelId: string;
848
- }
849
- interface LanguageSwitch extends BaseEvent {
850
- type: "language switch";
851
- language: string;
852
- }
853
- declare type Event = Metadata | ChannelSwitch | LanguageSwitch;
854
- declare type StoredEvent = TimestampAdded & Event;
855
- declare class EventModule {
856
- private static EVENT_TIMEOUT;
857
- private static EVENT_CHECK_INTERVAL;
858
- private logger;
859
- private emitter;
860
- private waitingEvents;
861
- private isTriggered;
862
- private timeSource;
863
- private timers;
864
- constructor(emitter: Emitter<unknown, EventModuleEvents>, logger: Logger, timeSource: TimeSource);
865
- load: () => void;
866
- unload: () => void;
867
- static create: (emitter: Emitter<unknown, EventModuleEvents>, logger: Logger, timeSource: TimeSource) => EventModule;
868
- addEvent: (event: Event) => void;
869
- extractEvent: (previousSample: CodedSample, sample: CodedSample) => void;
870
- private onBufferedStateChanged;
871
- }
872
- interface IncomingDataModuleListeners {
873
- ["connection state"]: Readonly<State>;
874
- }
875
- interface IncomingDataModuleEvents {
876
- ["no data timeout"]: number;
877
- ["reconnect"]: string;
878
- ["error"]: Readonly<VindralError>;
879
- }
880
- interface IncomingDataModuleStatistics {
881
- videoBitRate: number;
882
- audioBitRate: number;
883
- bytesReceived: number;
884
- }
885
- declare class IncomingDataModule {
886
- static NO_DATA_ERROR_TIMEOUT: number;
887
- static NO_DATA_TIMEOUT: number;
888
- static UPDATE_RECEIVED_BYTES_INTERVAL: number;
889
- private emitter;
890
- private timers;
891
- private bytesReceived;
892
- private timeoutInterval?;
893
- private lastBytesUpdated;
894
- constructor(emitter: Emitter<IncomingDataModuleListeners, IncomingDataModuleEvents>);
895
- static create: (emitter: Emitter<IncomingDataModuleListeners, IncomingDataModuleEvents>) => IncomingDataModule;
896
- load: () => void;
897
- unload: () => void;
898
- averageBitRate: (type: Type) => number;
899
- totalBytesReceived: () => number;
900
- add: (type: Type, bytes: number) => void;
901
- getStatistics: () => IncomingDataModuleStatistics;
902
- private onConnectionState;
903
- private checkTimeout;
904
- private clearTimeoutInterval;
905
- }
906
- declare class LoggerModule {
907
- private level;
908
- private categoryLevels;
909
- static create: () => LoggerModule;
910
- getLogger: (category: string) => LoggerInstance;
911
- setLevel: (level: Level, category?: string | undefined) => void;
912
- unload: () => void;
913
- private onLog;
914
- }
915
- interface RecoveredContext {
916
- error: Error;
917
- count: number;
918
- }
919
- interface MseModuleEvents {
920
- ["error"]: Readonly<VindralError>;
921
- ["recovered from media error"]: Readonly<RecoveredContext>;
922
- }
923
- interface MseModuleListeners {
924
- ["init segment"]: Readonly<InitSegment>;
925
- ["coded sample"]: Readonly<CodedSample>;
926
- }
927
- interface MseModuleStatistics {
928
- quotaErrorCount: number;
929
- totalVideoFrames?: number;
930
- droppedVideoFrames?: number;
931
- }
932
- declare class MseModule {
933
- private readonly maxChunkSize;
934
- private logger;
935
- private timers;
936
- private emitter;
937
- private mediaElement;
938
- private tracks;
939
- private mediaSource;
940
- private trackContexts;
941
- private autoRecoverFromMediaErrors;
942
- private quotaErrorCount;
943
- private recoveredFromErrorCount;
944
- constructor(logger: Logger, emitter: Emitter<MseModuleListeners, MseModuleEvents>, mediaElement: HTMLMediaElement, tracks: Track[]);
945
- static create: (logger: Logger, emitter: Emitter<MseModuleListeners, MseModuleEvents>, mediaElement: HTMLMediaElement, tracks: Track[]) => Promise<MseModule>;
946
- load: () => void;
947
- unload: () => void;
948
- getStatistics: () => MseModuleStatistics;
949
- private open;
950
- getBuffer: (type: Type) => TimeRange[];
951
- init: (initSegment: Readonly<InitSegment>) => void;
952
- append: (samples: Readonly<CodedSample>[], needsInitSegment: boolean) => Promise<void>;
953
- private onCodedSample;
954
- private onSourceEnded;
955
- }
956
- interface PictureInPictureListeners {
957
- ["add picture in picture listener"]: {
958
- element: Readonly<HTMLMediaElement>;
959
- };
960
- }
961
- interface PictureInPictureEvents {
962
- ["enter picture in picture"]: Readonly<PictureInPictureSizeSource>;
963
- ["exit picture in picture"]: void;
964
- }
965
- declare class PictureInPictureModule {
966
- private logger;
967
- private emitter;
968
- private element?;
969
- private pictureInPictureWindow?;
970
- private constructor();
971
- static create: (emitter: Emitter<PictureInPictureListeners, PictureInPictureEvents>, logger: Logger) => PictureInPictureModule;
972
- unload: () => void;
973
- onEnablePictureInPicture: (params: PictureInPictureListeners["add picture in picture listener"]) => void;
974
- getPictureInPictureSize: () => Size | undefined;
975
- isPictureInPictureActive: () => boolean;
976
- isPictureInPictureSupported: () => boolean;
977
- requestPictureInPicture: () => Promise<void>;
978
- exitPictureInPicture: () => Promise<void>;
979
- private requestStandardPictureInPicture;
980
- private exitStandardPictureInPicture;
981
- private isStandardPictureInPictureSupported;
982
- private isStandardPictureInPictureActive;
983
- private isWebkitPresentationModeActive;
984
- private requestWebkitPresentationMode;
985
- private isWebkitPresentationModeSupported;
986
- }
987
- interface QualityOfServiceModuleEvents {
988
- ["fatal quality of service"]: void;
989
- }
990
- interface QualityOfServiceModuleStatistics {
991
- timeSpentBuffering: number;
992
- bufferingEventsCount: number;
993
- fatalQosCount: number;
994
- timeSpentRatio: {
995
- [bitRate: string]: number;
996
- };
997
- }
998
- declare class QualityOfServiceModule {
999
- private readonly minBufferFullnessLengthForRegression;
1000
- private logger;
1001
- private emitter;
1002
- private timers;
1003
- private metrics;
1004
- private bufferFullness;
1005
- private bufferFullnessRegression?;
1006
- private currentLevel?;
1007
- private isSuspended;
1008
- private _fatalQosCount;
1009
- private hasAsserted;
1010
- constructor(emitter: Emitter<QualityOfServiceModuleEvents>, logger: Logger);
1011
- static create: (emitter: Emitter<QualityOfServiceModuleEvents>, logger: Logger) => QualityOfServiceModule;
1012
- load: () => void;
1013
- unload: () => void;
1014
- suspend: () => void;
1015
- unsuspend: () => void;
1016
- fatalQosGraceTime: () => number;
1017
- get fatalQosCount(): number;
1018
- incrementFatalQosCount: () => void;
1019
- getMetrics: () => Metrics;
1020
- getLevelStats: (renditionLevel: RenditionLevel) => PerLevelStats | undefined;
1021
- getBufferFullnessRegression: () => Regression | undefined;
1022
- activeRatios: () => Map<string, number>;
1023
- bufferingRatios: () => Map<string, number>;
1024
- bufferingEventsLast: (last: number) => number;
1025
- timeSpentBufferingLast: (last: number) => number;
1026
- timeSpentActiveLast: (last: number) => number;
1027
- timeSpentPlayingInAtLeastLevelRatio: (renditionLevel: RenditionLevel) => number;
1028
- timeSpentBuffering: () => number;
1029
- timeActive: () => number;
1030
- getStatistics: () => QualityOfServiceModuleStatistics;
1031
- private onRenditionLevelChanged;
1032
- private onBufferFullness;
1033
- private onPlaybackState;
1034
- private onVideoDecodeRate;
1035
- private calculateLinearRegression;
1036
- private stopBuffering;
1037
- private startBuffering;
1038
- private setActive;
1039
- private unsetActive;
1040
- }
1041
- interface SubscriptionModuleListeners {
1042
- ["subscription changed"]: Readonly<SubscriptionChange>;
1043
- }
1044
- declare class SubscriptionModule {
1045
- private logger;
1046
- private timers;
1047
- private emitter;
1048
- private targetSubscription;
1049
- private currentSubscription;
1050
- private _isSwitchingSubscription;
1051
- private pendingSubscriptionTimeoutId?;
1052
- private burstMs;
1053
- private constructor();
1054
- unload: () => void;
1055
- static create: (logger: Logger, emitter: Emitter<SubscriptionModuleListeners>, subscription: Subscription) => SubscriptionModule;
1056
- isSwitchingSubscription: () => boolean;
1057
- getTargetSubscription: () => Subscription;
1058
- getCurrentSubscription: () => Subscription;
1059
- enableBurst: (bufferTimeMs: number) => void;
1060
- setSize: (size: Readonly<Size>) => void;
1061
- setVideoConstraint: (constraint: VideoConstraint) => void;
1062
- setAudioConstraint: (constraint: AudioConstraint) => void;
1063
- setVideoBitRate: (bitRate: number) => void;
1064
- setAudioBitRate: (bitRate: number) => void;
1065
- setChannelId: (channelId: string) => void;
1066
- setLanguage: (language: string | undefined) => void;
1067
- setVideoCodec: (videoCodec: VideoCodec | undefined) => void;
1068
- setAudioCodec: (audioCodec: AudioCodec | undefined) => void;
1069
- private setBurst;
1070
- private isNewSubscription;
1071
- private onSubscriptionChanged;
1072
- private scheduleSubscriptionChange;
1073
- }
1074
- interface SyncSample {
1075
- type: Type;
1076
- isSync: boolean;
1077
- channelId: string;
1078
- timestamp: number;
1079
- timescale: number;
1080
- }
1081
- interface SyncModuleListeners {
1082
- ["rtt"]: number;
1083
- }
1084
- interface SyncModuleEvents {
1085
- ["channel reset"]: void;
1086
- }
1087
- interface SyncInfo {
1088
- wallclockTime: number;
1089
- timestamp: number;
1090
- }
1091
- interface BufferSource {
1092
- getTargetBufferTime(): number;
1093
- }
1094
- interface SyncModuleStatistics {
1095
- drift: number | undefined;
1096
- driftAdjustmentCount: number;
1097
- timeshiftDriftAdjustmentCount: number;
1098
- discardedTimeInfoCount: number;
1099
- seekTime: number;
1100
- }
1101
- declare class SyncModule {
1102
- private emitter;
1103
- private logger;
1104
- private playbackSource;
1105
- private bufferSource;
1106
- private lastSeekTime;
1107
- readonly seekCooldownTime = 1000;
1108
- readonly seekTimeoutTime = 5000;
1109
- readonly syncMaxBehind: number;
1110
- readonly syncMaxBehindMultiplierStep = 1;
1111
- readonly syncMaxBehindIncreaseEvery = 3;
1112
- readonly syncMaxBehindMaximumAllowed = 2000;
1113
- syncMaxBehindMultiplier: number;
1114
- timeshiftOnAudio: boolean;
1115
- readonly syncMaxAhead = 150;
1116
- timeshiftSync: {
1117
- enabled: boolean;
1118
- maxBehind: number;
1119
- multiplier: number;
1120
- maxBehindAllowed: number;
1121
- overshoot: number;
1122
- minOvershootAllowed: number;
1123
- maxOvershootAllowed: number;
1124
- };
1125
- readonly maxTimeSyncDifferenceTolerance = 150;
1126
- private timers;
1127
- private rtt;
1128
- private channelSyncInfo;
1129
- private driftAdjustmentsCount;
1130
- private timeshiftDriftAdjustmentCount;
1131
- private timestampOffset?;
1132
- private currentChannelId?;
1133
- private highestSeenTimestamps;
1134
- private discardedTimeInfoCount;
1135
- private isSuspended;
1136
- private isSyncAdjustmentActivated;
1137
- private constructor();
1138
- load: () => void;
1139
- unload: () => void;
1140
- suspend: () => void;
1141
- unsuspend: () => void;
1142
- activateSyncAdjustments: () => void;
1143
- static create: (emitter: Emitter<SyncModuleListeners, SyncModuleEvents>, logger: Logger, playbackSource: PlaybackSource, bufferSource: BufferSource, isTimeshiftSyncEnabled: boolean) => SyncModule;
1144
- reset: () => void;
1145
- getTimeshiftOffset: () => number;
1146
- getCurrentChannelId: () => string | undefined;
1147
- updateChannelSyncInfo(channelId: string, syncInfo: SyncInfo): void;
1148
- private tryApplySyncInfo;
1149
- getLiveEdgeTime: (channelId: string) => number | undefined;
1150
- getLiveEdgeTimeLatencyAdjusted: (channelId: string) => number | undefined;
1151
- getWallclockTime: (channelId: string) => number | undefined;
1152
- getWallclockTimeLatencyAdjusted: (channelId: string) => number | undefined;
1153
- get serverCurrentTime(): number;
1154
- get channelCurrentTime(): number;
1155
- processSample: <T extends SyncSample>(sample: T) => T;
1156
- getStatistics: () => SyncModuleStatistics;
1157
- get drift(): number | undefined;
1158
- private updateRtt;
1159
- isPlaybackSourceReadyToSeek: () => boolean;
1160
- private isSeeking;
1161
- private isAllowedToSync;
1162
- private isSeekCooldownExpired;
1163
- private isSeekTimeoutExpired;
1164
- private currentTimeshiftMaxBehind;
1165
- private currentSyncMaxBehind;
1166
- private currentSyncMaxAhead;
1167
- private tryTimeshiftSync;
1168
- private onSync;
1169
- }
1170
- declare const defaultOptions: {
1171
- sizeBasedResolutionCapEnabled: boolean;
1172
- pictureInPictureEnabled: boolean;
1173
- abrEnabled: boolean;
1174
- burstEnabled: boolean;
1175
- mseEnabled: boolean;
1176
- mseOpusEnabled: boolean;
1177
- muted: boolean;
1178
- minBufferTime: number;
1179
- maxBufferTime: number;
1180
- logLevel: Level;
1181
- maxSize: Size;
1182
- maxVideoBitRate: number;
1183
- maxAudioBitRate: number;
1184
- tags: string[];
1185
- media: Media;
1186
- poster: string | boolean;
1187
- reconnectHandler: (state: ReconnectState) => Promise<boolean> | boolean;
1188
- iosWakeLockEnabled: boolean;
1189
- telemetryEnabled: boolean;
1190
- advanced: {
1191
- wasmDecodingConstraint: Partial<VideoConstraint>;
459
+ }
460
+ interface IncomingDataModuleStatistics {
461
+ videoBitRate: number;
462
+ audioBitRate: number;
463
+ bytesReceived: number;
464
+ }
465
+ interface VideoPlayerStatistics {
466
+ renderedFrameCount: number;
467
+ rendererDroppedFrameCount: number;
468
+ contextLostCount: number;
469
+ contextRestoredCount: number;
470
+ }
471
+ interface MseModuleStatistics {
472
+ quotaErrorCount: number;
473
+ mediaSourceOpenTime: number;
474
+ totalVideoFrames?: number;
475
+ droppedVideoFrames?: number;
476
+ }
477
+ interface QualityOfServiceModuleStatistics {
478
+ timeSpentBuffering: number;
479
+ bufferingEventsCount: number;
480
+ fatalQosCount: number;
481
+ timeSpentRatio: {
482
+ [bitRate: string]: number;
1192
483
  };
1193
- };
484
+ }
485
+ interface SyncModuleStatistics {
486
+ drift: number | undefined;
487
+ driftAdjustmentCount: number;
488
+ timeshiftDriftAdjustmentCount: number;
489
+ seekTime: number;
490
+ }
1194
491
  interface UserAgentInformation {
1195
492
  userAgent: string;
1196
493
  locationOrigin: string;
1197
494
  locationPath: string;
1198
495
  ancestorOrigins?: string[];
1199
496
  }
1200
- declare type StatisticsType<K> = K extends {
1201
- getStatistics: () => unknown;
1202
- } ? ReturnType<K["getStatistics"]> : never;
1203
- declare type StatisticsTypes = {
1204
- [Property in keyof Modules]: StatisticsType<Modules[Property]>;
1205
- };
1206
- export declare type ModuleStatistics = Flatten<StatisticsTypes>;
1207
- export declare type Statistics = ModuleStatistics & UserAgentInformation & {
497
+ declare type ModuleStatistics = AdaptivityStatistics & BufferTimeStatistics & ConnectionStatistics & ConstraintCapStatistics & DecoderStatistics & DocumentStateModulesStatistics & IncomingDataModuleStatistics & MseModuleStatistics & PlaybackModuleStatistics & QualityOfServiceModuleStatistics & RenditionsModuleStatistics & SyncModuleStatistics & TelemetryModuleStatistics & VideoPlayerStatistics;
498
+ declare type Statistics = ModuleStatistics & UserAgentInformation & {
1208
499
  version: string;
1209
500
  ip?: string;
1210
501
  url: string;
@@ -1217,15 +508,70 @@ export declare type Statistics = ModuleStatistics & UserAgentInformation & {
1217
508
  channelId: string;
1218
509
  channelGroupId?: string;
1219
510
  timeToFirstFrame?: number;
511
+ iosMediaElementEnabled?: boolean;
1220
512
  };
513
+ /**
514
+ * Represents a Vindral client instance
515
+ *
516
+ * The most most essential methods when using the Vindral class are:
517
+ *
518
+ * - connect() - this has to be called to actually start connecting
519
+ * - attach() - to attach the Vindral video view to the DOM so that users can see it
520
+ * - userInput() - to activate audio on browsers that require a user gesture to play audio
521
+ * - unload() - unloads the instance, its very important that this is called when cleaning up the Vindral instance, otherwise background timers may leak.
522
+ *
523
+ * The Vindral instance will emit a variety of events during its lifetime. Use .on("event-name", callback) to listen to these events.
524
+ * See [[PublicVindralEvents]] for the events types that can be emitted.
525
+ *
526
+ * ```typescript
527
+ * // minimal configuration of a Vindral client instance
528
+ * const instance = new Vindral({
529
+ * url: "https://lb.cdn.vindral.com",
530
+ * channelId: "vindral_demo1_ci_099ee1fa-80f3-455e-aa23-3d184e93e04f",
531
+ * })
532
+ *
533
+ * // Will be called when timed metadata is received
534
+ * instance.on("metadata", console.log)
535
+ *
536
+ * // Will be called when a user interaction is needed to activate audio
537
+ * instance.on("needs user input", console.log)
538
+ *
539
+ * // Start connecting to the cdn
540
+ * instance.connect()
541
+ *
542
+ * // Attach the video view to the DOM
543
+ * instance.attach(document.getElementById("root"))
544
+ *
545
+ * // When done with the instance
546
+ * instance.unload()
547
+ * ```
548
+ */
1221
549
  export declare class Vindral extends Emitter<PublicVindralEvents> {
1222
550
  private static MAX_POOL_SIZE;
1223
551
  private static INITIAL_MAX_BIT_RATE;
1224
552
  private static PING_TIMEOUT;
553
+ private static DISCONNECT_TIMEOUT;
554
+ /**
555
+ * Picture in picture
556
+ */
1225
557
  readonly pictureInPicture: {
558
+ /**
559
+ * Enters picture in picture
560
+ * @returns a promise that resolves if successful
561
+ */
1226
562
  enter: () => Promise<void>;
563
+ /**
564
+ * Exits picture in picture
565
+ * @returns a promise that resolves if successful
566
+ */
1227
567
  exit: () => Promise<void>;
568
+ /**
569
+ * returns whether picture in picture is currently active
570
+ */
1228
571
  isActive: () => boolean;
572
+ /**
573
+ * returns whether picture in picture is supported
574
+ */
1229
575
  isSupported: () => boolean;
1230
576
  };
1231
577
  private browser;
@@ -1249,47 +595,200 @@ export declare class Vindral extends Emitter<PublicVindralEvents> {
1249
595
  private pool;
1250
596
  private durationSessions;
1251
597
  private sizes;
598
+ private isSuspended;
599
+ private disconnectTimeout;
1252
600
  constructor(options: Options);
601
+ /**
602
+ * Attaches the video view to a DOM element. The Vindral video view will be sized to fill this element while
603
+ * maintaining the correct aspect ratio.
604
+ * @param container the container element to append the video view to. Often a div element.
605
+ * @returns
606
+ */
1253
607
  attach: (container: HTMLElement) => void;
608
+ /**
609
+ * Set the current volume.
610
+ * Setting this to 0 is not equivalent to muting the audio.
611
+ * Setting this to >0 is not equivalent to unmuting the audio.
612
+ */
1254
613
  set volume(volume: number);
614
+ /**
615
+ * The current volume. Note that if the playback is muted volume can still be set.
616
+ */
1255
617
  get volume(): number;
618
+ /**
619
+ * Set playback to muted/unmuted
620
+ */
1256
621
  set muted(muted: boolean);
622
+ /**
623
+ * Whether the playback is muted or not
624
+ */
1257
625
  get muted(): boolean;
626
+ /**
627
+ * Media (audio | video | audio+video)
628
+ */
1258
629
  get media(): Media;
630
+ /**
631
+ * The current average video bit rate in bits/s
632
+ */
1259
633
  get videoBitRate(): number;
634
+ /**
635
+ * The current average audio bit rate in bits/s
636
+ */
1260
637
  get audioBitRate(): number;
638
+ /**
639
+ * The current connection state
640
+ */
1261
641
  get connectionState(): Readonly<State>;
642
+ /**
643
+ * The current playback state
644
+ */
1262
645
  get playbackState(): Readonly<PlaybackState>;
646
+ /**
647
+ * The current buffer fullness as a floating point value between 0-1, where 1 is full and 0 i empty.
648
+ */
1263
649
  get bufferFullness(): number;
650
+ /**
651
+ * Whether user bandwidth savings by capping the video resolution to the size of the video element is enabled
652
+ */
1264
653
  get sizeBasedResolutionCapEnabled(): boolean;
654
+ /**
655
+ * Enables or disables user bandwidth savings by capping the video resolution to the size of the video element.
656
+ */
1265
657
  set sizeBasedResolutionCapEnabled(enabled: boolean);
658
+ /**
659
+ * Whether ABR is currently enabled
660
+ */
1266
661
  get abrEnabled(): boolean;
662
+ /**
663
+ * Enable or disable ABR
664
+ *
665
+ * The client will immediatly stop changing renditon level based on QoS metrics
666
+ *
667
+ * Note: It is strongly recommended to keep this enabled as it can severly increase
668
+ * the number of buffering events for viewers.
669
+ */
1267
670
  set abrEnabled(enabled: boolean);
671
+ /**
672
+ * Estimated live edge time for the current channel
673
+ */
1268
674
  get serverEdgeTime(): number | undefined;
675
+ /**
676
+ * @returns Estimated wallclock time on the edge server in milliseconds
677
+ */
1269
678
  get serverWallclockTime(): number | undefined;
679
+ /**
680
+ * Local current time normalized between all channels in the channel group
681
+ */
1270
682
  get currentTime(): number;
683
+ /**
684
+ * Current time for the channel. This is the actual stream time, passed on from your ingress.
685
+ * Integer overflow could make this value differ from your encoder timestamps if it has been rolling for more
686
+ * than 42 days with RTMP as target.
687
+ *
688
+ * Note: This is not normalized between channels, thus it can make jumps when switching channels
689
+ */
1271
690
  get channelCurrentTime(): number;
691
+ /**
692
+ * The current target buffer time in milliseconds
693
+ */
1272
694
  get targetBufferTime(): number;
695
+ /**
696
+ * Set the current target buffer time in milliseconds
697
+ */
1273
698
  set targetBufferTime(bufferTimeMs: number);
699
+ /**
700
+ * The estimated playback latency based on target buffer time, the connection rtt and local playback drift
701
+ */
1274
702
  get playbackLatency(): number | undefined;
703
+ /**
704
+ * The estimated utc timestamp (in ms) for the playhead.
705
+ */
1275
706
  get playbackWallclockTime(): number | undefined;
707
+ /**
708
+ * Channels that can be switched between
709
+ */
1276
710
  get channels(): ReadonlyArray<Channel>;
711
+ /**
712
+ * Languages available
713
+ */
1277
714
  get languages(): ReadonlyArray<string>;
715
+ /**
716
+ * The current language
717
+ */
1278
718
  get language(): string | undefined;
719
+ /**
720
+ * Set the current language
721
+ */
1279
722
  set language(language: string | undefined);
723
+ /**
724
+ * The current channelId
725
+ */
1280
726
  get channelId(): string;
727
+ /**
728
+ * Set the current channelId
729
+ *
730
+ * Possible channels to set are available from [[channels]]
731
+ */
1281
732
  set channelId(channelId: string);
733
+ /**
734
+ * Max size that will be subcribed to
735
+ */
1282
736
  get maxSize(): Size;
737
+ /**
738
+ * Set max size that will be subscribed to
739
+ *
740
+ * Note: If ABR is disabled, setting this will make the client instantly subscribe to this size
741
+ */
1283
742
  set maxSize(size: Size);
743
+ /**
744
+ * The max video bit rate that will be subscribed to
745
+ *
746
+ * Note: Returns Number.MAX_SAFE_INTEGER if no limits have been set
747
+ */
1284
748
  get maxVideoBitRate(): number;
749
+ /**
750
+ * Set max video bit rate that will be subscribed to
751
+ *
752
+ * Note: If ABR is disabled, setting this will make the client instantly subscribe to this bitrate
753
+ */
1285
754
  set maxVideoBitRate(bitRate: number);
755
+ /**
756
+ * The max audio bit rate that will be subscribed to
757
+ *
758
+ * Note: Returns Number.MAX_SAFE_INTEGER if no limits have been set
759
+ */
1286
760
  get maxAudioBitRate(): number;
761
+ /**
762
+ * Set max audio bit rate that will be subscribed to
763
+ *
764
+ * Note: If ABR is disabled, setting this will make the client instantly subscribe to this bit rate
765
+ */
1287
766
  set maxAudioBitRate(bitRate: number);
767
+ /**
768
+ * The rendition levels available.
769
+ */
1288
770
  get renditionLevels(): ReadonlyArray<RenditionLevel>;
771
+ /**
772
+ * The current rendition level
773
+ */
1289
774
  get currentRenditionLevel(): Readonly<RenditionLevel> | undefined;
775
+ /**
776
+ * The target rendition level that the client is currently switching to
777
+ */
1290
778
  get targetRenditionLevel(): Readonly<RenditionLevel> | undefined;
779
+ /**
780
+ * True if the client is currently switching from one rendition level to another
781
+ */
1291
782
  get isSwitchingRenditionLevel(): boolean;
783
+ /**
784
+ * The time ranges buffered for video.
785
+ * The ranges are specified in milliseconds.
786
+ */
1292
787
  get videoBufferedRanges(): ReadonlyArray<TimeRange>;
788
+ /**
789
+ * The time ranges buffered for audio.
790
+ * The ranges are specified in milliseconds.
791
+ */
1293
792
  get audioBufferedRanges(): ReadonlyArray<TimeRange>;
1294
793
  get lastBufferEvent(): Readonly<BufferStateEvent>;
1295
794
  get activeRatios(): Map<string, number>;
@@ -1297,482 +796,498 @@ export declare class Vindral extends Emitter<PublicVindralEvents> {
1297
796
  get timeSpentBuffering(): number;
1298
797
  get timeActive(): number;
1299
798
  get mediaElement(): HTMLMediaElement | HTMLCanvasElement;
799
+ /**
800
+ * Get active Vindral Options
801
+ */
1300
802
  getOptions: () => Options & typeof defaultOptions;
803
+ /**
804
+ * Get url for fetching thumbnail. Note that fetching thumbnails only works for an active channel.
805
+ */
1301
806
  getThumbnailUrl: () => string;
807
+ /**
808
+ * Update authentication token on an already established and authenticated connection
809
+ */
1302
810
  updateAuthenticationToken: (token: string) => void;
811
+ /**
812
+ * @deprecated since 3.0.0 Use play instead.
813
+ * Connects to the configured channel and starts streaming
814
+ */
1303
815
  connect: () => void;
816
+ private _connect;
817
+ /**
818
+ * Get options that can be used for CastSender
819
+ */
1304
820
  getCastOptions: () => Options;
1305
821
  private connectionInfo;
1306
822
  private estimateRTT;
1307
823
  private connectHandler;
1308
824
  private emitLanguagesIfChanged;
1309
825
  private filterRenditions;
826
+ /**
827
+ * Patch the subscription with properties from the channel that aren't known until connection
828
+ * @param channel Channel with the renditions to patch the subscription based on
829
+ */
1310
830
  private patchSubscription;
1311
831
  private isSupportedVideoCodecProfile;
1312
832
  private supportedAudioCodecs;
1313
833
  private initializeDecodingModule;
834
+ /**
835
+ * Fully unloads the instance. This disconnects the clients and stops any background tasks.
836
+ * This client instance can not be used after this has been called.
837
+ */
1314
838
  unload: () => Promise<void>;
839
+ /**
840
+ * @deprecated since 3.0.0 Use play instead.
841
+ *
842
+ * Activates audio or video on web browsers that require a user gesture to enable media playback.
843
+ * The Vindral instance will emit a "needs user input" event to indicate when this is needed.
844
+ * But it is also safe to pre-emptively call this if it is more convenient - such as in cases where
845
+ * the Vindral instance itself is created in a user input event.
846
+ *
847
+ * Requirements: This method needs to be called within a user-input event handler to function properly, such as
848
+ * an onclick handler.
849
+ *
850
+ * Note: Even if you pre-emptively call this it is still recommended to listen to "needs user input"
851
+ * and handle that event gracefully.
852
+ */
1315
853
  userInput: () => void;
1316
- private play;
854
+ pause: () => void;
855
+ /**
856
+ *
857
+ * Start playing the stream.
858
+ *
859
+ * This method also activates audio or video on web browsers that require a user gesture to enable media playback.
860
+ * The Vindral instance will emit a "needs user input" event to indicate when this is needed.
861
+ * But it is also safe to pre-emptively call this if it is more convenient - such as in cases where
862
+ * the Vindral instance itself is created in a user input event.
863
+ *
864
+ * Note: In most browsers this method needs to be called within a user-input event handler, such as
865
+ * an onclick handler in order to activate audio. Most implementations call this directly after constructing the Vindral
866
+ * instance once in order to start playing, and then listen to a user-event in order to allow audio to be activated.
867
+ *
868
+ * Note 2: Even if you pre-emptively call this it is still recommended to listen to "needs user input"
869
+ * and handle that event gracefully.
870
+ */
871
+ play: () => Promise<void>;
872
+ /**
873
+ * How long in milliseconds since the instance was created
874
+ */
1317
875
  get uptime(): number;
1318
876
  getStatistics: () => Statistics;
877
+ private resetModules;
878
+ private suspend;
879
+ private unsuspend;
1319
880
  private getRuntimeInfo;
881
+ private onMediaElementState;
1320
882
  private onBufferEvent;
883
+ /**
884
+ * Aligns size and bitrate to match a rendition level correctly
885
+ */
1321
886
  private alignSizeAndBitRate;
1322
887
  private get currentSubscription();
1323
888
  private get targetSubscription();
1324
889
  private timeToFirstFrame;
1325
890
  private willUseMediaSource;
1326
891
  }
1327
- interface TelemetryModuleOptions {
1328
- url: string;
1329
- enabled?: boolean;
1330
- interval?: number;
1331
- includeErrors?: boolean;
1332
- includeEvents?: boolean;
1333
- includeStats?: boolean;
1334
- maxRetries?: number;
1335
- maxErrorReports?: number;
1336
- maxEvents?: number;
1337
- }
1338
- interface ShiftedEdgeEvent {
1339
- code: "shifted_edge";
1340
- edgeUrl: string;
1341
- }
1342
- declare type TelemetryEvent = ShiftedEdgeEvent;
1343
- interface FlushOptions {
1344
- isFinal?: boolean;
1345
- initiator: StatsInitiator;
1346
- }
1347
- interface ParentContext {
1348
- getStatistics(): Statistics;
1349
- readonly connectionState: Readonly<State>;
1350
- }
1351
- interface TelemetryModuleListeners {
1352
- ["visibilitystate"]: VisibilityState;
1353
- ["pagehide"]: PageTransitionEvent;
1354
- ["error"]: Readonly<VindralError>;
1355
- }
1356
- interface TelemetryModuleEvents {
1357
- ["send signal"]: Readonly<string>;
1358
- }
1359
892
  interface TelemetryModuleStatistics {
1360
893
  errorCount: number;
1361
894
  }
1362
- declare type StatsInitiator = "interval" | "visibilitychange" | "pagehide" | "unload";
1363
- declare class TelemetryModule {
1364
- private logger;
1365
- private timers;
1366
- private emitter;
1367
- private options;
1368
- private parentContext;
1369
- private unsentLines;
1370
- private retries;
1371
- private errorCount;
1372
- private eventCount;
1373
- private statsCount;
1374
- private constructor();
1375
- unload: () => void;
1376
- static create: (logger: Logger, emitter: Emitter<TelemetryModuleListeners, TelemetryModuleEvents>, options: TelemetryModuleOptions, parentContext: ParentContext) => TelemetryModule;
1377
- load: () => void;
1378
- addStats: ({ isFinal, initiator }?: FlushOptions, extra?: Record<string, unknown>) => void;
1379
- addEvent: (event: TelemetryEvent) => void;
1380
- getStatistics: () => TelemetryModuleStatistics;
1381
- private send;
1382
- private onVisibilityChange;
1383
- private onPageHide;
1384
- private onRejection;
1385
- private onError;
1386
- private addError;
1387
- }
1388
- declare class TimerModule {
1389
- private pendingTimeouts;
1390
- private pendingIntervals;
1391
- static create: () => TimerModule;
1392
- clearTimeout: (handle: number) => void;
1393
- clearInterval: (handle: number) => void;
1394
- setTimeout: (callback: (...args: unknown[]) => void, timeout: number, ...args: unknown[]) => number;
1395
- setInterval: (callback: (...args: unknown[]) => void, interval: number, ...args: unknown[]) => number;
1396
- unload: () => void;
1397
- }
1398
- declare class UnpauseModule {
1399
- private logger;
1400
- private element;
1401
- private documentState;
1402
- private timers;
1403
- private constructor();
1404
- static create: (logger: Logger, element: MediaElement, documentState: DocumentState) => UnpauseModule;
1405
- unload: () => void;
1406
- private unpause;
1407
- }
1408
- interface VideoPlayerModuleListeners {
1409
- ["decoded frame"]: Readonly<DecodedSample>;
1410
- ["error"]: Readonly<VindralError>;
1411
- }
1412
- interface ClockSource {
1413
- readonly currentTime: number;
1414
- }
1415
- interface VideoPlayerStatistics {
1416
- renderedFrameCount: number;
1417
- rendererDroppedFrameCount: number;
1418
- contextLostCount: number;
1419
- contextRestoredCount: number;
1420
- }
1421
- declare class VideoPlayerModule {
1422
- private readonly oldTimestampLimit;
1423
- private pool;
1424
- private logger;
1425
- private emitter;
1426
- private videoRenderer;
1427
- private renderQueue;
1428
- private clockSource;
1429
- private isFirstFrame;
1430
- private animationFrameRequest?;
1431
- private renderedFrameCount;
1432
- private rendererDroppedFrameCount;
1433
- constructor(emitter: Emitter<VideoPlayerModuleListeners, unknown>, logger: Logger, clockSource: ClockSource, pool: ArrayBufferPool);
1434
- load: () => void;
1435
- unload: () => Promise<void>;
1436
- suspend: () => void;
1437
- unsuspend: () => void;
1438
- element: () => HTMLCanvasElement;
1439
- getStatistics: () => VideoPlayerStatistics;
1440
- static create: (emitter: Emitter<VideoPlayerModuleListeners, unknown>, logger: Logger, clockSource: ClockSource, pool: ArrayBufferPool) => VideoPlayerModule;
1441
- private onDecodedFrame;
1442
- private render;
1443
- private renderSample;
1444
- private stopRender;
1445
- }
1446
- export interface Modules {
1447
- audioPlayer?: AudioPlayerModule;
1448
- videoPlayer?: VideoPlayerModule;
1449
- decoder?: DecoderModule;
1450
- mseModule?: MseModule;
1451
- constraintCap: ConstraintCapModule;
1452
- adaptivity: AdaptivityModule;
1453
- connection: ConnectionModule;
1454
- playback: PlaybackModule;
1455
- pictureInPicture: PictureInPictureModule;
1456
- logger: LoggerModule;
1457
- timer: TimerModule;
1458
- subscription: SubscriptionModule;
1459
- renditions: RenditionsModule;
1460
- qualityOfService: QualityOfServiceModule;
1461
- event: EventModule;
1462
- sync: SyncModule;
1463
- telemetry?: TelemetryModule;
1464
- documentState: DocumentStateModule;
1465
- incomingData: IncomingDataModule;
1466
- bufferTime: BufferTimeModule;
1467
- unpause?: UnpauseModule;
1468
- }
895
+ /**
896
+ * The current reconnect state to use to decide whether to keep reconnecting or not
897
+ */
1469
898
  export interface ReconnectState {
899
+ /**
900
+ * The number of retry attempts so far.
901
+ * This gets reset on every successful connect, so it will start from zero every
902
+ * time the client instance gets disconnected and will increment until the
903
+ * client instance makes a successful connection attempt.
904
+ */
1470
905
  reconnectRetries: number;
1471
906
  }
907
+ /**
908
+ * Advanced options to override default behaviour.
909
+ */
1472
910
  export interface AdvancedOptions {
911
+ /**
912
+ * Constrains wasm decoding to this resolution.
913
+ * By default it is set to 1280 in width and height.
914
+ * This guarantees better performance on older devices and reduces battery drain in general.
915
+ */
1473
916
  wasmDecodingConstraint: Partial<VideoConstraint>;
1474
917
  }
1475
- export declare type Media = "audio" | "video" | "audio+video";
918
+ declare type Media = "audio" | "video" | "audio+video";
919
+ /**
920
+ * Options for the Vindral instance
921
+ *
922
+ */
1476
923
  export interface Options {
924
+ /**
925
+ * URL to use when connecting to the stream
926
+ */
1477
927
  url: string;
928
+ /**
929
+ * Channel ID to connect to initially - can be changed later mid-stream when connected to a channel group.
930
+ */
1478
931
  channelId: string;
932
+ /**
933
+ * Channel group to connect to
934
+ * Note: Only needed for fast channel switching
935
+ */
1479
936
  channelGroupId?: string;
937
+ /**
938
+ * A container to attach the video view in - can be provided later with .attach() on the vindral core instance
939
+ */
1480
940
  container?: HTMLElement;
941
+ /**
942
+ * An authentication token to provide to the server when connecting - only needed for channels with authentication enabled
943
+ * Note: If not supplied when needed, an "Authentication Failed" error will be raised.
944
+ */
1481
945
  authenticationToken?: string;
946
+ /**
947
+ * Language to use initially - can be changed during runtime on the vindral instance
948
+ * Note: Only needed when multiple languages are provided - if no language is specified, one will be automatically selected.
949
+ */
1482
950
  language?: string;
951
+ /**
952
+ * Sets the log level - defaults to info
953
+ */
1483
954
  logLevel?: Level;
955
+ /**
956
+ * Sets the minimum and initial buffer time
957
+ */
1484
958
  minBufferTime?: number;
959
+ /**
960
+ * Sets the maximum buffer time allowed. The vindral instance will automatically slowly increase
961
+ * the buffer time if the user experiences too much buffering with the initial buffer time.
962
+ *
963
+ * Note: This is not yet implemented
964
+ */
1485
965
  maxBufferTime?: number;
966
+ /**
967
+ * Enables or disables user bandwidth savings by capping the video resolution to the size of the video element.
968
+ *
969
+ * Is enabled by default.
970
+ *
971
+ * Note: This is automatically set to false when abrEnabled is set to false.
972
+ */
1486
973
  sizeBasedResolutionCapEnabled?: boolean;
974
+ /**
975
+ * Enables or disables picture in picture support.
976
+ */
1487
977
  pictureInPictureEnabled?: boolean;
978
+ /**
979
+ * Enable bursting for initial connection and channel switches. This makes time to first frame faster at the
980
+ * cost of stability (more demanding due to the sudden burst of live content)
981
+ *
982
+ * Is disabled by default.
983
+ *
984
+ */
1488
985
  burstEnabled?: boolean;
986
+ /**
987
+ * Enable usage of the MediaSource API on supported browsers.
988
+ *
989
+ * Is enabled by default.
990
+ *
991
+ * Note: We recommend to keep this at the default value unless you have very specific needs.
992
+ */
1489
993
  mseEnabled?: boolean;
994
+ /**
995
+ * Enable Opus with the MediaSource API on supported browsers.
996
+ *
997
+ * Is enabled by default.
998
+ *
999
+ * Note: Opus generally provides better audio quality and is therefore recommended to keep enabled.
1000
+ */
1490
1001
  mseOpusEnabled?: boolean;
1002
+ /**
1003
+ * Enable or disable support for playing audio in the background for iOS devices.
1004
+ *
1005
+ * Is false (disabled) by default.
1006
+ *
1007
+ * Note: This may be enabled by default in a future (major) release
1008
+ */
1491
1009
  iosBackgroundPlayEnabled?: boolean;
1010
+ /**
1011
+ * Enable or disable Adaptive Bit Rate. This allows for automatically adapting the incoming bit rate based on
1012
+ * the viewers bandwidth and thus avoiding buffering events. This also disables the
1013
+ * sizeBasedResolutionCapEnabled option.
1014
+ *
1015
+ * Is enabled by default.
1016
+ *
1017
+ * Note: It is strongly recommended to keep this enabled as user experience can greatly suffer without ABR.
1018
+ */
1492
1019
  abrEnabled?: boolean;
1020
+ /**
1021
+ * Enable or disable telemetry. This allows for telemetry and errors being collected.
1022
+ *
1023
+ * Is enabled by default.
1024
+ *
1025
+ * We appreciate you turning it off during development/staging to not bloat real telemetry data.
1026
+ *
1027
+ * Note: It is strongly recommended to keep this enabled in production as it is required for insights and KPIs.
1028
+ */
1493
1029
  telemetryEnabled?: boolean;
1030
+ /**
1031
+ * Set a cap on the maximum video size.
1032
+ * This can be used to provide user options to limit the video bandwidth usage.
1033
+ *
1034
+ * Note: This takes precedence over any size based resolution caps.
1035
+ */
1494
1036
  maxSize?: Size;
1037
+ /**
1038
+ * Maximum audio bit rate allowed.
1039
+ * This can be used to provide user options to limit the audio bandwidth usage.
1040
+ */
1495
1041
  maxAudioBitRate?: number;
1042
+ /**
1043
+ * Maximum video bit rate allowed.
1044
+ * This can be used to provide user options to limit the video bandwidth usage.
1045
+ */
1496
1046
  maxVideoBitRate?: number;
1047
+ /**
1048
+ * Controls video element background behaviour while loading.
1049
+ * - If `false`, a black background will be shown.
1050
+ * - If undefined or `true`, a live thumbnail will be shown.
1051
+ * - If set to a string containing a URL (https://urltoimage), use that.
1052
+ * Default `true` - meaning a live thumbnail is shown
1053
+ */
1497
1054
  poster?: boolean | string;
1055
+ /**
1056
+ * Whether to start the player muted or to try to start playing audio automatically.
1057
+ */
1498
1058
  muted?: boolean;
1059
+ /**
1060
+ * Provide a custom reconnect handler to control when the instance should stop trying to
1061
+ * reconnect. The reconnect handler should either return true to allow the reconnect or
1062
+ * false to stop reconnecting. It can also return a promise with true or false if it needs
1063
+ * to make any async calls before determining whether to reconnect.
1064
+ *
1065
+ * The default reconnect handler allows 30 reconnects before stopping.
1066
+ *
1067
+ * Note: the ReconnectState gets reset every time the client instance makes a successful connection.
1068
+ * This means the default reconnect handler will only stop reconnecting after 30 _consecutive_ failed connections.
1069
+ *
1070
+ * ```typescript
1071
+ * // An example reconnect handler that will reconnect forever
1072
+ * const reconnectHandler = (state: ReconnectState) => true
1073
+ *
1074
+ * // An example reconnect handler that will fetch an url and determine whether to reconnect
1075
+ * const reconnectHandler = async (state: ReconnectState) => {
1076
+ * const result = await fetch("https://should-i-reconnect-now.com")
1077
+ * return result.ok
1078
+ * },
1079
+ * ```
1080
+ */
1499
1081
  reconnectHandler?: (state: ReconnectState) => Promise<boolean> | boolean;
1500
1082
  tags?: string[];
1501
1083
  ownerSessionId?: string;
1502
1084
  edgeUrl?: string;
1503
1085
  logShippingEnabled?: boolean;
1504
1086
  statsShippingEnabled?: boolean;
1087
+ /**
1088
+ * Enable wake lock for iOS devices.
1089
+ * The wake lock requires that the audio has been activated at least once for the instance, otherwise it will not work.
1090
+ * Other devices already provide wake lock by default.
1091
+ *
1092
+ * This option is redundant and has no effect if iosMediaElementEnabled is enabled since that automatically enables wake lock.
1093
+ *
1094
+ * Disabled by default.
1095
+ */
1505
1096
  iosWakeLockEnabled?: boolean;
1097
+ /**
1098
+ * Disabling this will revert to legacy behaviour where Vindral will try to always keep the video element playing.
1099
+ */
1100
+ pauseSupportEnabled?: boolean;
1101
+ /**
1102
+ * Enables iOS devices to use a media element for playback. This enables fullscreen and picture in picture support on iOS.
1103
+ */
1104
+ iosMediaElementEnabled?: boolean;
1105
+ /**
1106
+ * Advanced options to override default behaviour.
1107
+ */
1506
1108
  advanced?: AdvancedOptions;
1507
1109
  media?: Media;
1508
1110
  }
1111
+ /**
1112
+ * Contextual information about the language switch
1113
+ */
1509
1114
  export interface LanguageSwitchContext {
1115
+ /**
1116
+ * The new language that was switched to
1117
+ */
1510
1118
  language: string;
1511
1119
  }
1120
+ /**
1121
+ * Contextual information about the channel switch
1122
+ */
1512
1123
  export interface ChannelSwitchContext {
1124
+ /**
1125
+ * The new channel id that was switched to
1126
+ */
1513
1127
  channelId: string;
1514
1128
  }
1129
+ interface VolumeState {
1130
+ /**
1131
+ * Whether the audio is muted
1132
+ */
1133
+ isMuted: boolean;
1134
+ /**
1135
+ * The volume level
1136
+ */
1137
+ volume: number;
1138
+ }
1139
+ /**
1140
+ * The events that can be emitted from the Vindral instance
1141
+ */
1515
1142
  export interface PublicVindralEvents {
1143
+ /**
1144
+ * When an error that requires action has occurred
1145
+ *
1146
+ * Can be a fatal error that will unload the Vindral instance - this is indicated by `isFatal()` on the error object returning true.
1147
+ *
1148
+ * In case of a fatal error it is appropriate to indicate what the error was to the user, either by displaying the error.message or
1149
+ * by using the error.code() as a key to look up a localization string. To resume streaming it is required to create a new Vindral instance.
1150
+ */
1516
1151
  ["error"]: Readonly<VindralError>;
1152
+ /**
1153
+ * When the instance needs user input to activate audio or sometimes video playback.
1154
+ * Is called with an object
1155
+ * ```
1156
+ * {
1157
+ * forAudio: boolean // true if user input is needed for audio playback
1158
+ * forVideo: boolean // true if user input is needed for video playback
1159
+ * }
1160
+ * ```
1161
+ */
1517
1162
  ["needs user input"]: NeedsUserInputContext;
1163
+ /**
1164
+ * When a timed metadata event has been triggered
1165
+ */
1518
1166
  ["metadata"]: Readonly<Metadata>;
1167
+ /**
1168
+ * When the playback state changes
1169
+ */
1519
1170
  ["playback state"]: Readonly<PlaybackState>;
1171
+ /**
1172
+ * When the connection state changes
1173
+ */
1520
1174
  ["connection state"]: Readonly<State>;
1175
+ /**
1176
+ * When the available rendition levels are changed
1177
+ */
1521
1178
  ["rendition levels"]: ReadonlyArray<RenditionLevel>;
1179
+ /**
1180
+ * When the rendition level is changed
1181
+ */
1522
1182
  ["rendition level"]: Readonly<RenditionLevel>;
1183
+ /**
1184
+ * When the available languages is changed
1185
+ */
1523
1186
  ["languages"]: ReadonlyArray<string>;
1187
+ /**
1188
+ * When the available channels is changed
1189
+ */
1524
1190
  ["channels"]: ReadonlyArray<Channel>;
1191
+ /**
1192
+ * When a context switch state change has occurred.
1193
+ * E.g. when a channel change has been requested, or quality is changed.
1194
+ */
1525
1195
  ["context switch"]: Readonly<ContextSwitchState>;
1196
+ /**
1197
+ * Emitted when a wallclock time message has been received from the server.
1198
+ *
1199
+ * Note: This is the edge server wallclock time and thus may differ slightly
1200
+ * between two viewers if they are connected to different edge servers.
1201
+ */
1526
1202
  ["server wallclock time"]: Readonly<number>;
1203
+ /**
1204
+ * Is emitted during connection whether the channel is live or not.
1205
+ *
1206
+ * If the channel is not live, the Vindral instance will try to reconnect until the `reconnectHandler`
1207
+ * determines that no more retries should be made.
1208
+ *
1209
+ * Note: If the web-sdk is instantiated at the same time as you are starting the stream it is possible
1210
+ * that this emits false until the started state has propagated through the system.
1211
+ */
1527
1212
  ["is live"]: boolean;
1213
+ /**
1214
+ * Emitted when a channel switch has been completed and the first frame of the new channel is rendered.
1215
+ * A string containing the channel id of the new channel is provided as an argument.
1216
+ */
1528
1217
  ["channel switch"]: Readonly<ChannelSwitchContext>;
1218
+ /**
1219
+ * Emitted when a language switch has been completed and the new language starts playing.
1220
+ */
1529
1221
  ["language switch"]: Readonly<LanguageSwitchContext>;
1222
+ /**
1223
+ * Emitted when the volume state changes.
1224
+ *
1225
+ * This is triggered both when the user changes the volume through the Vindral instance, but also
1226
+ * from external sources such as OS media shortcuts or other native UI outside of the browser.
1227
+ */
1228
+ ["volume state"]: Readonly<VolumeState>;
1530
1229
  ["buffer state event"]: Readonly<BufferStateEvent>;
1531
1230
  ["initialized media"]: void;
1532
1231
  }
1533
- export interface Track {
1534
- type: Type;
1535
- mimeType: string;
1536
- }
1537
- declare type ValuesOf<T> = T[keyof T];
1538
- declare type UndefinedKeys<T> = {
1539
- [K in keyof T]: undefined extends T[K] ? K : never;
1540
- }[keyof T];
1541
- declare type ExtractOptional<T> = Pick<T, Exclude<UndefinedKeys<T>, undefined>>;
1542
- declare type ExtractRequired<T> = Omit<T, Exclude<UndefinedKeys<T>, undefined>>;
1543
- declare type NullableObjectValuesOf<T> = Exclude<Partial<Extract<ValuesOf<ExtractOptional<T>>, object>>, Array<unknown>>;
1544
- declare type ObjectValuesOf<T> = Exclude<Extract<ValuesOf<ExtractRequired<T>>, object>, Array<unknown>>;
1545
- declare type UnionToIntersection<U> = (U extends any ? (k: U) => void : never) extends (k: infer I) => void ? I : never;
1546
- declare type Flatten<T> = UnionToIntersection<ObjectValuesOf<T> | NullableObjectValuesOf<T>>;
1547
- interface RenditionProps {
1548
- id: number;
1549
- bitRate: number;
1550
- codecString?: string;
1551
- language?: string;
1552
- meta?: Record<string, string>;
1553
- }
1554
- interface VideoRenditionProps {
1555
- codec: VideoCodec;
1556
- frameRate: [
1557
- number,
1558
- number
1559
- ];
1560
- width: number;
1561
- height: number;
1562
- }
1563
- interface AudioRenditionProps {
1564
- codec: AudioCodec;
1565
- channels: number;
1566
- sampleRate: number;
1567
- }
1568
- interface CodecProps {
1569
- codec: Codec;
1570
- codecString?: string;
1571
- }
1572
- export declare type VideoRendition = VideoRenditionProps & RenditionProps;
1573
- export declare type AudioRendition = AudioRenditionProps & RenditionProps;
1574
- export declare type Rendition = VideoRendition | AudioRendition;
1575
- export declare const isVideoRendition: (rendition: Readonly<Rendition>) => rendition is VideoRendition;
1576
- export declare const isAudioRendition: (rendition: Readonly<Rendition>) => rendition is AudioRendition;
1577
- export declare const getMimeType: (rendition: Readonly<CodecProps>) => string;
1578
- interface AudioConstraint {
1579
- bitRate: number;
1580
- codec?: AudioCodec;
1581
- codecString?: string;
1582
- language?: string;
1583
- }
1584
- interface VideoConstraint {
1585
- width: number;
1586
- height: number;
1587
- bitRate: number;
1588
- codec?: VideoCodec;
1589
- codecString?: string;
1590
- }
1591
- declare type SubscriptionInitiator = "manual" | "edge";
1592
- interface Subscription {
1593
- channelId: string;
1594
- video: VideoConstraint;
1595
- audio: AudioConstraint;
1596
- burstMs?: number;
1597
- meta?: Record<string, string>;
1598
- initiator?: SubscriptionInitiator;
1599
- }
1600
- interface ContextSwitch {
1601
- type: "context switch";
1602
- }
1603
- interface ContextSwitchComplete {
1604
- type: "context switch complete";
1605
- }
1606
- interface RenditionsSignal {
1607
- type: "renditions";
1608
- renditions: Rendition[];
1609
- }
1610
- interface SubscribeSignal {
1611
- type: "subscribe";
1612
- subscription: Subscription;
1613
- }
1614
- interface ClientIpSignal {
1615
- type: "client ip";
1616
- ip: string;
1617
- }
1618
- interface SubscriptionChangedSignal {
1619
- type: "subscription changed";
1620
- subscription: Subscription;
1621
- }
1622
- interface TimingInfoSignal {
1623
- type: "timing info";
1624
- timingInfo: {
1625
- channelId: string;
1626
- timestamp: number;
1627
- wallclockTime: number;
1232
+ declare const defaultOptions: {
1233
+ sizeBasedResolutionCapEnabled: boolean;
1234
+ pictureInPictureEnabled: boolean;
1235
+ abrEnabled: boolean;
1236
+ burstEnabled: boolean;
1237
+ mseEnabled: boolean;
1238
+ mseOpusEnabled: boolean;
1239
+ muted: boolean;
1240
+ minBufferTime: number;
1241
+ maxBufferTime: number;
1242
+ logLevel: Level;
1243
+ maxSize: Size;
1244
+ maxVideoBitRate: number;
1245
+ maxAudioBitRate: number;
1246
+ tags: string[];
1247
+ media: Media;
1248
+ poster: string | boolean;
1249
+ reconnectHandler: (state: ReconnectState) => Promise<boolean> | boolean;
1250
+ iosWakeLockEnabled: boolean;
1251
+ telemetryEnabled: boolean;
1252
+ iosMediaElementEnabled: boolean;
1253
+ pauseSupportEnabled: boolean;
1254
+ advanced: {
1255
+ wasmDecodingConstraint: Partial<VideoConstraint>;
1628
1256
  };
1629
- }
1630
- interface PingSignal {
1631
- type: "ping";
1632
- }
1633
- interface PongSignal {
1634
- type: "pong";
1635
- }
1636
- interface RefreshAuthSignal {
1637
- type: "refresh auth";
1638
- token: string;
1639
- }
1640
- declare type Signal = RenditionsSignal | SubscribeSignal | SubscriptionChangedSignal | TimingInfoSignal | PingSignal | PongSignal | ClientIpSignal | ContextSwitch | ContextSwitchComplete | RefreshAuthSignal;
1641
- export interface Channel {
1642
- channelId: string;
1643
- name: string;
1644
- isLive: boolean;
1645
- thumbnailUrls: string[];
1646
- }
1647
- interface ClientOverrides {
1648
- maxVideoBitRate?: number;
1649
- minBufferTime?: number;
1650
- maxBufferTime?: number;
1651
- burstEnabled?: boolean;
1652
- }
1653
- interface ChannelWithRenditionsAndOverrides extends Channel {
1654
- renditions: Rendition[];
1655
- overrides?: ClientOverrides;
1656
- }
1657
- interface ConnectOptions {
1658
- channelGroupId?: string;
1659
- channelId: string;
1660
- }
1661
- interface Telemetry {
1662
- url: string;
1663
- probability?: number;
1664
- includeErrors?: boolean;
1665
- includeEvents?: boolean;
1666
- includeStats?: boolean;
1667
- maxRetries?: number;
1668
- maxErrorReports?: number;
1669
- interval?: number;
1670
- }
1671
- export interface ConnectResponse {
1672
- logsUrl?: string;
1673
- statsUrl?: string;
1674
- telemetry?: Telemetry;
1675
- channels: ChannelWithRenditionsAndOverrides[];
1676
- edges: string[];
1677
- }
1678
- export interface ApiClientOptions {
1679
- publicEndpoint: string;
1680
- tokenFactory?: AuthorizationTokenFactory;
1681
- }
1682
- export interface AuthorizationContext {
1683
- channelGroupId?: string;
1684
- channelId?: string;
1685
- }
1686
- export declare type AuthorizationTokenFactory = (context: AuthorizationContext) => string | undefined;
1687
- export declare class ApiClient {
1688
- private baseUrl;
1689
- private tokenFactory?;
1690
- constructor(options: ApiClientOptions);
1691
- connect(options: ConnectOptions): Promise<ConnectResponse>;
1692
- getChannel(channelId: string): Promise<Channel>;
1693
- getChannels(channelGroupId: string): Promise<Channel[]>;
1694
- private getHeaders;
1695
- private getAuthToken;
1696
- private toChannels;
1697
- private toChannel;
1698
- }
1699
- export declare type CastState = "casting" | "not casting";
1700
- export interface CastSenderEvents {
1701
- ["connected"]: void;
1702
- ["resumed"]: void;
1703
- ["disconnected"]: void;
1704
- ["failed"]: void;
1705
- ["metadata"]: Metadata;
1706
- ["server wallclock time"]: number;
1707
- }
1708
- export interface CastConfig {
1709
- options: Options;
1710
- background?: string;
1711
- receiverApplicationId?: string;
1712
- }
1713
- export declare type CustomCastMessageType = "stop" | "start" | "updateAuthToken" | "serverWallclockTime" | "metadata" | "setChannelId" | "setLanguage";
1714
- export interface CastCustomMessage {
1715
- type: CustomCastMessageType;
1716
- channelId?: string;
1717
- language?: string;
1718
- config?: CastConfig;
1719
- token?: string;
1720
- serverWallclockTime?: number;
1721
- metadata?: Metadata;
1722
- }
1723
- export declare class CastSender extends Emitter<CastSenderEvents> {
1724
- private state;
1725
- private config;
1726
- private unloaded;
1727
- constructor(config: CastConfig);
1728
- get casting(): boolean;
1729
- get volume(): number;
1730
- set volume(volume: number);
1731
- get language(): string | undefined;
1732
- set language(language: string | undefined);
1733
- get channelId(): string;
1734
- set channelId(channelId: string);
1735
- updateAuthenticationToken: (token: string) => void;
1736
- unload: () => void;
1737
- init: () => Promise<void>;
1738
- start: () => Promise<void>;
1739
- stop: () => void;
1740
- getReceiverName: () => string | undefined;
1741
- private onGCastApiAvailable;
1742
- private send;
1743
- private onMessage;
1744
- private onSessionStarted;
1745
- private onSessionStateChanged;
1746
- private getInstance;
1747
- private getSession;
1748
- private castLibrariesAdded;
1749
- private verifyCastLibraries;
1750
- }
1751
- export declare const isValidOptions: (options: unknown) => options is Options;
1752
- export declare const validateOptions: (options: Options) => Options;
1753
- interface FullscreenEvents {
1754
- ["on fullscreen change"]: boolean;
1755
- }
1756
- declare class Fullscreen extends Emitter<FullscreenEvents> {
1757
- private container;
1758
- constructor(container: HTMLElement);
1759
- unload: () => void;
1760
- request: () => Promise<void>;
1761
- exit: () => Promise<void>;
1762
- private onChange;
1763
- isFullscreen: () => boolean;
1764
- isSupported: () => boolean;
1765
- private isFullscreenApiSupported;
1766
- private isInIframe;
1767
- private get requestFn();
1768
- private get exitFn();
1769
- }
1257
+ };
1258
+ /**
1259
+ * Available options when initializing the Player. Used for enabling/disabling features
1260
+ * and hiding/showing buttons in the control pane
1261
+ */
1770
1262
  export interface PlayerOptions {
1263
+ /**
1264
+ * Enable or disable controls
1265
+ */
1771
1266
  controlsEnabled?: boolean;
1267
+ /**
1268
+ * Enable or disable Google Cast (button and functionality). Will be disabled if
1269
+ * device is unable to use Cast Framework
1270
+ */
1772
1271
  castEnabled?: boolean;
1272
+ /**
1273
+ * Enable or disable fullscreen button
1274
+ */
1773
1275
  fullscreenButtonEnabled?: boolean;
1276
+ /**
1277
+ * Enable or disable Picture-in-picture button. Will be disabled if
1278
+ * device is unable to manually enter PiP
1279
+ */
1774
1280
  pipButtonEnabled?: boolean;
1281
+ /**
1282
+ @deprecated since 2.0.10. Use channelSelectionOptions.
1283
+
1284
+ Enable or disable channel selector (only available for channel groups with more than one channel)
1285
+ */
1775
1286
  channelSelectionEnabled?: boolean;
1287
+ /**
1288
+ * Customize and enable channel selection interfaces.
1289
+ * Will be disabled if not using a channel group with more than one channel.
1290
+ */
1776
1291
  channelSelectionOptions?: {
1777
1292
  barButton?: {
1778
1293
  enabled?: boolean;
@@ -1782,21 +1297,58 @@ export interface PlayerOptions {
1782
1297
  enabled?: boolean;
1783
1298
  };
1784
1299
  };
1300
+ /**
1301
+ * Enable or disable rendition (bitrate) selector
1302
+ */
1785
1303
  renditionLevelsEnabled?: boolean;
1304
+ /**
1305
+ * Enable or disable language selector
1306
+ */
1786
1307
  languagesButtonEnabled?: boolean;
1308
+ /**
1309
+ * Enable or disable one-to-one (real size, not filling the entire container)
1310
+ */
1787
1311
  oneToOneButtonEnabled?: boolean;
1312
+ /**
1313
+ * Hide controls after this time in ms
1314
+ */
1788
1315
  hideTimeout?: number;
1316
+ /**
1317
+ * Background image for Cast Receiver
1318
+ */
1789
1319
  castBackground?: string;
1320
+ /**
1321
+ * For custom Cast Receivers, enter your Application Id here
1322
+ */
1790
1323
  castReceiverApplicationId?: string;
1324
+ /**
1325
+ * How often we should refresh thumbnails if used in MS. Default is 60000.
1326
+ */
1791
1327
  thumbnailUpdateInterval?: number;
1792
- }
1793
- export interface PlayerState {
1794
- isBuffering: boolean;
1795
- fullscreen: Fullscreen;
1796
- lastInteractionTime: number;
1797
- }
1328
+ /**
1329
+ * Enable or disable the pause and play button
1330
+ */
1331
+ pauseButtonEnabled?: boolean;
1332
+ }
1333
+ /**
1334
+ * Represents a Vindral player
1335
+ *
1336
+ * ```typescript
1337
+ * // minimal configuration of a Vindral web player
1338
+ * const instance = new Player({
1339
+ * url: "https://lb.cdn.vindral.com",
1340
+ * channelId: "vindral_demo1_ci_099ee1fa-80f3-455e-aa23-3d184e93e04f",
1341
+ * })
1342
+ * ```
1343
+ */
1798
1344
  export declare class Player {
1345
+ /**
1346
+ * The Vindral instance
1347
+ */
1799
1348
  readonly core: Vindral;
1349
+ /**
1350
+ * The CastSender instance
1351
+ */
1800
1352
  readonly castSender: CastSender;
1801
1353
  private options;
1802
1354
  private state;
@@ -1806,16 +1358,31 @@ export declare class Player {
1806
1358
  private channelSelectionList?;
1807
1359
  private bar;
1808
1360
  private stateInterval?;
1809
- private showBufferingTimeout?;
1810
1361
  private thumbnailModule?;
1362
+ private browser;
1811
1363
  constructor(optionsOrInstance: Options | Vindral, playerOptions?: PlayerOptions);
1364
+ /**
1365
+ * Fully unloads the Player and the Vindral instance.
1366
+ */
1812
1367
  unload: () => void;
1368
+ /**
1369
+ * Attaches the Player root node to a container
1370
+ */
1813
1371
  attach: (container: HTMLElement) => void;
1814
1372
  private setupCastSender;
1815
1373
  private onMouseMove;
1816
1374
  private onClick;
1817
1375
  private togglePip;
1818
1376
  private toggleFullscreen;
1377
+ /**
1378
+ * This method is used to trigger play permissions without activating audio.
1379
+ * Setting muted to false will implicitly activate play permissions, and then we instantly revert the mute state to whatever it is.
1380
+ * The reason we need this method to begin with is because there are edge cases on iOS where users can enter fullscreen and then try to activate audio
1381
+ * from there. But that click (through a native ui control) to activate the audio only gives the media element permissions to play audio, but not the audio context.
1382
+ * By triggering this on any button that can lead to fullscreen mode we can ensure that the audio context already has permissions to play before the user might decide
1383
+ * to activate it through the native ui.
1384
+ */
1385
+ private triggerUserInput;
1819
1386
  private toggleOneToOne;
1820
1387
  private enterFullscreen;
1821
1388
  private exitFullscreen;