@vindral/web-sdk 3.4.4 → 4.0.0-191-g9f7294ed

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/core.d.ts ADDED
@@ -0,0 +1,1444 @@
1
+ type MatchingKeys<TRecord, TMatch, K extends keyof TRecord = keyof TRecord> = K extends (TRecord[K] extends TMatch ? K : never) ? K : never;
2
+ type VoidKeys<Record> = MatchingKeys<Record, void>;
3
+ type EventListenerReturnType = (() => void) | void;
4
+ declare class Emitter<TEvents, TEmits = TEvents, ArgLessEvents extends VoidKeys<TEvents> = VoidKeys<TEvents>, ArgEvents extends Exclude<keyof TEvents, ArgLessEvents> = Exclude<keyof TEvents, ArgLessEvents>, ArgLessEmits extends VoidKeys<TEmits> = VoidKeys<TEmits>, ArgEmits extends Exclude<keyof TEmits, ArgLessEmits> = Exclude<keyof TEmits, ArgLessEmits>> {
5
+ private listeners;
6
+ emit<T extends ArgLessEmits>(eventName: T): void;
7
+ emit<T extends ArgEmits>(eventName: T, args: TEmits[T]): void;
8
+ off<T extends ArgLessEvents>(eventName: T, fn: () => EventListenerReturnType): void;
9
+ off<T extends ArgEvents>(eventName: T, fn: (args: TEvents[T]) => EventListenerReturnType): void;
10
+ /**
11
+ * Add an event listener to `eventName`
12
+ *
13
+ * Event listeners may optionally return a "defer function" that will be called once all other listeners have been called.
14
+ * This is useful when one listener may want everyone to have reacted to an event before calling something.
15
+ */
16
+ on<T extends ArgLessEvents>(eventName: T, fn: () => void): void;
17
+ on<T extends ArgEvents>(eventName: T, fn: (args: TEvents[T]) => void): void;
18
+ /**
19
+ * Add an event listener to `eventName` that will be called once only
20
+ *
21
+ * Event listeners may optionally return a "defer function" that will be called once all other listeners have been called.
22
+ * This is useful when one listener may want everyone to have reacted to an event before calling something.
23
+ */
24
+ once<T extends ArgLessEvents>(eventName: T, fn: () => void): void;
25
+ once<T extends ArgEvents>(eventName: T, fn: (args: TEvents[T]) => void): void;
26
+ reset(): void;
27
+ private add;
28
+ }
29
+ declare const Levels: readonly [
30
+ "off",
31
+ "error",
32
+ "warn",
33
+ "info",
34
+ "debug",
35
+ "trace"
36
+ ];
37
+ export type Level = (typeof Levels)[number];
38
+ export declare const Level: {
39
+ ERROR: "error";
40
+ WARN: "warn";
41
+ INFO: "info";
42
+ DEBUG: "debug";
43
+ TRACE: "trace";
44
+ OFF: "off";
45
+ };
46
+ interface MinMaxAverage {
47
+ last: number;
48
+ /**
49
+ * Average value over a given interval.
50
+ */
51
+ average: number;
52
+ /**
53
+ * Maximum value over a given interval.
54
+ */
55
+ max: number;
56
+ /**
57
+ * Minimum value over a given interval.
58
+ */
59
+ min: number;
60
+ }
61
+ declare const tags: unique symbol;
62
+ type Tagged<BaseType, Tag extends PropertyKey> = BaseType & {
63
+ [tags]: {
64
+ [K in Tag]: void;
65
+ };
66
+ };
67
+ interface Channel {
68
+ /**
69
+ * Channel ID for the channel
70
+ */
71
+ channelId: string;
72
+ /**
73
+ * Display name
74
+ */
75
+ name: string;
76
+ /**
77
+ * Indicates whether there is an incoming source feed for the channel
78
+ */
79
+ isLive: boolean;
80
+ /**
81
+ * URLs to fetch thumbnail from
82
+ */
83
+ thumbnailUrls: string[];
84
+ }
85
+ interface ClientOverrides {
86
+ maxVideoBitRate?: number;
87
+ minBufferTime?: number;
88
+ maxBufferTime?: number;
89
+ burstEnabled?: boolean;
90
+ sizeBasedResolutionCapEnabled?: boolean;
91
+ separateVideoSocketEnabled?: boolean;
92
+ videoCodecs?: string[];
93
+ }
94
+ type AudioCodec = "aac" | "opus" | "mp3";
95
+ type VideoCodec = "h264" | "av1";
96
+ type Namespace = Tagged<Array<string>, "Namespace">;
97
+ interface TrackObject {
98
+ namespace?: Namespace;
99
+ name: string;
100
+ format: string;
101
+ label?: string;
102
+ renderGroup?: number;
103
+ altGroup?: number;
104
+ initData?: string;
105
+ initTrack?: string;
106
+ depends?: Array<string>;
107
+ temporalId?: number;
108
+ spatialId?: number;
109
+ codec?: string;
110
+ mimeType?: string;
111
+ framerate?: [
112
+ number,
113
+ number
114
+ ];
115
+ bitrate?: number;
116
+ width?: number;
117
+ height?: number;
118
+ samplerate?: number;
119
+ channelConfig?: string;
120
+ displayWidth?: number;
121
+ displayHeight?: number;
122
+ language?: string;
123
+ ["com.vindral.variant_uid"]?: string;
124
+ }
125
+ interface CatalogRoot {
126
+ version: number;
127
+ streamingFormat?: number;
128
+ streamingFormatVersion?: string;
129
+ }
130
+ interface TracksCatalog extends CatalogRoot {
131
+ namespace: Namespace;
132
+ tracks: Array<TrackObject>;
133
+ }
134
+ interface RenditionProps {
135
+ id: number;
136
+ bitRate: number;
137
+ codecString?: string;
138
+ language?: string;
139
+ meta?: Record<string, string>;
140
+ }
141
+ interface VideoRenditionProps {
142
+ codec: VideoCodec;
143
+ frameRate: [
144
+ number,
145
+ number
146
+ ];
147
+ width: number;
148
+ height: number;
149
+ }
150
+ interface AudioRenditionProps {
151
+ codec: AudioCodec;
152
+ channels: number;
153
+ sampleRate: number;
154
+ }
155
+ interface TextRenditionProps {
156
+ codec: "webvtt";
157
+ kind: "subtitles" | "captions";
158
+ label?: string;
159
+ }
160
+ type VideoRendition = VideoRenditionProps & RenditionProps;
161
+ type AudioRendition = AudioRenditionProps & RenditionProps;
162
+ type TextRendition = TextRenditionProps & RenditionProps;
163
+ type Rendition = VideoRendition | AudioRendition | TextRendition;
164
+ interface Telemetry {
165
+ url: string;
166
+ probability?: number;
167
+ includeErrors?: boolean;
168
+ includeEvents?: boolean;
169
+ includeStats?: boolean;
170
+ maxRetries?: number;
171
+ maxErrorReports?: number;
172
+ interval?: number;
173
+ }
174
+ interface ChannelWithCatalog extends Channel {
175
+ catalog: TracksCatalog;
176
+ renditions: Rendition[];
177
+ overrides?: ClientOverrides;
178
+ }
179
+ interface ChannelWithRenditions extends Channel {
180
+ renditions: Rendition[];
181
+ overrides?: ClientOverrides;
182
+ }
183
+ interface ServerCertificateHash {
184
+ algorithm: string;
185
+ value: string;
186
+ }
187
+ interface Edge {
188
+ moqUrl?: string;
189
+ moqWsUrl: string;
190
+ serverCertificateHashes?: ServerCertificateHash[];
191
+ }
192
+ interface MoQConnectInfo {
193
+ logsUrl?: string;
194
+ statsUrl?: string;
195
+ telemetry?: Telemetry;
196
+ channels: ChannelWithCatalog[];
197
+ edges: Edge[];
198
+ }
199
+ interface VindralConnectInfo {
200
+ logsUrl?: string;
201
+ statsUrl?: string;
202
+ telemetry?: Telemetry;
203
+ channels: ChannelWithRenditions[];
204
+ edges: string[];
205
+ }
206
+ type ConnectInfo = VindralConnectInfo | MoQConnectInfo;
207
+ /**
208
+ * Represents a timed metadata event
209
+ */
210
+ export interface Metadata {
211
+ /**
212
+ * The raw string content as it was ingested (if using JSON, it needs to be parsed on your end)
213
+ */
214
+ content: string;
215
+ /**
216
+ * Timestamp in ms
217
+ */
218
+ timestamp: number;
219
+ }
220
+ export interface TimeRange {
221
+ start: number;
222
+ end: number;
223
+ }
224
+ /**
225
+ * The current reconnect state to use to decide whether to keep reconnecting or not
226
+ */
227
+ export interface ReconnectState {
228
+ /**
229
+ * The number of retry attempts so far.
230
+ * This gets reset on every successful connect, so it will start from zero every
231
+ * time the client instance gets disconnected and will increment until the
232
+ * client instance makes a successful connection attempt.
233
+ */
234
+ reconnectRetries: number;
235
+ }
236
+ interface Size {
237
+ width: number;
238
+ height: number;
239
+ }
240
+ interface VideoConstraint {
241
+ width: number;
242
+ height: number;
243
+ bitRate: number;
244
+ codec?: VideoCodec;
245
+ codecString?: string;
246
+ }
247
+ /**
248
+ * Advanced options to override default behaviour.
249
+ */
250
+ export interface AdvancedOptions {
251
+ /**
252
+ * Constrains wasm decoding to this resolution.
253
+ * By default it is set to 1280 in width and height.
254
+ * This guarantees better performance on older devices and reduces battery drain in general.
255
+ */
256
+ wasmDecodingConstraint: Partial<VideoConstraint>;
257
+ }
258
+ /**
259
+ * DRM options to provide to the Vindral instance
260
+ */
261
+ export interface DrmOptions {
262
+ /**
263
+ * Headers to be added to requests to license servers
264
+ */
265
+ headers?: Record<string, string>;
266
+ /**
267
+ * Query parameters to be added to requests to license servers
268
+ */
269
+ queryParams?: Record<string, string>;
270
+ }
271
+ type Media = "audio" | "video" | "audio+video";
272
+ /**
273
+ * Options for the Vindral instance
274
+ *
275
+ */
276
+ export interface Options {
277
+ /**
278
+ * URL to use when connecting to the stream
279
+ */
280
+ url: string;
281
+ /**
282
+ * Channel ID to connect to initially - can be changed later mid-stream when connected to a channel group.
283
+ */
284
+ channelId: string;
285
+ /**
286
+ * Channel group to connect to
287
+ * Note: Only needed for fast channel switching
288
+ */
289
+ channelGroupId?: string;
290
+ /**
291
+ * A container to attach the video view in - can be provided later with .attach() on the vindral core instance
292
+ */
293
+ container?: HTMLElement;
294
+ /**
295
+ * An authentication token to provide to the server when connecting - only needed for channels with authentication enabled
296
+ * Note: If not supplied when needed, an "Authentication Failed" error will be raised.
297
+ */
298
+ authenticationToken?: string;
299
+ /**
300
+ * Language to use initially - can be changed during runtime on the vindral instance
301
+ * Note: Only needed when multiple languages are provided - if no language is specified, one will be automatically selected.
302
+ */
303
+ language?: string;
304
+ /**
305
+ * TextTrack to use initially - can be changed during runtime on the vindral instance
306
+ */
307
+ textTrack?: string;
308
+ /**
309
+ * Sets the log level - defaults to info
310
+ */
311
+ logLevel?: Level;
312
+ /**
313
+ * Sets the minimum and initial buffer time
314
+ */
315
+ minBufferTime?: number;
316
+ /**
317
+ * Sets the maximum buffer time allowed. The vindral instance will automatically slowly increase
318
+ * the buffer time if the user experiences too much buffering with the initial buffer time.
319
+ */
320
+ maxBufferTime?: number;
321
+ /**
322
+ * Enables or disables user bandwidth savings by capping the video resolution to the size of the video element.
323
+ *
324
+ * Is enabled by default.
325
+ *
326
+ * Note: This is automatically set to false when abrEnabled is set to false.
327
+ */
328
+ sizeBasedResolutionCapEnabled?: boolean;
329
+ /**
330
+ * Enables or disables picture in picture support.
331
+ */
332
+ pictureInPictureEnabled?: boolean;
333
+ /**
334
+ * Enable bursting for initial connection and channel switches. This makes time to first frame faster at the
335
+ * cost of stability (more demanding due to the sudden burst of live content)
336
+ *
337
+ * Is disabled by default.
338
+ *
339
+ */
340
+ burstEnabled?: boolean;
341
+ /**
342
+ * Enable usage of the MediaSource API on supported browsers.
343
+ *
344
+ * Is enabled by default.
345
+ *
346
+ * Note: We recommend to keep this at the default value unless you have very specific needs.
347
+ */
348
+ mseEnabled?: boolean;
349
+ /**
350
+ * Enable Opus with the MediaSource API on supported browsers.
351
+ *
352
+ * Is enabled by default.
353
+ *
354
+ */
355
+ mseOpusEnabled?: boolean;
356
+ /**
357
+ * Enable or disable support for playing audio in the background for iOS devices.
358
+ *
359
+ * Is false (disabled) by default.
360
+ *
361
+ * Note: This may be enabled by default in a future (major) release
362
+ */
363
+ iosBackgroundPlayEnabled?: boolean;
364
+ /**
365
+ * Enable or disable Adaptive Bit Rate. This allows for automatically adapting the incoming bit rate based on
366
+ * the viewers bandwidth and thus avoiding buffering events. This also disables the
367
+ * sizeBasedResolutionCapEnabled option.
368
+ *
369
+ * Is enabled by default.
370
+ *
371
+ * Note: It is strongly recommended to keep this enabled as user experience can greatly suffer without ABR.
372
+ */
373
+ abrEnabled?: boolean;
374
+ /**
375
+ * Enable or disable telemetry. This allows for telemetry and errors being collected.
376
+ *
377
+ * Is enabled by default.
378
+ *
379
+ * We appreciate you turning it off during development/staging to not bloat real telemetry data.
380
+ *
381
+ * Note: It is strongly recommended to keep this enabled in production as it is required for insights and KPIs.
382
+ */
383
+ telemetryEnabled?: boolean;
384
+ /**
385
+ * Set a cap on the maximum video size.
386
+ * This can be used to provide user options to limit the video bandwidth usage.
387
+ *
388
+ * Note: This takes precedence over any size based resolution caps.
389
+ */
390
+ maxSize?: Size;
391
+ /**
392
+ * Maximum audio bit rate allowed.
393
+ * This can be used to provide user options to limit the audio bandwidth usage.
394
+ */
395
+ maxAudioBitRate?: number;
396
+ /**
397
+ * Maximum video bit rate allowed.
398
+ * This can be used to provide user options to limit the video bandwidth usage.
399
+ */
400
+ maxVideoBitRate?: number;
401
+ /**
402
+ * Controls video element background behaviour while loading.
403
+ * - If `false`, a black background will be shown.
404
+ * - If undefined or `true`, a live thumbnail will be shown.
405
+ * - If set to a string containing a URL (https://urltoimage), use that.
406
+ * Default `true` - meaning a live thumbnail is shown
407
+ */
408
+ poster?: boolean | string;
409
+ /**
410
+ * Whether to start the player muted or to try to start playing audio automatically.
411
+ */
412
+ muted?: boolean;
413
+ /**
414
+ * Provide a custom reconnect handler to control when the instance should stop trying to
415
+ * reconnect. The reconnect handler should either return true to allow the reconnect or
416
+ * false to stop reconnecting. It can also return a promise with true or false if it needs
417
+ * to make any async calls before determining whether to reconnect.
418
+ *
419
+ * The default reconnect handler allows 30 reconnects before stopping.
420
+ *
421
+ * Note: the ReconnectState gets reset every time the client instance makes a successful connection.
422
+ * This means the default reconnect handler will only stop reconnecting after 30 _consecutive_ failed connections.
423
+ *
424
+ * ```typescript
425
+ * // An example reconnect handler that will reconnect forever
426
+ * const reconnectHandler = (state: ReconnectState) => true
427
+ *
428
+ * // An example reconnect handler that will fetch an url and determine whether to reconnect
429
+ * const reconnectHandler = async (state: ReconnectState) => {
430
+ * const result = await fetch("https://should-i-reconnect-now.com")
431
+ * return result.ok
432
+ * },
433
+ * ```
434
+ */
435
+ reconnectHandler?: (state: ReconnectState) => Promise<boolean> | boolean;
436
+ tags?: string[];
437
+ ownerSessionId?: string;
438
+ edgeUrl?: string;
439
+ logShippingEnabled?: boolean;
440
+ statsShippingEnabled?: boolean;
441
+ /**
442
+ * Enable wake lock for iOS devices.
443
+ * The wake lock requires that the audio has been activated at least once for the instance, otherwise it will not work.
444
+ * Other devices already provide wake lock by default.
445
+ *
446
+ * This option is redundant and has no effect if iosMediaElementEnabled is enabled since that automatically enables wake lock.
447
+ *
448
+ * Disabled by default.
449
+ */
450
+ iosWakeLockEnabled?: boolean;
451
+ /**
452
+ * Disabling this will revert to legacy behaviour where Vindral will try to always keep the video element playing.
453
+ */
454
+ pauseSupportEnabled?: boolean;
455
+ /**
456
+ * Enables iOS devices to use a media element for playback. This enables fullscreen and picture in picture support on iOS.
457
+ */
458
+ iosMediaElementEnabled?: boolean;
459
+ /**
460
+ * Advanced options to override default behaviour.
461
+ */
462
+ advanced?: AdvancedOptions;
463
+ media?: Media;
464
+ videoCodecs?: VideoCodec[];
465
+ /**
466
+ * DRM options to provide to the Vindral instance
467
+ */
468
+ drm?: DrmOptions;
469
+ }
470
+ /**
471
+ * Represents a rendition (quality level).
472
+ */
473
+ export interface RenditionLevel {
474
+ audio?: AudioRendition;
475
+ video?: VideoRendition;
476
+ }
477
+ type RenditionLevelChangedReason = "abr" | "manual";
478
+ /**
479
+ * Contextual information about the rendition level change.
480
+ */
481
+ export interface RenditionLevelChanged {
482
+ from?: RenditionLevel;
483
+ to?: RenditionLevel;
484
+ reason: RenditionLevelChangedReason;
485
+ }
486
+ declare const defaultOptions: {
487
+ sizeBasedResolutionCapEnabled: boolean;
488
+ pictureInPictureEnabled: boolean;
489
+ abrEnabled: boolean;
490
+ burstEnabled: boolean;
491
+ mseEnabled: boolean;
492
+ mseOpusEnabled: boolean;
493
+ muted: boolean;
494
+ minBufferTime: number;
495
+ maxBufferTime: number;
496
+ logLevel: Level;
497
+ maxSize: Size;
498
+ maxVideoBitRate: number;
499
+ maxAudioBitRate: number;
500
+ tags: string[];
501
+ media: Media;
502
+ poster: string | boolean;
503
+ reconnectHandler: (state: ReconnectState) => Promise<boolean> | boolean;
504
+ iosWakeLockEnabled: boolean;
505
+ telemetryEnabled: boolean;
506
+ iosMediaElementEnabled: boolean;
507
+ pauseSupportEnabled: boolean;
508
+ advanced: {
509
+ wasmDecodingConstraint: Partial<VideoConstraint>;
510
+ };
511
+ videoCodecs: VideoCodec[];
512
+ };
513
+ interface VindralErrorProps {
514
+ isFatal: boolean;
515
+ type?: ErrorType;
516
+ code: string;
517
+ source?: Error | MediaError;
518
+ }
519
+ export declare const CONNECTION_FAILED_CODE = "connection_failed";
520
+ export declare const CONNECTION_FAILED_AFTER_RETRIES_CODE = "connection_failed_will_not_attempt_again";
521
+ export declare const AUTHENTICATION_FAILED_CODE = "authentication_error";
522
+ export declare const AUTHENTICATION_EXPIRED_CODE = "authentication_expired";
523
+ export declare const CHANNEL_NOT_FOUND_CODE = "channel_not_found";
524
+ export declare const NO_INCOMING_DATA = "no_incoming_data_error";
525
+ export declare const INACTIVITY_CODE = "connection_inactivity";
526
+ export declare const DISCONNECTED_BY_EDGE = "disconnected_by_edge";
527
+ type ErrorType = "internal" | "external";
528
+ /**
529
+ * Represents a vindral error - all errors emitted from the Vindral instance inherit from this class.
530
+ */
531
+ export declare class VindralError extends Error {
532
+ private props;
533
+ private extra;
534
+ constructor(message: string, props: VindralErrorProps, extra?: {});
535
+ /**
536
+ * The error code is a stable string that represents the error type - this should be treated as an
537
+ * opaque string that can be used as a key for looking up localized strings for displaying error messages.
538
+ * @returns the error code
539
+ */
540
+ code: () => string;
541
+ /**
542
+ * Indicates whether the error is fatal - if it is that means the Vindral instance will be unloaded because of this error.
543
+ */
544
+ isFatal: () => boolean;
545
+ /**
546
+ * The underlying error that caused the Vindral error
547
+ * @returns the underlying error
548
+ */
549
+ source: () => Error | MediaError | undefined;
550
+ type: () => ErrorType;
551
+ /**
552
+ * @returns a stringifiable representation of the error
553
+ */
554
+ toStringifiable: () => Record<string, unknown>;
555
+ }
556
+ type PlaybackState = "buffering" | "playing" | "paused";
557
+ type BufferStateEvent = "filled" | "drained";
558
+ interface PlaybackModuleStatistics {
559
+ /**
560
+ * Current target buffer time if using dynamic buffer. Otherwise, this is the statically set buffer time from instantiation.
561
+ */
562
+ bufferTime: number;
563
+ needsInputForAudioCount: number;
564
+ needsInputForVideoCount: number;
565
+ }
566
+ interface NeedsUserInputContext {
567
+ /**
568
+ * True if user input is needed for audio
569
+ */
570
+ forAudio: boolean;
571
+ /**
572
+ * True if user input is needed for video
573
+ */
574
+ forVideo: boolean;
575
+ }
576
+ interface ApiClientOptions {
577
+ /**
578
+ * String representing the URL to the public CDN API.
579
+ */
580
+ publicEndpoint: string;
581
+ /**
582
+ * Function that should return a string containing a signed authentication token.
583
+ */
584
+ tokenFactory?: AuthorizationTokenFactory;
585
+ }
586
+ interface AuthorizationContext {
587
+ /**
588
+ * The channelGroupId that might need authorization.
589
+ */
590
+ channelGroupId?: string;
591
+ /**
592
+ * The channelId that might need authorization.
593
+ */
594
+ channelId?: string;
595
+ }
596
+ interface ConnectOptions {
597
+ channelGroupId?: string;
598
+ channelId: string;
599
+ }
600
+ type AuthorizationTokenFactory = (context: AuthorizationContext) => string | undefined;
601
+ declare class ApiClient {
602
+ private baseUrl;
603
+ private tokenFactory?;
604
+ constructor(options: ApiClientOptions);
605
+ /**
606
+ * Returns everything needed to setup the connection of Vindral instance.
607
+ */
608
+ connect(options: ConnectOptions): Promise<ConnectInfo>;
609
+ /**
610
+ * Fetches information regarding a single channel.
611
+ *
612
+ * @param channelId the channel to fetch
613
+ * @returns a [[Channel]] containing information about the requested channel.
614
+ */
615
+ getChannel(channelId: string): Promise<Channel>;
616
+ /**
617
+ * Fetches channels within a channel group
618
+ *
619
+ * Note: The returned list includes inactive channels - check isLive to filter out only active channels
620
+ *
621
+ * @param channelGroupId the channel group to fetch channels from
622
+ * @returns an array of [[Channel]] that belong to the channel group
623
+ */
624
+ getChannels(channelGroupId: string): Promise<Channel[]>;
625
+ private getHeaders;
626
+ private getAuthToken;
627
+ private toChannels;
628
+ private toChannel;
629
+ }
630
+ interface AdaptivityStatistics {
631
+ /**
632
+ * True if adaptive bitrate (ABR) is enabled.
633
+ */
634
+ isAbrEnabled: boolean;
635
+ }
636
+ interface BufferTimeStatistics {
637
+ /**
638
+ * Number of times the buffer time has been adjusted. This will only happen when using dynamic buffer time
639
+ * (different min/max values of bufferTime).
640
+ */
641
+ bufferTimeAdjustmentCount: number;
642
+ }
643
+ interface RenditionsModuleStatistics {
644
+ /**
645
+ * Id of current video rendition subscribed to.
646
+ */
647
+ videoRenditionId?: number;
648
+ /**
649
+ * Id of current audio rendition subscribed to.
650
+ */
651
+ audioRenditionId?: number;
652
+ /**
653
+ * Current video codec being used.
654
+ */
655
+ videoCodec?: string;
656
+ /**
657
+ * Current audio codec being used.
658
+ */
659
+ audioCodec?: string;
660
+ /**
661
+ * Width of current video rendition (if any).
662
+ */
663
+ videoWidth?: number;
664
+ /**
665
+ * Height of current video rendition (if any).
666
+ */
667
+ videoHeight?: number;
668
+ /**
669
+ * Currently expected video bit rate according to metadata in bits/s.
670
+ */
671
+ expectedVideoBitRate?: number;
672
+ /**
673
+ * Currently expected audio bit rate according to metadata in bits/s.
674
+ */
675
+ expectedAudioBitRate?: number;
676
+ /**
677
+ * Current language. For non-multi language streams, this will often be unset.
678
+ */
679
+ language?: string;
680
+ /**
681
+ * Frame rate. Example: `"frameRate": [24000, 1001]`.
682
+ */
683
+ frameRate?: [
684
+ number,
685
+ number
686
+ ];
687
+ /**
688
+ * Total count of rendition level changes (quality downgrades/upgrades).
689
+ */
690
+ renditionLevelChangeCount: number;
691
+ }
692
+ interface VideoConstraintCap {
693
+ width: number;
694
+ height: number;
695
+ bitRate: number;
696
+ }
697
+ interface AudioConstraintCap {
698
+ bitRate: number;
699
+ }
700
+ interface ConstraintCap {
701
+ video: VideoConstraintCap;
702
+ audio: AudioConstraintCap;
703
+ }
704
+ interface ConstraintCapStatistics {
705
+ constraintCap?: ConstraintCap;
706
+ windowInnerWidth: number;
707
+ windowInnerHeight: number;
708
+ elementWidth: number;
709
+ elementHeight: number;
710
+ pixelRatio: number;
711
+ }
712
+ interface DecoderStatistics {
713
+ videoDecodeRate: number;
714
+ videoDecodeTime: MinMaxAverage;
715
+ audioDecodeTime: MinMaxAverage;
716
+ videoTransportTime: MinMaxAverage;
717
+ }
718
+ interface DocumentStateModulesStatistics {
719
+ isVisible: boolean;
720
+ isOnline: boolean;
721
+ isVisibleCount: number;
722
+ isHiddenCount: number;
723
+ isOnlineCount: number;
724
+ isOfflineCount: number;
725
+ navigatorRtt?: number;
726
+ navigatorEffectiveType?: EffectiveConnectionType;
727
+ navigatorConnectionType?: ConnectionType;
728
+ navigatorSaveData?: boolean;
729
+ navigatorDownlink?: number;
730
+ }
731
+ interface IncomingDataModuleStatistics {
732
+ /**
733
+ * Current video bitrate in bits/second.
734
+ */
735
+ videoBitRate?: number;
736
+ /**
737
+ * Current audio bitrate in bits/second.
738
+ */
739
+ audioBitRate?: number;
740
+ /**
741
+ * Counter of number of bytes received.
742
+ */
743
+ bytesReceived: number;
744
+ }
745
+ interface MseModuleStatistics {
746
+ quotaErrorCount: number;
747
+ mediaSourceOpenTime: number;
748
+ totalVideoFrames?: number;
749
+ droppedVideoFrames?: number;
750
+ successfulVideoAppendCalls?: number;
751
+ successfulAudioAppendsCalls?: number;
752
+ }
753
+ interface QualityOfServiceModuleStatistics {
754
+ /**
755
+ * Time in milliseconds spent in buffering state. Note that this value will increase while in background if
756
+ * buffering when leaving foreground.
757
+ */
758
+ timeSpentBuffering: number;
759
+ /**
760
+ * Total number of buffering events since instantiation.
761
+ */
762
+ bufferingEventsCount: number;
763
+ /**
764
+ * Number of fatal quality of service events.
765
+ */
766
+ fatalQosCount: number;
767
+ /**
768
+ * Ratio of time being spent on different bitrates.
769
+ * Example: `"timeSpentRatio": { "1160000": 0.2, "2260000": 0.8 }` shows 20% spent on 1.16 Mbps, 80% spent on 2.26 Mbps.
770
+ */
771
+ timeSpentRatio: {
772
+ [bitRate: string]: number;
773
+ };
774
+ }
775
+ interface SyncModuleStatistics {
776
+ drift: number | undefined;
777
+ driftAdjustmentCount: number;
778
+ timeshiftDriftAdjustmentCount: number;
779
+ seekTime: number;
780
+ }
781
+ interface VideoPlayerStatistics {
782
+ renderedFrameCount: number;
783
+ rendererDroppedFrameCount: number;
784
+ contextLostCount: number;
785
+ contextRestoredCount: number;
786
+ }
787
+ declare class UserAgentInformation {
788
+ private highEntropyValues?;
789
+ constructor();
790
+ getUserAgentInformation(): {
791
+ locationOrigin: string;
792
+ locationPath: string;
793
+ ancestorOrigins: string[] | undefined;
794
+ hardwareConcurrency: number;
795
+ deviceMemory: number | undefined;
796
+ userAgentLegacy: string;
797
+ ua: {
798
+ browser: {
799
+ brands: string[];
800
+ fullVersionBrands: string[];
801
+ majorVersions: string[];
802
+ };
803
+ device: string;
804
+ os: {
805
+ family: string;
806
+ version: string;
807
+ major_version: number;
808
+ };
809
+ };
810
+ } | {
811
+ locationOrigin: string;
812
+ locationPath: string;
813
+ ancestorOrigins: string[] | undefined;
814
+ hardwareConcurrency: number;
815
+ deviceMemory: number | undefined;
816
+ userAgent: string;
817
+ };
818
+ }
819
+ type ModuleStatistics = AdaptivityStatistics & BufferTimeStatistics & ConnectionStatistics & ConstraintCapStatistics & DecoderStatistics & DocumentStateModulesStatistics & IncomingDataModuleStatistics & MseModuleStatistics & PlaybackModuleStatistics & QualityOfServiceModuleStatistics & RenditionsModuleStatistics & SyncModuleStatistics & TelemetryModuleStatistics & VideoPlayerStatistics;
820
+ /**
821
+ * Contains internal statistics.
822
+ *
823
+ * Note that this object will have some undocumented properties, used internally or temporarily,
824
+ * for monitoring and improving the performance of the service.
825
+ *
826
+ * @interface
827
+ */
828
+ export type Statistics = ModuleStatistics & ReturnType<UserAgentInformation["getUserAgentInformation"]> & {
829
+ /**
830
+ * Version of the @vindral/web-sdk being used.
831
+ */
832
+ version: string;
833
+ /**
834
+ * IP of the client.
835
+ */
836
+ ip?: string;
837
+ /**
838
+ * URL being used for connecting to the stream.
839
+ */
840
+ url: string;
841
+ /**
842
+ * A session is bound to a connection. If the client reconnects for any reason (e.g. coming back from inactivity
843
+ * or a problem with network on client side), a new sessionId will be used.
844
+ *
845
+ */
846
+ sessionId?: string;
847
+ /**
848
+ * Unlike `sessionId`, `clientId` will remain the same even after reconnections and represents this unique Vindral instance.
849
+ */
850
+ clientId: string;
851
+ /**
852
+ * How long in milliseconds since the instance was created.
853
+ */
854
+ uptime: number;
855
+ /**
856
+ * Current channel ID being subscribed to.
857
+ */
858
+ channelId: string;
859
+ /**
860
+ * Channel group being subscribed to.
861
+ */
862
+ channelGroupId?: string;
863
+ /**
864
+ * Time in milliseconds from instantiation to playback of video and audio being started.
865
+ * Note that an actual frame render often happens much quicker, but that is not counted as TTFF.
866
+ */
867
+ timeToFirstFrame?: number;
868
+ iosMediaElementEnabled?: boolean;
869
+ };
870
+ /**
871
+ * Represents a Vindral client instance
872
+ *
873
+ * The most essential methods when using the Vindral class are:
874
+ *
875
+ * - connect() - this has to be called to actually start connecting
876
+ * - attach() - to attach the Vindral video view to the DOM so that users can see it
877
+ * - userInput() - to activate audio on browsers that require a user gesture to play audio
878
+ * - unload() - unloads the instance, it's very important that this is called when cleaning up the Vindral instance, otherwise background timers may leak.
879
+ *
880
+ * The Vindral instance will emit a variety of events during its lifetime. Use .on("event-name", callback) to listen to these events.
881
+ * See [[PublicVindralEvents]] for the events types that can be emitted.
882
+ *
883
+ * ```typescript
884
+ * // minimal configuration of a Vindral client instance
885
+ * const instance = new Vindral({
886
+ * url: "https://lb.cdn.vindral.com",
887
+ * channelId: "vindral_demo1_ci_099ee1fa-80f3-455e-aa23-3d184e93e04f",
888
+ * })
889
+ *
890
+ * // Will be called when timed metadata is received
891
+ * instance.on("metadata", console.log)
892
+ *
893
+ * // Will be called when a user interaction is needed to activate audio
894
+ * instance.on("needs user input", console.log)
895
+ *
896
+ * // Start connecting to the cdn
897
+ * instance.connect()
898
+ *
899
+ * // Attach the video view to the DOM
900
+ * instance.attach(document.getElementById("root"))
901
+ *
902
+ * // When done with the instance
903
+ * instance.unload()
904
+ * ```
905
+ */
906
export declare class Vindral extends Emitter<PublicVindralEvents> {
    #private;
    // Internal tuning constants — the concrete values live in the implementation, not this declaration.
    private static MAX_POOL_SIZE;
    private static INITIAL_MAX_BIT_RATE;
    private static DISCONNECT_TIMEOUT;
    private static REMOVE_CUE_THRESHOLD;
    /**
     * Picture in picture
     */
    readonly pictureInPicture: {
        /**
         * Enters picture in picture
         * @returns a promise that resolves if successful
         */
        enter: () => Promise<void>;
        /**
         * Exits picture in picture
         * @returns a promise that resolves if successful
         */
        exit: () => Promise<void>;
        /**
         * returns whether picture in picture is currently active
         */
        isActive: () => boolean;
        /**
         * returns whether picture in picture is supported
         */
        isSupported: () => boolean;
    };
    private browser;
    private options;
    private element;
    private playbackSource;
    private emitter;
    private logger;
    private modules;
    private clientIp?;
    private sessionId?;
    private clientId;
    private _channels;
    private createdAt;
    private hasCalledConnect;
    private latestEmittedLanguages;
    private wakeLock;
    private pool;
    private userAgentInformation;
    private encryptedMediaExtensions;
    // NOTE(review): "Sesssions" is a typo, but this private name must match the
    // implementation for class-private structural compatibility; renaming requires
    // a coordinated source change.
    private sampleProcessingSesssions;
    private sizes;
    private isSuspended;
    private disconnectTimeout;
    constructor(options: Options);
    /**
     * Attaches the video view to a DOM element. The Vindral video view will be sized to fill this element while
     * maintaining the correct aspect ratio.
     * @param container the container element to append the video view to. Often a div element.
     * @returns
     */
    attach: (container: HTMLElement) => void;
    /**
     * Set the current volume.
     * Setting this to 0 is not equivalent to muting the audio.
     * Setting this to >0 is not equivalent to unmuting the audio.
     *
     * Note that setting volume is not allowed on iPadOS and iOS devices.
     * This is an OS/browser limitation on the video element.
     *
     * [Read more about it on Apple docs](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html)
     * for iOS-Specific Considerations. The following section is the important part:
     * On iOS devices, the audio level is always under the user's physical control. The volume property is not settable in JavaScript. Reading the volume property always returns 1.
     */
    set volume(volume: number);
    /**
     * The current volume. Note that if the playback is muted volume can still be set.
     */
    get volume(): number;
    /**
     * Set playback to muted/unmuted
     */
    set muted(muted: boolean);
    /**
     * Whether the playback is muted or not
     */
    get muted(): boolean;
    /**
     * Media (audio | video | audio+video)
     */
    get media(): Media;
    /**
     * The current average video bit rate in bits/s
     */
    get videoBitRate(): number;
    /**
     * The current average audio bit rate in bits/s
     */
    get audioBitRate(): number;
    /**
     * The current connection state
     */
    get connectionState(): Readonly<State>;
    /**
     * The current playback state
     */
    get playbackState(): Readonly<PlaybackState>;
    /**
     * The current buffer fullness as a floating point value between 0-1, where 1 is full and 0 is empty.
     */
    get bufferFullness(): number;
    /**
     * Whether user bandwidth savings by capping the video resolution to the size of the video element is enabled
     */
    get sizeBasedResolutionCapEnabled(): boolean;
    /**
     * Enables or disables user bandwidth savings by capping the video resolution to the size of the video element.
     */
    set sizeBasedResolutionCapEnabled(enabled: boolean);
    /**
     * Whether ABR is currently enabled
     */
    get abrEnabled(): boolean;
    /**
     * Enable or disable ABR
     *
     * When disabled, the client will immediately stop changing rendition level based on QoS metrics
     *
     * Note: It is strongly recommended to keep this enabled, as disabling it can severely increase
     * the number of buffering events for viewers.
     */
    set abrEnabled(enabled: boolean);
    /**
     * Estimated live edge time for the current channel
     */
    get serverEdgeTime(): number | undefined;
    /**
     * @returns Estimated wallclock time on the edge server in milliseconds
     */
    get serverWallclockTime(): number | undefined;
    /**
     * Local current time normalized between all channels in the channel group
     */
    get currentTime(): number;
    /**
     * Current time for the channel. This is the actual stream time, passed on from your ingress.
     * Integer overflow could make this value differ from your encoder timestamps if it has been rolling for more
     * than 42 days with RTMP as target.
     *
     * Note: This is not normalized between channels, thus it can make jumps when switching channels
     */
    get channelCurrentTime(): number;
    /**
     * The current target buffer time in milliseconds
     */
    get targetBufferTime(): number;
    /**
     * Set the current target buffer time in milliseconds
     */
    set targetBufferTime(bufferTimeMs: number);
    /**
     * The estimated playback latency based on target buffer time, the connection rtt and local playback drift
     */
    get playbackLatency(): number | undefined;
    /**
     * The estimated utc timestamp (in ms) for the playhead.
     */
    get playbackWallclockTime(): number | undefined;
    /**
     * Channels that can be switched between
     */
    get channels(): ReadonlyArray<Channel>;
    /**
     * Languages available
     */
    get languages(): ReadonlyArray<string>;
    /**
     * The current language
     */
    get language(): string | undefined;
    /**
     * Set the current language
     */
    set language(language: string | undefined);
    /**
     * Set the active text track
     */
    set textTrack(label: string | undefined);
    /**
     * Get the available text tracks
     */
    get textTracks(): string[];
    /**
     * Get the active text track
     */
    get textTrack(): string | undefined;
    /**
     * The current channelId
     */
    get channelId(): string;
    /**
     * Set the current channelId
     *
     * Possible channels to set are available from [[channels]]
     *
     * Note that the following scenarios are not possible right now:
     * - switching channel from a channel with audio to a channel without audio (unless audio only mode is active)
     * - switching channel from a channel with video to a channel without video (unless video only mode is active)
     */
    set channelId(channelId: string);
    /**
     * Max size that will be subscribed to
     */
    get maxSize(): Size;
    /**
     * Set max size that will be subscribed to
     *
     * Note: If ABR is disabled, setting this will make the client instantly subscribe to this size
     */
    set maxSize(size: Size);
    /**
     * The max video bit rate that will be subscribed to
     *
     * Note: Returns Number.MAX_SAFE_INTEGER if no limits have been set
     */
    get maxVideoBitRate(): number;
    /**
     * Set max video bit rate that will be subscribed to
     *
     * Note: If ABR is disabled, setting this will make the client instantly subscribe to this bitrate
     */
    set maxVideoBitRate(bitRate: number);
    /**
     * The max audio bit rate that will be subscribed to
     *
     * Note: Returns Number.MAX_SAFE_INTEGER if no limits have been set
     */
    get maxAudioBitRate(): number;
    /**
     * Set max audio bit rate that will be subscribed to
     *
     * Note: If ABR is disabled, setting this will make the client instantly subscribe to this bit rate
     */
    set maxAudioBitRate(bitRate: number);
    /**
     * The rendition levels available.
     */
    get renditionLevels(): ReadonlyArray<RenditionLevel>;
    /**
     * The current rendition level
     */
    get currentRenditionLevel(): Readonly<RenditionLevel> | undefined;
    /**
     * The target rendition level that the client is currently switching to
     */
    get targetRenditionLevel(): Readonly<RenditionLevel> | undefined;
    /**
     * True if the client is currently switching from one rendition level to another
     */
    get isSwitchingRenditionLevel(): boolean;
    /**
     * The time ranges buffered for video.
     * The ranges are specified in milliseconds.
     */
    get videoBufferedRanges(): ReadonlyArray<TimeRange>;
    /**
     * The time ranges buffered for audio.
     * The ranges are specified in milliseconds.
     */
    get audioBufferedRanges(): ReadonlyArray<TimeRange>;
    /**
     * The API client for calls to the public available endpoints of the Vindral Live CDN.
     */
    getApiClient(): ApiClient;
    /** The most recently emitted "buffer state event" payload. */
    get lastBufferEvent(): Readonly<BufferStateEvent>;
    // Ratios keyed by string id — presumably channel id; confirm against the implementation.
    get activeRatios(): Map<string, number>;
    get bufferingRatios(): Map<string, number>;
    /** Total time spent buffering — presumably milliseconds; confirm against the implementation. */
    get timeSpentBuffering(): number;
    /** Total time spent actively playing — presumably milliseconds; confirm against the implementation. */
    get timeActive(): number;
    /** The underlying rendering element (a media element, or a canvas when canvas rendering is used). */
    get mediaElement(): HTMLMediaElement | HTMLCanvasElement;
    /** The Web Audio node for the playback, if one exists. */
    get audioNode(): AudioNode | undefined;
    /**
     * Get active Vindral Options
     */
    getOptions: () => Options & typeof defaultOptions;
    /**
     * Get url for fetching thumbnail. Note that fetching thumbnails only works for an active channel.
     */
    getThumbnailUrl: () => string;
    /**
     * Update authentication token on an already established and authenticated connection
     */
    updateAuthenticationToken: (token: string) => void;
    /**
     * @deprecated since 3.0.0 Use play instead.
     * Connects to the configured channel and starts streaming
     */
    connect: () => void;
    private _connect;
    /**
     * Get options that can be used for CastSender
     */
    getCastOptions: () => Options;
    private onConnectInfo;
    private emitLanguagesIfChanged;
    private updateTextTracks;
    private cleanupTextTracks;
    private filterRenditions;
    /**
     * Patch the subscription with properties from the channel that isn't known until connection
     * @param channel Channel with the renditions to patch the subscription based on
     */
    private patchSubscription;
    private isSupportedVideoCodecProfile;
    private supportedAudioCodecs;
    private initializeDecodingModule;
    /**
     * Fully unloads the instance. This disconnects the clients and stops any background tasks.
     * This client instance can not be used after this has been called.
     */
    unload: () => Promise<void>;
    /**
     * @deprecated since 3.0.0 Use play instead.
     *
     * Activates audio or video on web browsers that require a user gesture to enable media playback.
     * The Vindral instance will emit a "needs user input" event to indicate when this is needed.
     * But it is also safe to pre-emptively call this if it is more convenient - such as in cases where
     * the Vindral instance itself is created in a user input event.
     *
     * Requirements: This method needs to be called within a user-input event handler to function properly, such as
     * an onclick handler.
     *
     * Note: Even if you pre-emptively call this it is still recommended to listen to "needs user input"
     * and handle that event gracefully.
     */
    userInput: () => void;
    /**
     * Pauses the stream. Call .play() to resume playback again.
     */
    pause: () => void;
    private registerDebugInstance;
    /**
     *
     * Start playing the stream.
     *
     * This method also activates audio or video on web browsers that require a user gesture to enable media playback.
     * The Vindral instance will emit a "needs user input" event to indicate when this is needed.
     * But it is also safe to pre-emptively call this if it is more convenient - such as in cases where
     * the Vindral instance itself is created in a user input event.
     *
     * Note: In most browsers this method needs to be called within a user-input event handler, such as
     * an onclick handler in order to activate audio. Most implementations call this directly after constructing the Vindral
     * instance once in order to start playing, and then listen to a user-event in order to allow audio to be activated.
     *
     * Note 2: Even if you pre-emptively call this it is still recommended to listen to "needs user input"
     * and handle that event gracefully.
     */
    play: () => void;
    /**
     * How long in milliseconds since the instance was created
     */
    get uptime(): number;
    /**
     * This method collects a statistics report from internal modules. While many of the report's properties are documented, the report may also contain undocumented
     * properties used internally or temporarily for monitoring and improving the performance of the service.
     *
     * Use undocumented properties at your own risk.
     */
    getStatistics: () => Statistics;
    private resetModules;
    private suspend;
    private unsuspend;
    private getRuntimeInfo;
    private onMediaElementState;
    private onBufferEvent;
    /**
     * Aligns size and bitrate to match a rendition level correctly
     */
    private alignSizeAndBitRate;
    private get currentSubscription();
    private get targetSubscription();
    private timeToFirstFrame;
    private willUseMediaSource;
}
1287
/** Error telemetry counters collected by the telemetry module. */
interface TelemetryModuleStatistics {
    /**
     * The total amount of errors being spawned. Note that some media errors can trigger
     * thousands of errors for a single client in a few seconds before recovering. Therefore,
     * consider the number of viewers with errors, not just the total amount. Also, consider the median
     * instead of the mean for average calculation.
     */
    errorCount: number;
}
1296
/** Connection lifecycle state, as reported by the `connectionState` getter and the "connection state" event. */
type State = "connected" | "disconnected" | "connecting";
/** Progress of a context switch (e.g. a channel change or quality change), emitted via the "context switch" event. */
type ContextSwitchState = "completed" | "started";
1298
/** Connection-related statistics collected since the instance was created. */
interface ConnectionStatistics {
    /**
     * RTT (round trip time) between client and server(s).
     */
    rtt: MinMaxAverage;
    /**
     * A very rough initial estimation of minimum available bandwidth.
     */
    estimatedBandwidth: number;
    // Presumably the URL of the edge server currently connected to; undefined before a connection exists — confirm against implementation.
    edgeUrl?: string;
    /**
     * Total number of connections that have been established since instantiation.
     */
    connectCount: number;
    /**
     * Total number of connection attempts since instantiation.
     */
    connectionAttemptCount: number;
    // The transport protocol in use; undefined until a connection has been established.
    connectionProtocol: "vindral_ws" | "moq" | undefined;
}
1318
/**
 * Contextual information about the language switch.
 * Delivered with the "language switch" event.
 */
export interface LanguageSwitchContext {
    /**
     * The new language that was switched to
     */
    language: string;
}
1327
/**
 * Contextual information about the channel switch.
 * Delivered with the "channel switch" event.
 */
export interface ChannelSwitchContext {
    /**
     * The new channel id that was switched to
     */
    channelId: string;
}
1336
/** Snapshot of the audio volume state, delivered with the "volume state" event. */
interface VolumeState {
    /**
     * Whether the audio is muted
     */
    isMuted: boolean;
    /**
     * The volume level
     */
    volume: number;
}
1346
/**
 * The events that can be emitted from the Vindral instance
 */
export interface PublicVindralEvents {
    /**
     * When an error that requires action has occurred
     *
     * Can be a fatal error that will unload the Vindral instance - this is indicated by `isFatal()` on the error object returning true.
     *
     * In case of a fatal error it is appropriate to indicate what the error was to the user, either by displaying the error.message or
     * by using the error.code() as a key to look up a localization string. To resume streaming it is required to create a new Vindral instance.
     */
    ["error"]: Readonly<VindralError>;
    /**
     * When the instance needs user input to activate audio or sometimes video playback.
     * Is called with an object
     * ```javascript
     * {
     *   forAudio: boolean // true if user input is needed for audio playback
     *   forVideo: boolean // true if user input is needed for video playback
     * }
     * ```
     */
    ["needs user input"]: NeedsUserInputContext;
    /**
     * When a timed metadata event has been triggered
     */
    ["metadata"]: Readonly<Metadata>;
    /**
     * When the playback state changes
     */
    ["playback state"]: Readonly<PlaybackState>;
    /**
     * When the connection state changes
     */
    ["connection state"]: Readonly<State>;
    /**
     * When the available rendition levels are changed
     */
    ["rendition levels"]: ReadonlyArray<RenditionLevel>;
    /**
     * When the rendition level is changed
     */
    ["rendition level"]: Readonly<RenditionLevel>;
    /**
     * When the available languages are changed
     */
    ["languages"]: ReadonlyArray<string>;
    /**
     * When the available text tracks are changed
     */
    ["text tracks"]: ReadonlyArray<string>;
    /**
     * When the available channels are changed
     */
    ["channels"]: ReadonlyArray<Channel>;
    /**
     * When a context switch state change has occurred.
     * E.g. when a channel change has been requested, or quality is changed.
     */
    ["context switch"]: Readonly<ContextSwitchState>;
    /**
     * Emitted when a wallclock time message has been received from the server.
     *
     * Note: This is the edge server wallclock time and thus may differ slightly
     * between two viewers if they are connected to different edge servers.
     */
    ["server wallclock time"]: Readonly<number>;
    /**
     * Is emitted during connection whether the channel is live or not.
     *
     * If the channel is not live, the Vindral instance will try to reconnect until the `reconnectHandler`
     * determines that no more retries should be made.
     *
     * Note: If the web-sdk is instantiated at the same time as you are starting the stream it is possible
     * that this emits false until the started state has propagated through the system.
     */
    ["is live"]: boolean;
    /**
     * Emitted when a channel switch has been completed and the first frame of the new channel is rendered.
     * A string containing the channel id of the new channel is provided as an argument.
     */
    ["channel switch"]: Readonly<ChannelSwitchContext>;
    /**
     * Emitted when a language switch has been completed and the new language starts playing.
     */
    ["language switch"]: Readonly<LanguageSwitchContext>;
    /**
     * Emitted when the volume state changes.
     *
     * This is triggered both when the user changes the volume through the Vindral instance, but also
     * from external sources such as OS media shortcuts or other native UI outside of the browser.
     */
    ["volume state"]: Readonly<VolumeState>;
    /** Emitted with each buffer state change; the latest payload is also available via `lastBufferEvent`. */
    ["buffer state event"]: Readonly<BufferStateEvent>;
    /** Emitted once media has been initialized; carries no payload. */
    ["initialized media"]: void;
}
1443
+
1444
+ export {};