@vindral/web-sdk 3.4.3 → 4.0.0-190-g016e452d

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/player.d.ts ADDED
@@ -0,0 +1,1691 @@
1
+ type MatchingKeys<TRecord, TMatch, K extends keyof TRecord = keyof TRecord> = K extends (TRecord[K] extends TMatch ? K : never) ? K : never;
2
+ type VoidKeys<Record> = MatchingKeys<Record, void>;
3
+ type EventListenerReturnType = (() => void) | void;
4
+ declare class Emitter<TEvents, TEmits = TEvents, ArgLessEvents extends VoidKeys<TEvents> = VoidKeys<TEvents>, ArgEvents extends Exclude<keyof TEvents, ArgLessEvents> = Exclude<keyof TEvents, ArgLessEvents>, ArgLessEmits extends VoidKeys<TEmits> = VoidKeys<TEmits>, ArgEmits extends Exclude<keyof TEmits, ArgLessEmits> = Exclude<keyof TEmits, ArgLessEmits>> {
5
+ private listeners;
6
+ emit<T extends ArgLessEmits>(eventName: T): void;
7
+ emit<T extends ArgEmits>(eventName: T, args: TEmits[T]): void;
8
+ off<T extends ArgLessEvents>(eventName: T, fn: () => EventListenerReturnType): void;
9
+ off<T extends ArgEvents>(eventName: T, fn: (args: TEvents[T]) => EventListenerReturnType): void;
10
+ /**
11
+ * Add an event listener to `eventName`
12
+ *
13
+ * Event listeners may optionally return a "defer function" that will be called once all other listeners have been called.
14
+ * This is useful when one listener may want everyone to have reacted to an event before calling something.
15
+ */
16
+ on<T extends ArgLessEvents>(eventName: T, fn: () => void): void;
17
+ on<T extends ArgEvents>(eventName: T, fn: (args: TEvents[T]) => void): void;
18
+ /**
19
+ * Add an event listener to `eventName` that will be called once only
20
+ *
21
+ * Event listeners may optionally return a "defer function" that will be called once all other listeners have been called.
22
+ * This is useful when one listener may want everyone to have reacted to an event before calling something.
23
+ */
24
+ once<T extends ArgLessEvents>(eventName: T, fn: () => void): void;
25
+ once<T extends ArgEvents>(eventName: T, fn: (args: TEvents[T]) => void): void;
26
+ reset(): void;
27
+ private add;
28
+ }
29
+ declare const Levels: readonly [
30
+ "off",
31
+ "error",
32
+ "warn",
33
+ "info",
34
+ "debug",
35
+ "trace"
36
+ ];
37
+ type Level = (typeof Levels)[number];
38
+ declare const Level: {
39
+ ERROR: "error";
40
+ WARN: "warn";
41
+ INFO: "info";
42
+ DEBUG: "debug";
43
+ TRACE: "trace";
44
+ OFF: "off";
45
+ };
46
+ interface MinMaxAverage {
47
+ last: number;
48
+ /**
49
+ * Average value over a given interval.
50
+ */
51
+ average: number;
52
+ /**
53
+ * Maximum value over a given interval.
54
+ */
55
+ max: number;
56
+ /**
57
+ * Minimum value over a given interval.
58
+ */
59
+ min: number;
60
+ }
61
+ declare const tags: unique symbol;
62
+ type Tagged<BaseType, Tag extends PropertyKey> = BaseType & {
63
+ [tags]: {
64
+ [K in Tag]: void;
65
+ };
66
+ };
67
+ interface Channel {
68
+ /**
69
+ * Channel ID for the channel
70
+ */
71
+ channelId: string;
72
+ /**
73
+ * Display name
74
+ */
75
+ name: string;
76
+ /**
77
+ * Indicates whether there is an incoming source feed for the channel
78
+ */
79
+ isLive: boolean;
80
+ /**
81
+ * URLs to fetch thumbnail from
82
+ */
83
+ thumbnailUrls: string[];
84
+ }
85
+ interface ClientOverrides {
86
+ maxVideoBitRate?: number;
87
+ minBufferTime?: number;
88
+ maxBufferTime?: number;
89
+ burstEnabled?: boolean;
90
+ sizeBasedResolutionCapEnabled?: boolean;
91
+ separateVideoSocketEnabled?: boolean;
92
+ videoCodecs?: string[];
93
+ }
94
+ type AudioCodec = "aac" | "opus" | "mp3";
95
+ type VideoCodec = "h264" | "av1";
96
+ type Namespace = Tagged<Array<string>, "Namespace">;
97
+ interface TrackObject {
98
+ namespace?: Namespace;
99
+ name: string;
100
+ format: string;
101
+ label?: string;
102
+ renderGroup?: number;
103
+ altGroup?: number;
104
+ initData?: string;
105
+ initTrack?: string;
106
+ depends?: Array<string>;
107
+ temporalId?: number;
108
+ spatialId?: number;
109
+ codec?: string;
110
+ mimeType?: string;
111
+ framerate?: [
112
+ number,
113
+ number
114
+ ];
115
+ bitrate?: number;
116
+ width?: number;
117
+ height?: number;
118
+ samplerate?: number;
119
+ channelConfig?: string;
120
+ displayWidth?: number;
121
+ displayHeight?: number;
122
+ language?: string;
123
+ ["com.vindral.variant_uid"]?: string;
124
+ }
125
+ interface CatalogRoot {
126
+ version: number;
127
+ streamingFormat?: number;
128
+ streamingFormatVersion?: string;
129
+ }
130
+ interface TracksCatalog extends CatalogRoot {
131
+ namespace: Namespace;
132
+ tracks: Array<TrackObject>;
133
+ }
134
+ interface RenditionProps {
135
+ id: number;
136
+ bitRate: number;
137
+ codecString?: string;
138
+ language?: string;
139
+ meta?: Record<string, string>;
140
+ }
141
+ interface VideoRenditionProps {
142
+ codec: VideoCodec;
143
+ frameRate: [
144
+ number,
145
+ number
146
+ ];
147
+ width: number;
148
+ height: number;
149
+ }
150
+ interface AudioRenditionProps {
151
+ codec: AudioCodec;
152
+ channels: number;
153
+ sampleRate: number;
154
+ }
155
+ interface TextRenditionProps {
156
+ codec: "webvtt";
157
+ kind: "subtitles" | "captions";
158
+ label?: string;
159
+ }
160
+ type VideoRendition = VideoRenditionProps & RenditionProps;
161
+ type AudioRendition = AudioRenditionProps & RenditionProps;
162
+ type TextRendition = TextRenditionProps & RenditionProps;
163
+ type Rendition = VideoRendition | AudioRendition | TextRendition;
164
+ interface Telemetry {
165
+ url: string;
166
+ probability?: number;
167
+ includeErrors?: boolean;
168
+ includeEvents?: boolean;
169
+ includeStats?: boolean;
170
+ maxRetries?: number;
171
+ maxErrorReports?: number;
172
+ interval?: number;
173
+ }
174
+ interface ChannelWithCatalog extends Channel {
175
+ catalog: TracksCatalog;
176
+ renditions: Rendition[];
177
+ overrides?: ClientOverrides;
178
+ }
179
+ interface ChannelWithRenditions extends Channel {
180
+ renditions: Rendition[];
181
+ overrides?: ClientOverrides;
182
+ }
183
+ interface ServerCertificateHash {
184
+ algorithm: string;
185
+ value: string;
186
+ }
187
+ interface Edge {
188
+ moqUrl?: string;
189
+ moqWsUrl: string;
190
+ serverCertificateHashes?: ServerCertificateHash[];
191
+ }
192
+ interface MoQConnectInfo {
193
+ logsUrl?: string;
194
+ statsUrl?: string;
195
+ telemetry?: Telemetry;
196
+ channels: ChannelWithCatalog[];
197
+ edges: Edge[];
198
+ }
199
+ interface VindralConnectInfo {
200
+ logsUrl?: string;
201
+ statsUrl?: string;
202
+ telemetry?: Telemetry;
203
+ channels: ChannelWithRenditions[];
204
+ edges: string[];
205
+ }
206
+ type ConnectInfo = VindralConnectInfo | MoQConnectInfo;
207
+ interface Metadata {
208
+ /**
209
+ * The raw string content as it was ingested (if using JSON, it needs to be parsed on your end)
210
+ */
211
+ content: string;
212
+ /**
213
+ * Timestamp in ms
214
+ */
215
+ timestamp: number;
216
+ }
217
+ interface TimeRange {
218
+ start: number;
219
+ end: number;
220
+ }
221
+ interface ReconnectState {
222
+ /**
223
+ * The number of retry attempts so far.
224
+ * This gets reset on every successful connect, so it will start from zero every
225
+ * time the client instance gets disconnected and will increment until the
226
+ * client instance makes a successful connection attempt.
227
+ */
228
+ reconnectRetries: number;
229
+ }
230
+ interface Size {
231
+ width: number;
232
+ height: number;
233
+ }
234
+ interface VideoConstraint {
235
+ width: number;
236
+ height: number;
237
+ bitRate: number;
238
+ codec?: VideoCodec;
239
+ codecString?: string;
240
+ }
241
+ interface AdvancedOptions {
242
+ /**
243
+ * Constrains wasm decoding to this resolution.
244
+ * By default it is set to 1280 in width and height.
245
+ * This guarantees better performance on older devices and reduces battery drain in general.
246
+ */
247
+ wasmDecodingConstraint: Partial<VideoConstraint>;
248
+ }
249
+ interface DrmOptions {
250
+ /**
251
+ * Headers to be added to requests to license servers
252
+ */
253
+ headers?: Record<string, string>;
254
+ /**
255
+ * Query parameters to be added to requests to license servers
256
+ */
257
+ queryParams?: Record<string, string>;
258
+ }
259
+ type Media = "audio" | "video" | "audio+video";
260
+ interface Options {
261
+ /**
262
+ * URL to use when connecting to the stream
263
+ */
264
+ url: string;
265
+ /**
266
+ * Channel ID to connect to initially - can be changed later mid-stream when connected to a channel group.
267
+ */
268
+ channelId: string;
269
+ /**
270
+ * Channel group to connect to
271
+ * Note: Only needed for fast channel switching
272
+ */
273
+ channelGroupId?: string;
274
+ /**
275
+ * A container to attach the video view in - can be provided later with .attach() on the vindral core instance
276
+ */
277
+ container?: HTMLElement;
278
+ /**
279
+ * An authentication token to provide to the server when connecting - only needed for channels with authentication enabled
280
+ * Note: If not supplied when needed, an "Authentication Failed" error will be raised.
281
+ */
282
+ authenticationToken?: string;
283
+ /**
284
+ * Language to use initially - can be changed during runtime on the vindral instance
285
+ * Note: Only needed when multiple languages are provided - if no language is specified, one will be automatically selected.
286
+ */
287
+ language?: string;
288
+ /**
289
+ * TextTrack to use initially - can be changed during runtime on the vindral instance
290
+ */
291
+ textTrack?: string;
292
+ /**
293
+ * Sets the log level - defaults to info
294
+ */
295
+ logLevel?: Level;
296
+ /**
297
+ * Sets the minimum and initial buffer time
298
+ */
299
+ minBufferTime?: number;
300
+ /**
301
+ * Sets the maximum buffer time allowed. The vindral instance will automatically slowly increase
302
+ * the buffer time if the user experiences too much buffering with the initial buffer time.
303
+ */
304
+ maxBufferTime?: number;
305
+ /**
306
+ * Enables or disables user bandwidth savings by capping the video resolution to the size of the video element.
307
+ *
308
+ * Is enabled by default.
309
+ *
310
+ * Note: This is automatically set to false when abrEnabled is set to false.
311
+ */
312
+ sizeBasedResolutionCapEnabled?: boolean;
313
+ /**
314
+ * Enables or disables picture in picture support.
315
+ */
316
+ pictureInPictureEnabled?: boolean;
317
+ /**
318
+ * Enable bursting for initial connection and channel switches. This makes time to first frame faster at the
319
+ * cost of stability (more demanding due to the sudden burst of live content)
320
+ *
321
+ * Is disabled by default.
322
+ *
323
+ */
324
+ burstEnabled?: boolean;
325
+ /**
326
+ * Enable usage of the MediaSource API on supported browsers.
327
+ *
328
+ * Is enabled by default.
329
+ *
330
+ * Note: We recommend to keep this at the default value unless you have very specific needs.
331
+ */
332
+ mseEnabled?: boolean;
333
+ /**
334
+ * Enable Opus with the MediaSource API on supported browsers.
335
+ *
336
+ * Is enabled by default.
337
+ *
338
+ */
339
+ mseOpusEnabled?: boolean;
340
+ /**
341
+ * Enable or disable support for playing audio in the background for iOS devices.
342
+ *
343
+ * Is false (disabled) by default.
344
+ *
345
+ * Note: This may be enabled by default in a future (major) release
346
+ */
347
+ iosBackgroundPlayEnabled?: boolean;
348
+ /**
349
+ * Enable or disable Adaptive Bit Rate. This allows for automatically adapting the incoming bit rate based on
350
+ * the viewers bandwidth and thus avoiding buffering events. This also disables the
351
+ * sizeBasedResolutionCapEnabled option.
352
+ *
353
+ * Is enabled by default.
354
+ *
355
+ * Note: It is strongly recommended to keep this enabled as user experience can greatly suffer without ABR.
356
+ */
357
+ abrEnabled?: boolean;
358
+ /**
359
+ * Enable or disable telemetry. This allows for telemetry and errors being collected.
360
+ *
361
+ * Is enabled by default.
362
+ *
363
+ * We appreciate you turning it off during development/staging to not bloat real telemetry data.
364
+ *
365
+ * Note: It is strongly recommended to keep this enabled in production as it is required for insights and KPIs.
366
+ */
367
+ telemetryEnabled?: boolean;
368
+ /**
369
+ * Set a cap on the maximum video size.
370
+ * This can be used to provide user options to limit the video bandwidth usage.
371
+ *
372
+ * Note: This takes precedence over any size based resolution caps.
373
+ */
374
+ maxSize?: Size;
375
+ /**
376
+ * Maximum audio bit rate allowed.
377
+ * This can be used to provide user options to limit the audio bandwidth usage.
378
+ */
379
+ maxAudioBitRate?: number;
380
+ /**
381
+ * Maximum video bit rate allowed.
382
+ * This can be used to provide user options to limit the video bandwidth usage.
383
+ */
384
+ maxVideoBitRate?: number;
385
+ /**
386
+ * Controls video element background behaviour while loading.
387
+ * - If `false`, a black background will be shown.
388
+ * - If undefined or `true`, a live thumbnail will be shown.
389
+ * - If set to a string containing a URL (https://urltoimage), use that.
390
+ * Default `true` - meaning a live thumbnail is shown
391
+ */
392
+ poster?: boolean | string;
393
+ /**
394
+ * Whether to start the player muted or to try to start playing audio automatically.
395
+ */
396
+ muted?: boolean;
397
+ /**
398
+ * Provide a custom reconnect handler to control when the instance should stop trying to
399
+ * reconnect. The reconnect handler should either return true to allow the reconnect or
400
+ * false to stop reconnecting. It can also return a promise with true or false if it needs
401
+ * to make any async calls before determining whether to reconnect.
402
+ *
403
+ * The default reconnect handler allows 30 reconnects before stopping.
404
+ *
405
+ * Note: the ReconnectState gets reset every time the client instance makes a successful connection.
406
+ * This means the default reconnect handler will only stop reconnecting after 30 _consecutive_ failed connections.
407
+ *
408
+ * ```typescript
409
+ * // An example reconnect handler that will reconnect forever
410
+ * const reconnectHandler = (state: ReconnectState) => true
411
+ *
412
+ * // An example reconnect handler that will fetch an url and determine whether to reconnect
413
+ * const reconnectHandler = async (state: ReconnectState) => {
414
+ * const result = await fetch("https://should-i-reconnect-now.com")
415
+ * return result.ok
416
+ * },
417
+ * ```
418
+ */
419
+ reconnectHandler?: (state: ReconnectState) => Promise<boolean> | boolean;
420
+ tags?: string[];
421
+ ownerSessionId?: string;
422
+ edgeUrl?: string;
423
+ logShippingEnabled?: boolean;
424
+ statsShippingEnabled?: boolean;
425
+ /**
426
+ * Enable wake lock for iOS devices.
427
+ * The wake lock requires that the audio has been activated at least once for the instance, otherwise it will not work.
428
+ * Other devices already provide wake lock by default.
429
+ *
430
+ * This option is redundant and has no effect if iosMediaElementEnabled is enabled since that automatically enables wake lock.
431
+ *
432
+ * Disabled by default.
433
+ */
434
+ iosWakeLockEnabled?: boolean;
435
+ /**
436
+ * Disabling this will revert to legacy behaviour where Vindral will try to always keep the video element playing.
437
+ */
438
+ pauseSupportEnabled?: boolean;
439
+ /**
440
+ * Enables iOS devices to use a media element for playback. This enables fullscreen and picture in picture support on iOS.
441
+ */
442
+ iosMediaElementEnabled?: boolean;
443
+ /**
444
+ * Advanced options to override default behaviour.
445
+ */
446
+ advanced?: AdvancedOptions;
447
+ media?: Media;
448
+ videoCodecs?: VideoCodec[];
449
+ /**
450
+ * DRM options to provide to the Vindral instance
451
+ */
452
+ drm?: DrmOptions;
453
+ }
454
+ interface RenditionLevel {
455
+ audio?: AudioRendition;
456
+ video?: VideoRendition;
457
+ }
458
+ declare const defaultOptions: {
459
+ sizeBasedResolutionCapEnabled: boolean;
460
+ pictureInPictureEnabled: boolean;
461
+ abrEnabled: boolean;
462
+ burstEnabled: boolean;
463
+ mseEnabled: boolean;
464
+ mseOpusEnabled: boolean;
465
+ muted: boolean;
466
+ minBufferTime: number;
467
+ maxBufferTime: number;
468
+ logLevel: Level;
469
+ maxSize: Size;
470
+ maxVideoBitRate: number;
471
+ maxAudioBitRate: number;
472
+ tags: string[];
473
+ media: Media;
474
+ poster: string | boolean;
475
+ reconnectHandler: (state: ReconnectState) => Promise<boolean> | boolean;
476
+ iosWakeLockEnabled: boolean;
477
+ telemetryEnabled: boolean;
478
+ iosMediaElementEnabled: boolean;
479
+ pauseSupportEnabled: boolean;
480
+ advanced: {
481
+ wasmDecodingConstraint: Partial<VideoConstraint>;
482
+ };
483
+ videoCodecs: VideoCodec[];
484
+ };
485
+ interface VindralErrorProps {
486
+ isFatal: boolean;
487
+ type?: ErrorType;
488
+ code: string;
489
+ source?: Error | MediaError;
490
+ }
491
+ type ErrorType = "internal" | "external";
492
+ declare class VindralError extends Error {
493
+ private props;
494
+ private extra;
495
+ constructor(message: string, props: VindralErrorProps, extra?: {});
496
+ /**
497
+ * The error code is a stable string that represents the error type - this should be treated as an
498
+ * opaque string that can be used as a key for looking up localized strings for displaying error messages.
499
+ * @returns the error code
500
+ */
501
+ code: () => string;
502
+ /**
503
+ * Indicates whether the error is fatal - if it is that means the Vindral instance will be unloaded because of this error.
504
+ */
505
+ isFatal: () => boolean;
506
+ /**
507
+ * The underlying error that caused the Vindral error
508
+ * @returns the underlying error
509
+ */
510
+ source: () => Error | MediaError | undefined;
511
+ type: () => ErrorType;
512
+ /**
513
+ * @returns a stringifiable representation of the error
514
+ */
515
+ toStringifiable: () => Record<string, unknown>;
516
+ }
517
+ type PlaybackState = "buffering" | "playing" | "paused";
518
+ type BufferStateEvent = "filled" | "drained";
519
+ interface PlaybackModuleStatistics {
520
+ /**
521
+ * Current target buffer time if using dynamic buffer. Otherwise, this is the statically set buffer time from instantiation.
522
+ */
523
+ bufferTime: number;
524
+ needsInputForAudioCount: number;
525
+ needsInputForVideoCount: number;
526
+ }
527
+ interface NeedsUserInputContext {
528
+ /**
529
+ * True if user input is needed for audio
530
+ */
531
+ forAudio: boolean;
532
+ /**
533
+ * True if user input is needed for video
534
+ */
535
+ forVideo: boolean;
536
+ }
537
+ interface ApiClientOptions {
538
+ /**
539
+ * String representing the URL to the public CDN API.
540
+ */
541
+ publicEndpoint: string;
542
+ /**
543
+ * Function that should return a string containing a signed authentication token.
544
+ */
545
+ tokenFactory?: AuthorizationTokenFactory;
546
+ }
547
+ interface AuthorizationContext {
548
+ /**
549
+ * The channelGroupId that might need authorization.
550
+ */
551
+ channelGroupId?: string;
552
+ /**
553
+ * The channelId that might need authorization.
554
+ */
555
+ channelId?: string;
556
+ }
557
+ interface ConnectOptions {
558
+ channelGroupId?: string;
559
+ channelId: string;
560
+ }
561
+ type AuthorizationTokenFactory = (context: AuthorizationContext) => string | undefined;
562
+ declare class ApiClient {
563
+ private baseUrl;
564
+ private tokenFactory?;
565
+ constructor(options: ApiClientOptions);
566
+ /**
567
+ * Returns everything needed to setup the connection of Vindral instance.
568
+ */
569
+ connect(options: ConnectOptions): Promise<ConnectInfo>;
570
+ /**
571
+ * Fetches information regarding a single channel.
572
+ *
573
+ * @param channelId the channel to fetch
574
+ * @returns a [[Channel]] containing information about the requested channel.
575
+ */
576
+ getChannel(channelId: string): Promise<Channel>;
577
+ /**
578
+ * Fetches channels within a channel group
579
+ *
580
+ * Note: The returned list includes inactive channels - check isLive to filter out only active channels
581
+ *
582
+ * @param channelGroup the channel group to fetch channels from
583
+ * @returns an array of [[Channel]] that belong to the channel group
584
+ */
585
+ getChannels(channelGroupId: string): Promise<Channel[]>;
586
+ private getHeaders;
587
+ private getAuthToken;
588
+ private toChannels;
589
+ private toChannel;
590
+ }
591
+ interface AdaptivityStatistics {
592
+ /**
593
+ * True if adaptive bitrate (ABR) is enabled.
594
+ */
595
+ isAbrEnabled: boolean;
596
+ }
597
+ interface BufferTimeStatistics {
598
+ /**
599
+ * Number of times the buffer time has been adjusted. This will only happen when using dynamic buffer time
600
+ * (different min/max values of bufferTime).
601
+ */
602
+ bufferTimeAdjustmentCount: number;
603
+ }
604
+ interface RenditionsModuleStatistics {
605
+ /**
606
+ * Id of current video rendition subscribed to.
607
+ */
608
+ videoRenditionId?: number;
609
+ /**
610
+ * Id of current audio rendition subscribed to.
611
+ */
612
+ audioRenditionId?: number;
613
+ /**
614
+ * Current video codec being used.
615
+ */
616
+ videoCodec?: string;
617
+ /**
618
+ * Current audio codec being used.
619
+ */
620
+ audioCodec?: string;
621
+ /**
622
+ * Width of current video rendition (if any).
623
+ */
624
+ videoWidth?: number;
625
+ /**
626
+ * Height of current video rendition (if any).
627
+ */
628
+ videoHeight?: number;
629
+ /**
630
+ * Currently expected video bit rate according to metadata in bits/s.
631
+ */
632
+ expectedVideoBitRate?: number;
633
+ /**
634
+ * Currently expected audio bit rate according to metadata in bits/s.
635
+ */
636
+ expectedAudioBitRate?: number;
637
+ /**
638
+ * Current language. For non-multi language streams, this will often be unset.
639
+ */
640
+ language?: string;
641
+ /**
642
+ * Frame rate. Example: `"frameRate": [24000, 1001]`.
643
+ */
644
+ frameRate?: [
645
+ number,
646
+ number
647
+ ];
648
+ /**
649
+ * Total count of rendition level changes (quality downgrades/upgrades).
650
+ */
651
+ renditionLevelChangeCount: number;
652
+ }
653
+ interface VideoConstraintCap {
654
+ width: number;
655
+ height: number;
656
+ bitRate: number;
657
+ }
658
+ interface AudioConstraintCap {
659
+ bitRate: number;
660
+ }
661
+ interface ConstraintCap {
662
+ video: VideoConstraintCap;
663
+ audio: AudioConstraintCap;
664
+ }
665
+ interface ConstraintCapStatistics {
666
+ constraintCap?: ConstraintCap;
667
+ windowInnerWidth: number;
668
+ windowInnerHeight: number;
669
+ elementWidth: number;
670
+ elementHeight: number;
671
+ pixelRatio: number;
672
+ }
673
+ interface DecoderStatistics {
674
+ videoDecodeRate: number;
675
+ videoDecodeTime: MinMaxAverage;
676
+ audioDecodeTime: MinMaxAverage;
677
+ videoTransportTime: MinMaxAverage;
678
+ }
679
+ interface DocumentStateModulesStatistics {
680
+ isVisible: boolean;
681
+ isOnline: boolean;
682
+ isVisibleCount: number;
683
+ isHiddenCount: number;
684
+ isOnlineCount: number;
685
+ isOfflineCount: number;
686
+ navigatorRtt?: number;
687
+ navigatorEffectiveType?: EffectiveConnectionType;
688
+ navigatorConnectionType?: ConnectionType;
689
+ navigatorSaveData?: boolean;
690
+ navigatorDownlink?: number;
691
+ }
692
+ interface IncomingDataModuleStatistics {
693
+ /**
694
+ * Current video bitrate in bits/second.
695
+ */
696
+ videoBitRate?: number;
697
+ /**
698
+ * Current audio bitrate in bits/second.
699
+ */
700
+ audioBitRate?: number;
701
+ /**
702
+ * Counter of number of bytes received.
703
+ */
704
+ bytesReceived: number;
705
+ }
706
+ interface MseModuleStatistics {
707
+ quotaErrorCount: number;
708
+ mediaSourceOpenTime: number;
709
+ totalVideoFrames?: number;
710
+ droppedVideoFrames?: number;
711
+ successfulVideoAppendCalls?: number;
712
+ successfulAudioAppendsCalls?: number;
713
+ }
714
+ interface QualityOfServiceModuleStatistics {
715
+ /**
716
+ * Time in milliseconds spent in buffering state. Note that this value will increase while in background if
717
+ * buffering when leaving foreground.
718
+ */
719
+ timeSpentBuffering: number;
720
+ /**
721
+ * Total number of buffering events since instantiation.
722
+ */
723
+ bufferingEventsCount: number;
724
+ /**
725
+ * Number of fatal quality of service events.
726
+ */
727
+ fatalQosCount: number;
728
+ /**
729
+ * Ratio of time being spent on different bitrates.
730
+ * Example: `"timeSpentRatio": { "1160000": 0.2, "2260000": 0.8 }` shows 20% spent on 1.16 Mbps, 80% spent on 2.26 Mbps.
731
+ */
732
+ timeSpentRatio: {
733
+ [bitRate: string]: number;
734
+ };
735
+ }
736
+ interface SyncModuleStatistics {
737
+ drift: number | undefined;
738
+ driftAdjustmentCount: number;
739
+ timeshiftDriftAdjustmentCount: number;
740
+ seekTime: number;
741
+ }
742
+ interface VideoPlayerStatistics {
743
+ renderedFrameCount: number;
744
+ rendererDroppedFrameCount: number;
745
+ contextLostCount: number;
746
+ contextRestoredCount: number;
747
+ }
748
+ declare class UserAgentInformation {
749
+ private highEntropyValues?;
750
+ constructor();
751
+ getUserAgentInformation(): {
752
+ locationOrigin: string;
753
+ locationPath: string;
754
+ ancestorOrigins: string[] | undefined;
755
+ hardwareConcurrency: number;
756
+ deviceMemory: number | undefined;
757
+ userAgentLegacy: string;
758
+ ua: {
759
+ browser: {
760
+ brands: string[];
761
+ fullVersionBrands: string[];
762
+ majorVersions: string[];
763
+ };
764
+ device: string;
765
+ os: {
766
+ family: string;
767
+ version: string;
768
+ major_version: number;
769
+ };
770
+ };
771
+ } | {
772
+ locationOrigin: string;
773
+ locationPath: string;
774
+ ancestorOrigins: string[] | undefined;
775
+ hardwareConcurrency: number;
776
+ deviceMemory: number | undefined;
777
+ userAgent: string;
778
+ };
779
+ }
780
+ type ModuleStatistics = AdaptivityStatistics & BufferTimeStatistics & ConnectionStatistics & ConstraintCapStatistics & DecoderStatistics & DocumentStateModulesStatistics & IncomingDataModuleStatistics & MseModuleStatistics & PlaybackModuleStatistics & QualityOfServiceModuleStatistics & RenditionsModuleStatistics & SyncModuleStatistics & TelemetryModuleStatistics & VideoPlayerStatistics;
781
+ type Statistics = ModuleStatistics & ReturnType<UserAgentInformation["getUserAgentInformation"]> & {
782
+ /**
783
+ * Version of the @vindral/web-sdk being used.
784
+ */
785
+ version: string;
786
+ /**
787
+ * IP of the client.
788
+ */
789
+ ip?: string;
790
+ /**
791
+ * URL being used for connecting to the stream.
792
+ */
793
+ url: string;
794
+ /**
795
+ * A session is bound to a connection. If the client reconnects for any reason (e.g. coming back from inactivity
796
+ * or a problem with network on client side), a new sessionId will be used.
797
+ *
798
+ */
799
+ sessionId?: string;
800
+ /**
801
+ * Unlike `sessionId`, `clientId` will remain the same even after reconnections and represents this unique Vindral instance.
802
+ */
803
+ clientId: string;
804
+ /**
805
+ * How long in milliseconds since the instance was created.
806
+ */
807
+ uptime: number;
808
+ /**
809
+ * Current channel ID being subscribed to.
810
+ */
811
+ channelId: string;
812
+ /**
813
+ * Channel group being subscribed to.
814
+ */
815
+ channelGroupId?: string;
816
+ /**
817
+ * Time in milliseconds from instantiation to playback of video and audio being started.
818
+ * Note that an actual frame render often happens much quicker, but that is not counted as TTFF.
819
+ */
820
+ timeToFirstFrame?: number;
821
+ iosMediaElementEnabled?: boolean;
822
+ };
823
+ declare class Vindral extends Emitter<PublicVindralEvents> {
824
+ #private;
825
+ private static MAX_POOL_SIZE;
826
+ private static INITIAL_MAX_BIT_RATE;
827
+ private static DISCONNECT_TIMEOUT;
828
+ private static REMOVE_CUE_THRESHOLD;
829
+ /**
830
+ * Picture in picture
831
+ */
832
+ readonly pictureInPicture: {
833
+ /**
834
+ * Enters picture in picture
835
+ * @returns a promise that resolves if successful
836
+ */
837
+ enter: () => Promise<void>;
838
+ /**
839
+ * Exits picture in picture
840
+ * @returns a promise that resolves if successful
841
+ */
842
+ exit: () => Promise<void>;
843
+ /**
844
+ * returns whether picture in picture is currently active
845
+ */
846
+ isActive: () => boolean;
847
+ /**
848
+ * returns whether picture in picture is supported
849
+ */
850
+ isSupported: () => boolean;
851
+ };
852
+ private browser;
853
+ private options;
854
+ private element;
855
+ private playbackSource;
856
+ private emitter;
857
+ private logger;
858
+ private modules;
859
+ private clientIp?;
860
+ private sessionId?;
861
+ private clientId;
862
+ private _channels;
863
+ private createdAt;
864
+ private hasCalledConnect;
865
+ private latestEmittedLanguages;
866
+ private wakeLock;
867
+ private pool;
868
+ private userAgentInformation;
869
+ private encryptedMediaExtensions;
870
+ private sampleProcessingSesssions;
871
+ private sizes;
872
+ private isSuspended;
873
+ private disconnectTimeout;
874
+ constructor(options: Options);
875
+ /**
876
+ * Attaches the video view to a DOM element. The Vindral video view will be sized to fill this element while
877
+ * maintaining the correct aspect ratio.
878
+ * @param container the container element to append the video view to. Often a div element.
879
+ * @returns
880
+ */
881
+ attach: (container: HTMLElement) => void;
882
+ /**
883
+ * Set the current volume.
884
+ * Setting this to 0 is not equivalent to muting the audio.
885
+ * Setting this to >0 is not equivalent to unmuting the audio.
886
+ *
887
+ * Note that setting volume is not allowed on iPadOS and iOS devices.
888
+ * This is an OS/browser limitation on the video element.
889
+ *
890
+ * [Read more about it on Apple docs](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html)
891
+ * for iOS-Specific Considerations. The following section is the important part:
892
+ * On iOS devices, the audio level is always under the user's physical control. The volume property is not settable in JavaScript. Reading the volume property always returns 1.
893
+ */
894
+ set volume(volume: number);
895
+ /**
896
+ * The current volume. Note that if the playback is muted volume can still be set.
897
+ */
898
+ get volume(): number;
899
+ /**
900
+ * Set playback to muted/unmuted
901
+ */
902
+ set muted(muted: boolean);
903
+ /**
904
+ * Whether the playback is muted or not
905
+ */
906
+ get muted(): boolean;
907
+ /**
908
+ * Media (audio | video | audio+video)
909
+ */
910
+ get media(): Media;
911
+ /**
912
+ * The current average video bit rate in bits/s
913
+ */
914
+ get videoBitRate(): number;
915
+ /**
916
+ * The current average audio bit rate in bits/s
917
+ */
918
+ get audioBitRate(): number;
919
+ /**
920
+ * The current connection state
921
+ */
922
+ get connectionState(): Readonly<State>;
923
+ /**
924
+ * The current playback state
925
+ */
926
+ get playbackState(): Readonly<PlaybackState>;
927
+ /**
928
+ * The current buffer fullness as a floating point value between 0-1, where 1 is full and 0 is empty.
929
+ */
930
+ get bufferFullness(): number;
931
+ /**
932
+ * Whether user bandwidth savings by capping the video resolution to the size of the video element is enabled
933
+ */
934
+ get sizeBasedResolutionCapEnabled(): boolean;
935
+ /**
936
+ * Enables or disables user bandwidth savings by capping the video resolution to the size of the video element.
937
+ */
938
+ set sizeBasedResolutionCapEnabled(enabled: boolean);
939
+ /**
940
+ * Whether ABR is currently enabled
941
+ */
942
+ get abrEnabled(): boolean;
943
+ /**
944
+ * Enable or disable ABR
945
+ *
946
+ * The client will immediately stop changing rendition level based on QoS metrics
947
+ *
948
+ * Note: It is strongly recommended to keep this enabled, as disabling it can severely increase
949
+ * the number of buffering events for viewers.
950
+ */
951
+ set abrEnabled(enabled: boolean);
952
+ /**
953
+ * Estimated live edge time for the current channel
954
+ */
955
+ get serverEdgeTime(): number | undefined;
956
+ /**
957
+ * @returns Estimated wallclock time on the edge server in milliseconds
958
+ */
959
+ get serverWallclockTime(): number | undefined;
960
+ /**
961
+ * Local current time normalized between all channels in the channel group
962
+ */
963
+ get currentTime(): number;
964
+ /**
965
+ * Current time for the channel. This is the actual stream time, passed on from your ingress.
966
+ * Integer overflow could make this value differ from your encoder timestamps if it has been rolling for more
967
+ * than 42 days with RTMP as target.
968
+ *
969
+ * Note: This is not normalized between channels, thus it can make jumps when switching channels
970
+ */
971
+ get channelCurrentTime(): number;
972
+ /**
973
+ * The current target buffer time in milliseconds
974
+ */
975
+ get targetBufferTime(): number;
976
+ /**
977
+ * Set the current target buffer time in milliseconds
978
+ */
979
+ set targetBufferTime(bufferTimeMs: number);
980
+ /**
981
+ * The estimated playback latency based on target buffer time, the connection rtt and local playback drift
982
+ */
983
+ get playbackLatency(): number | undefined;
984
+ /**
985
+ * The estimated utc timestamp (in ms) for the playhead.
986
+ */
987
+ get playbackWallclockTime(): number | undefined;
988
+ /**
989
+ * Channels that can be switched between
990
+ */
991
+ get channels(): ReadonlyArray<Channel>;
992
+ /**
993
+ * Languages available
994
+ */
995
+ get languages(): ReadonlyArray<string>;
996
+ /**
997
+ * The current language
998
+ */
999
+ get language(): string | undefined;
1000
+ /**
1001
+ * Set the current language
1002
+ */
1003
+ set language(language: string | undefined);
1004
+ /**
1005
+ * Set the active text track
1006
+ */
1007
+ set textTrack(label: string | undefined);
1008
+ /**
1009
+ * Get the available text tracks
1010
+ */
1011
+ get textTracks(): string[];
1012
+ /**
1013
+ * Get the active text track
1014
+ */
1015
+ get textTrack(): string | undefined;
1016
+ /**
1017
+ * The current channelId
1018
+ */
1019
+ get channelId(): string;
1020
+ /**
1021
+ * Set the current channelId
1022
+ *
1023
+ * Possible channels to set are available from [[channels]]
1024
+ *
1025
+ * Note that the following scenarios are not possible right now:
1026
+ * - switching channel from a channel with audio to a channel without audio (unless audio only mode is active)
1027
+ * - switching channel from a channel with video to a channel without video (unless video only mode is active)
1028
+ */
1029
+ set channelId(channelId: string);
1030
+ /**
1031
+ * Max size that will be subscribed to
1032
+ */
1033
+ get maxSize(): Size;
1034
+ /**
1035
+ * Set max size that will be subscribed to
1036
+ *
1037
+ * Note: If ABR is disabled, setting this will make the client instantly subscribe to this size
1038
+ */
1039
+ set maxSize(size: Size);
1040
+ /**
1041
+ * The max video bit rate that will be subscribed to
1042
+ *
1043
+ * Note: Returns Number.MAX_SAFE_INTEGER if no limits have been set
1044
+ */
1045
+ get maxVideoBitRate(): number;
1046
+ /**
1047
+ * Set max video bit rate that will be subscribed to
1048
+ *
1049
+ * Note: If ABR is disabled, setting this will make the client instantly subscribe to this bitrate
1050
+ */
1051
+ set maxVideoBitRate(bitRate: number);
1052
+ /**
1053
+ * The max audio bit rate that will be subscribed to
1054
+ *
1055
+ * Note: Returns Number.MAX_SAFE_INTEGER if no limits have been set
1056
+ */
1057
+ get maxAudioBitRate(): number;
1058
+ /**
1059
+ * Set max audio bit rate that will be subscribed to
1060
+ *
1061
+ * Note: If ABR is disabled, setting this will make the client instantly subscribe to this bit rate
1062
+ */
1063
+ set maxAudioBitRate(bitRate: number);
1064
+ /**
1065
+ * The rendition levels available.
1066
+ */
1067
+ get renditionLevels(): ReadonlyArray<RenditionLevel>;
1068
+ /**
1069
+ * The current rendition level
1070
+ */
1071
+ get currentRenditionLevel(): Readonly<RenditionLevel> | undefined;
1072
+ /**
1073
+ * The target rendition level that the client is currently switching to
1074
+ */
1075
+ get targetRenditionLevel(): Readonly<RenditionLevel> | undefined;
1076
+ /**
1077
+ * True if the client is currently switching from one rendition level to another
1078
+ */
1079
+ get isSwitchingRenditionLevel(): boolean;
1080
+ /**
1081
+ * The time ranges buffered for video.
1082
+ * The ranges are specified in milliseconds.
1083
+ */
1084
+ get videoBufferedRanges(): ReadonlyArray<TimeRange>;
1085
+ /**
1086
+ * The time ranges buffered for audio.
1087
+ * The ranges are specified in milliseconds.
1088
+ */
1089
+ get audioBufferedRanges(): ReadonlyArray<TimeRange>;
1090
+ /**
1091
+ * The API client for calls to the public available endpoints of the Vindral Live CDN.
1092
+ */
1093
+ getApiClient(): ApiClient;
1094
+ get lastBufferEvent(): Readonly<BufferStateEvent>;
1095
+ get activeRatios(): Map<string, number>;
1096
+ get bufferingRatios(): Map<string, number>;
1097
+ get timeSpentBuffering(): number;
1098
+ get timeActive(): number;
1099
+ get mediaElement(): HTMLMediaElement | HTMLCanvasElement;
1100
+ get audioNode(): AudioNode | undefined;
1101
+ /**
1102
+ * Get active Vindral Options
1103
+ */
1104
+ getOptions: () => Options & typeof defaultOptions;
1105
+ /**
1106
+ * Get url for fetching thumbnail. Note that fetching thumbnails only works for an active channel.
1107
+ */
1108
+ getThumbnailUrl: () => string;
1109
+ /**
1110
+ * Update authentication token on an already established and authenticated connection
1111
+ */
1112
+ updateAuthenticationToken: (token: string) => void;
1113
+ /**
1114
+ * @deprecated since 3.0.0 Use play instead.
1115
+ * Connects to the configured channel and starts streaming
1116
+ */
1117
+ connect: () => void;
1118
+ private _connect;
1119
+ /**
1120
+ * Get options that can be used for CastSender
1121
+ */
1122
+ getCastOptions: () => Options;
1123
+ private onConnectInfo;
1124
+ private emitLanguagesIfChanged;
1125
+ private updateTextTracks;
1126
+ private cleanupTextTracks;
1127
+ private filterRenditions;
1128
+ /**
1129
+ * Patch the subscription with properties from the channel that isn't known until connection
1130
+ * @param channel Channel with the renditions to patch the subscription based on
1131
+ */
1132
+ private patchSubscription;
1133
+ private isSupportedVideoCodecProfile;
1134
+ private supportedAudioCodecs;
1135
+ private initializeDecodingModule;
1136
+ /**
1137
+ * Fully unloads the instance. This disconnects the clients and stops any background tasks.
1138
+ * This client instance can not be used after this has been called.
1139
+ */
1140
+ unload: () => Promise<void>;
1141
+ /**
1142
+ * @deprecated since 3.0.0 Use play instead.
1143
+ *
1144
+ * Activates audio or video on web browsers that require a user gesture to enable media playback.
1145
+ * The Vindral instance will emit a "needs user input" event to indicate when this is needed.
1146
+ * But it is also safe to pre-emptively call this if it is more convenient - such as in cases where
1147
+ * the Vindral instance itself is created in a user input event.
1148
+ *
1149
+ * Requirements: This method needs to be called within an user-input event handler to function properly, such as
1150
+ * an onclick handler.
1151
+ *
1152
+ * Note: Even if you pre-emptively call this it is still recommended to listen to "needs user input"
1153
+ * and handle that event gracefully.
1154
+ */
1155
+ userInput: () => void;
1156
+ /**
1157
+ * Pauses the stream. Call .play() to resume playback again.
1158
+ */
1159
+ pause: () => void;
1160
+ private registerDebugInstance;
1161
+ /**
1162
+ *
1163
+ * Start playing the stream.
1164
+ *
1165
+ * This method also activates audio or video on web browsers that require a user gesture to enable media playback.
1166
+ * The Vindral instance will emit a "needs user input" event to indicate when this is needed.
1167
+ * But it is also safe to pre-emptively call this if it is more convenient - such as in cases where
1168
+ * the Vindral instance itself is created in a user input event.
1169
+ *
1170
+ * Note: In most browsers this method needs to be called within an user-input event handler, such as
1171
+ * an onclick handler in order to activate audio. Most implementations call this directly after constructing the Vindral
1172
+ * instance once in order to start playing, and then listen to a user-event in order to allow audio to be activated.
1173
+ *
1174
+ * Note 2: Even if you pre-emptively call this it is still recommended to listen to "needs user input"
1175
+ * and handle that event gracefully.
1176
+ */
1177
+ play: () => void;
1178
+ /**
1179
+ * How long in milliseconds since the instance was created
1180
+ */
1181
+ get uptime(): number;
1182
+ /**
1183
+ * This method collects a statistics report from internal modules. While many of the report's properties are documented, the report may also contain undocumented
1184
+ * properties used internally or temporarily for monitoring and improving the performance of the service.
1185
+ *
1186
+ * Use undocumented properties at your own risk.
1187
+ */
1188
+ getStatistics: () => Statistics;
1189
+ private resetModules;
1190
+ private suspend;
1191
+ private unsuspend;
1192
+ private getRuntimeInfo;
1193
+ private onMediaElementState;
1194
+ private onBufferEvent;
1195
+ /**
1196
+ * Aligns size and bitrate to match a rendition level correctly
1197
+ */
1198
+ private alignSizeAndBitRate;
1199
+ private get currentSubscription();
1200
+ private get targetSubscription();
1201
+ private timeToFirstFrame;
1202
+ private willUseMediaSource;
1203
+ }
1204
/** Statistics reported by the telemetry module. */
interface TelemetryModuleStatistics {
    /**
     * The total amount of errors being spawned. Note that some media errors can trigger
     * thousands of errors for a single client in a few seconds before recovering. Therefore,
     * consider the number of viewers with errors, not just the total amount. Also, consider the median
     * instead of the mean for average calculation.
     */
    errorCount: number;
}
1213
/** Connection lifecycle state of the client (see the "connection state" event). */
type State = "connected" | "disconnected" | "connecting";
/** Progress of a context switch, e.g. a channel or quality change (see the "context switch" event). */
type ContextSwitchState = "completed" | "started";
1215
/** Statistics about the connection between this client and the Vindral edge. */
interface ConnectionStatistics {
    /**
     * RTT (round trip time) between client and server(s).
     */
    rtt: MinMaxAverage;
    /**
     * A very rough initial estimation of minimum available bandwidth.
     */
    estimatedBandwidth: number;
    // NOTE(review): presumably the URL of the edge server currently connected to;
    // absent when no connection has been established — confirm.
    edgeUrl?: string;
    /**
     * Total number of connections that have been established since instantiation.
     */
    connectCount: number;
    /**
     * Total number of connection attempts since instantiation.
     */
    connectionAttemptCount: number;
    // Transport protocol in use ("vindral_ws" | "moq"); undefined presumably
    // means no connection yet — confirm.
    connectionProtocol: "vindral_ws" | "moq" | undefined;
}
1235
/** Payload of the "language switch" event. */
interface LanguageSwitchContext {
    /**
     * The new language that was switched to
     */
    language: string;
}
/** Payload of the "channel switch" event. */
interface ChannelSwitchContext {
    /**
     * The new channel id that was switched to
     */
    channelId: string;
}
1247
/** Payload of the "volume state" event. */
interface VolumeState {
    /**
     * Whether the audio is muted
     */
    isMuted: boolean;
    /**
     * The volume level
     */
    volume: number;
}
1257
/** Public events emitted by the Vindral client, keyed by event name. */
interface PublicVindralEvents {
    /**
     * When an error that requires action has occurred
     *
     * Can be a fatal error that will unload the Vindral instance - this is indicated by `isFatal()` on the error object returning true.
     *
     * In case of a fatal error it is appropriate to indicate what the error was to the user, either by displaying the error.message or
     * by using the error.code() as a key to look up a localization string. To resume streaming it is required to create a new Vindral instance.
     */
    ["error"]: Readonly<VindralError>;
    /**
     * When the instance needs user input to activate audio or sometimes video playback.
     * Is called with an object
     * ```javascript
     * {
     * forAudio: boolean // true if user input is needed for audio playback
     * forVideo: boolean // true if user input is needed for video playback
     * }
     * ```
     */
    ["needs user input"]: NeedsUserInputContext;
    /**
     * When a timed metadata event has been triggered
     */
    ["metadata"]: Readonly<Metadata>;
    /**
     * When the playback state changes
     */
    ["playback state"]: Readonly<PlaybackState>;
    /**
     * When the connection state changes
     */
    ["connection state"]: Readonly<State>;
    /**
     * When the available rendition levels are changed
     */
    ["rendition levels"]: ReadonlyArray<RenditionLevel>;
    /**
     * When the rendition level is changed
     */
    ["rendition level"]: Readonly<RenditionLevel>;
    /**
     * When the available languages are changed
     */
    ["languages"]: ReadonlyArray<string>;
    /**
     * When the available text tracks are changed
     */
    ["text tracks"]: ReadonlyArray<string>;
    /**
     * When the available channels are changed
     */
    ["channels"]: ReadonlyArray<Channel>;
    /**
     * When a context switch state change has occurred.
     * E.g. when a channel change has been requested, or quality is changed.
     */
    ["context switch"]: Readonly<ContextSwitchState>;
    /**
     * Emitted when a wallclock time message has been received from the server.
     *
     * Note: This is the edge server wallclock time and thus may differ slightly
     * between two viewers if they are connected to different edge servers.
     */
    ["server wallclock time"]: Readonly<number>;
    /**
     * Is emitted during connection whether the channel is live or not.
     *
     * If the channel is not live, the Vindral instance will try to reconnect until the `reconnectHandler`
     * determines that no more retries should be made.
     *
     * Note: If the web-sdk is instantiated at the same time as you are starting the stream it is possible
     * that this emits false until the started state has propagated through the system.
     */
    ["is live"]: boolean;
    /**
     * Emitted when a channel switch has been completed and the first frame of the new channel is rendered.
     * A string containing the channel id of the new channel is provided as an argument.
     */
    ["channel switch"]: Readonly<ChannelSwitchContext>;
    /**
     * Emitted when a language switch has been completed and the new language starts playing.
     */
    ["language switch"]: Readonly<LanguageSwitchContext>;
    /**
     * Emitted when the volume state changes.
     *
     * This is triggered both when the user changes the volume through the Vindral instance, but also
     * from external sources such as OS media shortcuts or other native UI outside of the browser.
     */
    ["volume state"]: Readonly<VolumeState>;
    // Carries a BufferStateEvent; exact trigger conditions are not visible in this
    // declaration file — see BufferStateEvent and `lastBufferEvent` on Vindral.
    ["buffer state event"]: Readonly<BufferStateEvent>;
    // Fired once media has been initialized; carries no payload.
    ["initialized media"]: void;
}
1351
/**
 * Abstract base class for the player's custom-element buttons.
 * Subclasses supply the click behavior by implementing `handleClick`.
 */
declare abstract class VindralButton extends HTMLElement {
    #private;
    static observedAttributes: string[];
    constructor();
    // Keyboard keys this element handles — NOTE(review): exact semantics not
    // visible in this declaration file; confirm against implementation.
    get keysUsed(): string[];
    connectedCallback(): void;
    enable(): void;
    disable(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: string, _old: string, value: string): void;
    /** Handle a click on the button; implemented by each concrete subclass. */
    protected abstract handleClick(e: Event): void;
}
1363
/** `<vindral-airplay-button>`: button toggling AirPlay state (see `isAirPlaying`). */
declare class AirPlayButton extends VindralButton {
    #private;
    static observedAttributes: string[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: string, old: string, value: string): void;
    set isAirPlaying(value: boolean);
    get isAirPlaying(): boolean;
    protected handleClick(_: Event): void;
}
/** `<vindral-buffering-overlay>`: overlay driven by the "buffering" attribute. */
declare class BufferingOverlay extends HTMLElement {
    #private;
    static observedAttributes: "buffering"[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
}
/** `<vindral-cast-button>`: button toggling cast state (see `isCasting`). */
declare class CastButton extends VindralButton {
    #private;
    static observedAttributes: string[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: string, old: string, value: string): void;
    set isCasting(value: boolean);
    get isCasting(): boolean;
    protected handleClick(_: Event): void;
}
/** Union of attribute names observed by CastOverlay. */
type CastOverlayAttributes = (typeof CastOverlay.observedAttributes)[number];
/** `<vindral-cast-overlay>`: overlay reacting to "is-casting" and "cast-receiver-name". */
declare class CastOverlay extends HTMLElement {
    #private;
    static observedAttributes: ("is-casting" | "cast-receiver-name")[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: CastOverlayAttributes, oldValue: string, newValue: string): void;
}
1401
/** `<vindral-channel-grid>`: focusable grid of channel items. */
declare class ChannelGrid extends HTMLElement {
    #private;
    static observedAttributes: string[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: string, _old: string, value: string): void;
    get keysUsed(): string[];
    handleEvent: (event: Event) => void;
    focus(): void;
}
/** `<vindral-channel-grid-button>`: button opening the channel grid (see `isOpen`). */
declare class ChannelGridButton extends VindralButton {
    #private;
    static observedAttributes: string[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: string, old: string, value: string): void;
    enable(): void;
    protected handleClick(_: MouseEvent): void;
    get isOpen(): boolean;
}
/** `<vindral-channel-grid-item>`: one channel entry with a refreshable thumbnail. */
declare class ChannelGridItem extends HTMLElement {
    #private;
    // Presumably the time of the last thumbnail refresh; used together with
    // updateThumbnail() — confirm against implementation.
    lastThumbnailUpdate?: number;
    static observedAttributes: string[];
    constructor();
    attributeChangedCallback(name: string, old: string, value: string): void;
    updateThumbnail(): void;
}
/** `<vindral-control-bar>`: container for the player controls; observes no attributes. */
declare class ControlBar extends HTMLElement {
    static observedAttributes: never[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
}
1437
/** Union of attribute names observed by Controller. */
type ControllerAttributes = (typeof Controller.observedAttributes)[number];
/**
 * `<vindral-controller>`: central controller element. Exposes the underlying
 * Vindral SDK instance via `instance`, reacts to the configuration/UI
 * attributes listed in `observedAttributes`, and lets child components
 * attach/detach via `connectListener`/`disconnectListener`.
 */
declare class Controller extends HTMLElement {
    #private;
    static observedAttributes: readonly [
        ...("language" | "channels" | "buffering" | "paused" | "volume" | "muted" | "user-interacting" | "is-casting" | "cast-available" | "cast-receiver-name" | "ui-locked" | "is-fullscreen" | "is-fullscreen-fallback" | "rendition-levels" | "rendition-level" | "max-video-bit-rate" | "channel-id" | "channel-group-id" | "pip-available" | "is-pip" | "airplay-available" | "is-airplaying" | "media" | "languages" | "text-tracks" | "text-track" | "needs-user-input" | "authentication-token" | "volume-level" | "cast" | "airplay" | "pip" | "fullscreen" | "vu-meter" | "poster-src")[],
        "url",
        "edge-url",
        "target-buffer-time",
        "cast-receiver-id",
        "cast-background",
        "log-level",
        "max-size",
        "min-buffer-time",
        "max-buffer-time",
        "max-audio-bit-rate",
        "burst-enabled",
        "mse-enabled",
        "mse-opus-enabled",
        "ios-background-play-enabled",
        "ios-wake-lock-enabled",
        "ios-media-element-enabled",
        "abr-enabled",
        "size-based-resolution-cap-enabled",
        "telemetry-enabled",
        "video-codecs",
        "poster",
        "advanced",
        "drm-headers",
        "drm-queryparams",
        "auto-instance-enabled"
    ];
    constructor();
    connectedCallback(): Promise<void>;
    disconnectedCallback(): void;
    handleEvent: (event: Event) => void;
    attributeChangedCallback(name: ControllerAttributes, oldValue: string, newValue?: string): void;
    /** Attach a child component so it receives updates from this controller. */
    connectListener(component: HTMLElement): void;
    /** Detach a previously connected child component. */
    disconnectListener(component: HTMLElement): void;
    /** The underlying Vindral SDK instance, if one has been created. */
    get instance(): Vindral | undefined;
    connect(): void;
}
1478
/** `<vindral-fullscreen-button>`: button toggling fullscreen (see `isFullscreen`). */
declare class FullscreenButton extends VindralButton {
    static observedAttributes: string[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: string, old: string, value: string): void;
    set isFullscreen(value: boolean);
    get isFullscreen(): boolean;
    protected handleClick(_: Event): void;
}
/** Base class for menu buttons that open an associated listbox element. */
declare class VindralMenuButton extends VindralButton {
    #private;
    constructor();
    connectedCallback(): void;
    set button(button: HTMLElement);
    set listbox(listbox: HTMLElement);
    set listboxSlot(listboxSlot: HTMLElement);
    enable(): void;
    hide(): void;
    protected handleClick(e: Event): void;
}
/** `<vindral-language-menu>`: menu button for language selection. */
declare class LanguageMenu extends VindralMenuButton {
    #private;
    static observedAttributes: string[];
    constructor();
    connectedCallback(): void;
    attributeChangedCallback(name: string, old: string, value: string): void;
}
1506
/** `<vindral-language-menu-list>`: listbox for languages and text tracks. */
declare class LanguageMenuList extends HTMLElement {
    #private;
    static observedAttributes: ("language" | "languages" | "text-tracks" | "text-track")[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: string, old: string, value: string): void;
    private set languages(value);
    private set textTracks(value);
    private set language(value);
    private set textTrack(value);
    get keysUsed(): string[];
    handleEvent: (event: Event) => void;
    focus(): void;
}
/** `<vindral-mute-button>`: button toggling the muted state (see `muted`). */
declare class MuteButton extends VindralButton {
    static observedAttributes: string[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: string, old: string, value: string): void;
    set muted(value: boolean);
    get muted(): boolean;
    protected handleClick(_: Event): void;
}
/** `<vindral-pip-button>`: button toggling picture-in-picture (see `isPictureInPictureActive`). */
declare class PictureInPictureButton extends VindralButton {
    static observedAttributes: string[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: string, old: string, value: string): void;
    set isPictureInPictureActive(value: boolean);
    get isPictureInPictureActive(): boolean;
    protected handleClick(_: Event): void;
}
/** `<vindral-play-button>`: button toggling play/pause (see `paused`). */
declare class PlayButton extends VindralButton {
    static observedAttributes: string[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: string, old: string, value: string): void;
    set paused(value: boolean);
    get paused(): boolean;
    protected handleClick(_: Event): void;
}
1551
/**
 * `<vindral-player>`: top-level player element. Configured entirely through
 * the attributes listed in `observedAttributes`; the underlying SDK object is
 * available through `instance` once created.
 */
declare class Player extends HTMLElement {
    #private;
    static observedAttributes: ("title" | "poster" | "language" | "channels" | "buffering" | "paused" | "volume" | "muted" | "user-interacting" | "is-casting" | "cast-available" | "cast-receiver-name" | "ui-locked" | "is-fullscreen" | "is-fullscreen-fallback" | "rendition-levels" | "rendition-level" | "max-video-bit-rate" | "channel-id" | "channel-group-id" | "pip-available" | "is-pip" | "airplay-available" | "is-airplaying" | "media" | "languages" | "text-tracks" | "text-track" | "needs-user-input" | "authentication-token" | "volume-level" | "cast" | "airplay" | "pip" | "fullscreen" | "vu-meter" | "poster-src" | "url" | "advanced" | "offline" | "edge-url" | "target-buffer-time" | "cast-receiver-id" | "cast-background" | "log-level" | "max-size" | "min-buffer-time" | "max-buffer-time" | "max-audio-bit-rate" | "burst-enabled" | "mse-enabled" | "mse-opus-enabled" | "ios-background-play-enabled" | "ios-wake-lock-enabled" | "ios-media-element-enabled" | "abr-enabled" | "size-based-resolution-cap-enabled" | "telemetry-enabled" | "video-codecs" | "drm-headers" | "drm-queryparams" | "auto-instance-enabled" | "refresh-poster-enabled")[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: string, oldValue?: string, newValue?: string): void;
    /** The underlying Vindral SDK instance, if one has been created. */
    get instance(): Vindral | undefined;
}
/**
 * Attribute names observed by PosterOverlay. Note: since `observedAttributes`
 * is typed `string[]`, this currently resolves to `string`.
 */
type PosterOverlayAttributes = (typeof PosterOverlay.observedAttributes)[number];
/** `<vindral-poster-overlay>`: poster image overlay (see `posterSrc`, `paused`, `disabled`). */
declare class PosterOverlay extends HTMLElement {
    #private;
    static observedAttributes: string[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: PosterOverlayAttributes, oldValue: string, newValue: string): void;
    get disabled(): boolean;
    set disabled(value: boolean);
    get posterSrc(): string | null;
    get paused(): boolean;
}
/** `<vindral-rendition-levels-menu>`: menu button for quality selection. */
declare class RenditionLevelsMenu extends VindralMenuButton {
    #private;
    static observedAttributes: string[];
    constructor();
    connectedCallback(): void;
    attributeChangedCallback(name: string, old: string, value: string): void;
}
1580
/** `<vindral-rendition-levels-menu-list>`: listbox driven by "rendition-levels" and "max-video-bit-rate". */
declare class RenditionLevelsMenuList extends HTMLElement {
    #private;
    static observedAttributes: ("rendition-levels" | "max-video-bit-rate")[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: string, old: string, value: string): void;
    private set list(value);
    private set maxVideoBitrate(value);
    get keysUsed(): string[];
    handleEvent: (event: Event) => void;
    focus(): void;
}
/** `<vindral-scroll-overlay>`: overlay exposing `open` and `visible` toggles. */
declare class ScrollOverlay extends HTMLElement {
    #private;
    static observedAttributes: string[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: string, old: string, value: string): void;
    handleEvent: (event: Event) => void;
    set open(value: boolean);
    get open(): boolean;
    set visible(value: boolean);
    get visible(): boolean;
}
/** Base class for slider controls wrapping an HTMLInputElement exposed as `range`. */
declare class VindralRange extends HTMLElement {
    #private;
    static observedAttributes: string[];
    // The underlying input element backing the slider.
    range: HTMLInputElement;
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: string, oldValue: string, newValue: string): void;
    enable(): void;
    disable(): void;
    handleEvent(event: Event): void;
    updateBar(): void;
    get keysUsed(): string[];
}
1620
/** `<vindral-volume-range>`: volume slider exposing `volume` and `muted`. */
declare class VolumeRange extends VindralRange {
    #private;
    static observedAttributes: string[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: string, old: string, value: string): void;
    // Note: returned as a string (slider value), not a number.
    get volume(): string;
    get muted(): boolean;
}
/** `<vindral-buffering-icon>`: icon driven by the "buffering" attribute. */
declare class BufferingIcon extends HTMLElement {
    #private;
    static observedAttributes: "buffering"[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
}
/** `<vindral-play-overlay>`: overlay prompting the user to start playback. */
declare class VindralPlayOverlay extends HTMLElement {
    #private;
    static observedAttributes: string[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
}
/** `<vindral-user-input-play-overlay>`: play overlay variant for the "needs user input" case. */
declare class VindralUserInputPlayOverlay extends VindralPlayOverlay {
    #private;
    static observedAttributes: string[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
}
/** `<vindral-message>`: element for displaying player messages. */
declare class VindralMessage extends HTMLElement {
    #private;
    static observedAttributes: string[];
    constructor();
    connectedCallback(): void;
    disconnectedCallback(): void;
    attributeChangedCallback(name: string, old: string, value: string): void;
}
1659
/**
 * Register custom elements for the Vindral player
 *
 * NOTE(review): presumably defines the `<vindral-*>` tags listed in
 * VindralHTMLElementTagNameMap with the CustomElementRegistry; call once
 * before using those tags in markup — confirm against implementation.
 */
export declare function registerComponents(): void;
1663
/**
 * Maps each Vindral custom-element tag name to its class. Useful for merging
 * into HTMLElementTagNameMap so that e.g. `document.querySelector("vindral-player")`
 * is typed as Player.
 */
export declare interface VindralHTMLElementTagNameMap {
    "vindral-controller": Controller;
    "vindral-control-bar": ControlBar;
    "vindral-play-button": PlayButton;
    "vindral-mute-button": MuteButton;
    "vindral-buffering-overlay": BufferingOverlay;
    "vindral-scroll-overlay": ScrollOverlay;
    "vindral-play-overlay": VindralPlayOverlay;
    "vindral-user-input-play-overlay": VindralUserInputPlayOverlay;
    "vindral-fullscreen-button": FullscreenButton;
    "vindral-rendition-levels-menu": RenditionLevelsMenu;
    "vindral-rendition-levels-menu-list": RenditionLevelsMenuList;
    "vindral-channel-grid-button": ChannelGridButton;
    "vindral-channel-grid": ChannelGrid;
    "vindral-channel-grid-item": ChannelGridItem;
    "vindral-pip-button": PictureInPictureButton;
    "vindral-airplay-button": AirPlayButton;
    "vindral-cast-button": CastButton;
    "vindral-cast-overlay": CastOverlay;
    "vindral-buffering-icon": BufferingIcon;
    "vindral-language-menu": LanguageMenu;
    "vindral-language-menu-list": LanguageMenuList;
    "vindral-message": VindralMessage;
    "vindral-volume-range": VolumeRange;
    "vindral-poster-overlay": PosterOverlay;
    "vindral-player": Player;
}
1690
+
1691
+ export {};