@stream-io/video-react-sdk 1.26.1 → 1.27.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,23 +4,83 @@ import {
  useCallback,
  useContext,
  useEffect,
+ useMemo,
  useRef,
  useState,
  } from 'react';
  import { flushSync } from 'react-dom';
- import clsx from 'clsx';
- import { useCall } from '@stream-io/video-react-bindings';
+ import { useCall, useCallStateHooks } from '@stream-io/video-react-bindings';
  import { Call, disposeOfMediaStream } from '@stream-io/video-client';
  import {
  BackgroundBlurLevel,
  BackgroundFilter,
  createRenderer,
  isPlatformSupported,
+ isMediaPipePlatformSupported,
  loadTFLite,
+ loadMediaPipe,
  PlatformSupportFlags,
+ VirtualBackground,
  Renderer,
  TFLite,
+ PerformanceStats,
  } from '@stream-io/video-filters-web';
+ import clsx from 'clsx';
+
+ /**
+ * Constants for FPS warning calculation.
+ * Smooths out quick spikes using an EMA, ignores brief outliers,
+ * and uses two thresholds to avoid flickering near the limit.
+ */
+ const ALPHA = 0.2;
+ const FPS_WARNING_THRESHOLD_LOWER = 23;
+ const FPS_WARNING_THRESHOLD_UPPER = 25;
+ const DEFAULT_FPS = 30;
+ const DEVIATION_LIMIT = 0.5;
+ const OUTLIER_PERSISTENCE = 5;
+
+ /**
+ * Configuration for performance metric thresholds.
+ */
+ export type BackgroundFiltersPerformanceThresholds = {
+ /**
+ * The lower FPS threshold for triggering a performance warning.
+ * When the EMA FPS falls below this value, a warning is shown.
+ * @default 23
+ */
+ fpsWarningThresholdLower?: number;
+
+ /**
+ * The upper FPS threshold for clearing a performance warning.
+ * When the EMA FPS rises above this value, the warning is cleared.
+ * @default 25
+ */
+ fpsWarningThresholdUpper?: number;
+
+ /**
+ * The default FPS value used as the initial value for the EMA (Exponential Moving Average)
+ * calculation and when stats are unavailable or when resetting the filter.
+ * @default 30
+ */
+ defaultFps?: number;
+ };
+
+ /**
+ * Represents the available background filter processing engines.
+ */
+ enum FilterEngine {
+ TF,
+ MEDIA_PIPE,
+ NONE,
+ }
+
+ /**
+ * Represents the possible reasons for background filter performance degradation.
+ */
+ export enum PerformanceDegradationReason {
+ FRAME_DROP = 'frame-drop',
+ CPU_THROTTLING = 'cpu-throttling',
+ }

  export type BackgroundFiltersProps = PlatformSupportFlags & {
  /**
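
For orientation, the FPS-warning machinery introduced in the hunk above reduces to an exponential moving average with two-threshold hysteresis (plus an outlier filter governed by DEVIATION_LIMIT and OUTLIER_PERSISTENCE). The following is a minimal standalone sketch of the core update rule using the default constants from the hunk above; the function and variable names are illustrative, not SDK exports:

// Illustrative sketch of the EMA + hysteresis rule behind the low-FPS warning.
const ALPHA = 0.2;
const LOWER = 23; // warn once the smoothed FPS drops below this
const UPPER = 25; // clear the warning only after it recovers above this

let ema = 30; // seeded with DEFAULT_FPS
let lowFpsWarning = false;

function onFpsSample(fps: number): void {
  // e.g. ema = 30, fps = 20  ->  0.2 * 20 + 0.8 * 30 = 28
  ema = ALPHA * fps + (1 - ALPHA) * ema;
  if (lowFpsWarning && ema > UPPER) lowFpsWarning = false;
  else if (!lowFpsWarning && ema < LOWER) lowFpsWarning = true;
}

The provider additionally skips up to OUTLIER_PERSISTENCE consecutive samples that deviate more than DEVIATION_LIMIT below the current average before folding them into the EMA.
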
@@ -47,7 +107,7 @@ export type BackgroundFiltersProps = PlatformSupportFlags & {

  /**
  * The base path for the TensorFlow Lite files.
- * @default 'https://unpkg.com/@stream-io/video-filters-web/tf'.
+ * @default 'https://unpkg.com/@stream-io/video-filters-web/mediapipe'.
  */
  basePath?: string;

@@ -60,19 +120,50 @@ export type BackgroundFiltersProps = PlatformSupportFlags & {
  tfFilePath?: string;

  /**
- * The path to the TensorFlow Lite model file.
- * Override this prop to use a custom path to the TensorFlow Lite model file
+ * The path to the MediaPipe model file.
+ * Override this prop to use a custom path to the MediaPipe model file
  * (e.g., if you choose to host it yourself).
  */
  modelFilePath?: string;

+ /**
+ * When true, the filter uses the legacy TensorFlow-based segmentation model.
+ * When false, it uses the default MediaPipe Tasks Vision model.
+ *
+ * Only enable this if you need to mimic the behavior of older SDK versions.
+ */
+ useLegacyFilter?: boolean;
+
  /**
  * When a started filter encounters an error, this callback will be executed.
  * The default behavior (not overridable) is unregistering a failed filter.
- * Use this callback to display UI error message, disable the corresponsing stream,
+ * Use this callback to display UI error message, disable the corresponding stream,
  * or to try registering the filter again.
  */
  onError?: (error: any) => void;
+
+ /**
+ * Configuration for performance metric thresholds.
+ * Use this to customize when performance warnings are triggered.
+ */
+ performanceThresholds?: BackgroundFiltersPerformanceThresholds;
+ };
+
+ /**
+ * Performance degradation information for background filters.
+ *
+ * Performance is calculated using an Exponential Moving Average (EMA) of FPS values
+ * to smooth out quick spikes and provide stable performance warnings.
+ */
+ export type BackgroundFiltersPerformance = {
+ /**
+ * Whether performance is currently degraded.
+ */
+ degraded: boolean;
+ /**
+ * Reasons for performance degradation.
+ */
+ reason?: Array<PerformanceDegradationReason>;
  };

  export type BackgroundFiltersAPI = {
@@ -86,6 +177,11 @@ export type BackgroundFiltersAPI = {
  */
  isReady: boolean;

+ /**
+ * Performance information for background filters.
+ */
+ performance: BackgroundFiltersPerformance;
+
  /**
  * Disables all background filters applied to the video.
  */
@@ -132,6 +228,34 @@ export const useBackgroundFilters = () => {
  return context;
  };

+ /**
+ * Determines which filter engine is available.
+ * MEDIA_PIPE is the default unless legacy filters are requested or MediaPipe is unsupported.
+ *
+ * Returns NONE if neither is supported.
+ */
+ const determineEngine = async (
+ useLegacyFilter: boolean | undefined,
+ forceSafariSupport: boolean | undefined,
+ forceMobileSupport: boolean | undefined,
+ ): Promise<FilterEngine> => {
+ const isTfPlatformSupported = await isPlatformSupported({
+ forceSafariSupport,
+ forceMobileSupport,
+ });
+
+ if (useLegacyFilter) {
+ return isTfPlatformSupported ? FilterEngine.TF : FilterEngine.NONE;
+ }
+
+ const isMediaPipeSupported = await isMediaPipePlatformSupported({
+ forceSafariSupport,
+ forceMobileSupport,
+ });
+
+ return isMediaPipeSupported ? FilterEngine.MEDIA_PIPE : FilterEngine.NONE;
+ };
+
  /**
  * A provider component that enables the use of background filters in your app.
  *
@@ -149,17 +273,122 @@ export const BackgroundFiltersProvider = (
  backgroundBlurLevel: bgBlurLevelFromProps = undefined,
  tfFilePath,
  modelFilePath,
+ useLegacyFilter,
  basePath,
  onError,
+ performanceThresholds,
  forceSafariSupport,
  forceMobileSupport,
  } = props;

+ const call = useCall();
+ const { useCallStatsReport } = useCallStateHooks();
+ const callStatsReport = useCallStatsReport();
+
  const [backgroundFilter, setBackgroundFilter] = useState(bgFilterFromProps);
  const [backgroundImage, setBackgroundImage] = useState(bgImageFromProps);
  const [backgroundBlurLevel, setBackgroundBlurLevel] =
  useState(bgBlurLevelFromProps);

+ const [showLowFpsWarning, setShowLowFpsWarning] = useState<boolean>(false);
+
+ const fpsWarningThresholdLower =
+ performanceThresholds?.fpsWarningThresholdLower ??
+ FPS_WARNING_THRESHOLD_LOWER;
+ const fpsWarningThresholdUpper =
+ performanceThresholds?.fpsWarningThresholdUpper ??
+ FPS_WARNING_THRESHOLD_UPPER;
+ const defaultFps = performanceThresholds?.defaultFps ?? DEFAULT_FPS;
+
+ const emaRef = useRef<number>(defaultFps);
+ const outlierStreakRef = useRef<number>(0);
+
+ const handleStats = useCallback(
+ (stats: PerformanceStats) => {
+ const fps = stats?.fps;
+ if (fps === undefined || fps === null) {
+ emaRef.current = defaultFps;
+ outlierStreakRef.current = 0;
+ setShowLowFpsWarning(false);
+ return;
+ }
+
+ const prevEma = emaRef.current;
+ const deviation = Math.abs(fps - prevEma) / prevEma;
+
+ const isOutlier = fps < prevEma && deviation > DEVIATION_LIMIT;
+ outlierStreakRef.current = isOutlier ? outlierStreakRef.current + 1 : 0;
+ if (isOutlier && outlierStreakRef.current < OUTLIER_PERSISTENCE) return;
+
+ emaRef.current = ALPHA * fps + (1 - ALPHA) * prevEma;
+
+ setShowLowFpsWarning((prev) => {
+ if (prev && emaRef.current > fpsWarningThresholdUpper) return false;
+ if (!prev && emaRef.current < fpsWarningThresholdLower) return true;
+
+ return prev;
+ });
+ },
+ [fpsWarningThresholdLower, fpsWarningThresholdUpper, defaultFps],
+ );
+
+ const performance: BackgroundFiltersPerformance = useMemo(() => {
+ if (!backgroundFilter) {
+ return { degraded: false };
+ }
+
+ const reasons: Array<PerformanceDegradationReason> = [];
+
+ if (showLowFpsWarning) {
+ reasons.push(PerformanceDegradationReason.FRAME_DROP);
+ }
+
+ const qualityLimitationReasons =
+ callStatsReport?.publisherStats?.qualityLimitationReasons;
+
+ if (
+ showLowFpsWarning &&
+ qualityLimitationReasons &&
+ qualityLimitationReasons?.includes('cpu')
+ ) {
+ reasons.push(PerformanceDegradationReason.CPU_THROTTLING);
+ }
+
+ return {
+ degraded: reasons.length > 0,
+ reason: reasons.length > 0 ? reasons : undefined,
+ };
+ }, [
+ showLowFpsWarning,
+ callStatsReport?.publisherStats?.qualityLimitationReasons,
+ backgroundFilter,
+ ]);
+
+ const prevDegradedRef = useRef<boolean | undefined>(undefined);
+ useEffect(() => {
+ const currentDegraded = performance.degraded;
+ const prevDegraded = prevDegradedRef.current;
+
+ if (
+ !!backgroundFilter &&
+ prevDegraded !== undefined &&
+ prevDegraded !== currentDegraded
+ ) {
+ call?.tracer.trace('backgroundFilters.performance', {
+ degraded: currentDegraded,
+ reason: performance?.reason,
+ fps: emaRef.current,
+ });
+ }
+ prevDegradedRef.current = currentDegraded;
+ }, [
+ performanceThresholds,
+ performance.degraded,
+ performance.reason,
+ backgroundFilter,
+ call?.tracer,
+ ]);
+
  const applyBackgroundImageFilter = useCallback((imageUrl: string) => {
  setBackgroundFilter('image');
  setBackgroundImage(imageUrl);
@@ -177,24 +406,45 @@ export const BackgroundFiltersProvider = (
  setBackgroundFilter(undefined);
  setBackgroundImage(undefined);
  setBackgroundBlurLevel(undefined);
- }, []);

+ emaRef.current = defaultFps;
+ outlierStreakRef.current = 0;
+ setShowLowFpsWarning(false);
+ }, [defaultFps]);
+
+ const [engine, setEngine] = useState<FilterEngine>(FilterEngine.NONE);
  const [isSupported, setIsSupported] = useState(false);
  useEffect(() => {
- isPlatformSupported({
+ determineEngine(
+ useLegacyFilter,
  forceSafariSupport,
  forceMobileSupport,
- }).then(setIsSupported);
- }, [forceMobileSupport, forceSafariSupport]);
+ ).then((determinedEngine) => {
+ setEngine(determinedEngine);
+ setIsSupported(determinedEngine !== FilterEngine.NONE);
+ });
+ }, [forceMobileSupport, forceSafariSupport, useLegacyFilter]);

  const [tfLite, setTfLite] = useState<TFLite>();
  useEffect(() => {
- // don't try to load TFLite if the platform is not supported
- if (!isSupported) return;
+ if (engine !== FilterEngine.TF) return;
+
  loadTFLite({ basePath, modelFilePath, tfFilePath })
  .then(setTfLite)
  .catch((err) => console.error('Failed to load TFLite', err));
- }, [basePath, isSupported, modelFilePath, tfFilePath]);
+ }, [basePath, engine, modelFilePath, tfFilePath]);
+
+ const [mediaPipe, setMediaPipe] = useState<ArrayBuffer>();
+ useEffect(() => {
+ if (engine !== FilterEngine.MEDIA_PIPE) return;
+
+ loadMediaPipe({
+ basePath: basePath,
+ modelPath: modelFilePath,
+ })
+ .then(setMediaPipe)
+ .catch((err) => console.error('Failed to preload MediaPipe', err));
+ }, [engine, modelFilePath, basePath]);

  const handleError = useCallback(
  (error: any) => {
@@ -207,11 +457,13 @@ export const BackgroundFiltersProvider = (
  [disableBackgroundFilter, onError],
  );

+ const isReady = useLegacyFilter ? !!tfLite : !!mediaPipe;
  return (
  <BackgroundFiltersContext.Provider
  value={{
  isSupported,
- isReady: !!tfLite,
+ performance,
+ isReady,
  backgroundImage,
  backgroundBlurLevel,
  backgroundFilter,
@@ -226,34 +478,64 @@ export const BackgroundFiltersProvider = (
  }}
  >
  {children}
- {tfLite && <BackgroundFilters tfLite={tfLite} />}
+ {isReady && (
+ <BackgroundFilters
+ tfLite={tfLite}
+ engine={engine}
+ onStats={handleStats}
+ />
+ )}
  </BackgroundFiltersContext.Provider>
  );
  };

- const BackgroundFilters = (props: { tfLite: TFLite }) => {
+ const BackgroundFilters = (props: {
+ tfLite?: TFLite;
+ engine: FilterEngine;
+ onStats: (stats: PerformanceStats) => void;
+ }) => {
  const call = useCall();
- const { children, start } = useRenderer(props.tfLite, call);
- const { backgroundFilter, onError } = useBackgroundFilters();
+ const { children, start } = useRenderer(props.tfLite, call, props.engine);
+ const { onError, backgroundFilter } = useBackgroundFilters();
  const handleErrorRef = useRef<((error: any) => void) | undefined>(undefined);
  handleErrorRef.current = onError;

+ const handleStatsRef = useRef<
+ ((stats: PerformanceStats) => void) | undefined
+ >(undefined);
+ handleStatsRef.current = props.onStats;
+
  useEffect(() => {
  if (!call || !backgroundFilter) return;
- const { unregister } = call.camera.registerFilter((ms) =>
- start(ms, (error) => handleErrorRef.current?.(error)),
- );
+
+ const { unregister } = call.camera.registerFilter((ms) => {
+ return start(
+ ms,
+ (error) => handleErrorRef.current?.(error),
+ (stats: PerformanceStats) => handleStatsRef.current?.(stats),
+ );
+ });
  return () => {
  unregister().catch((err) => console.warn(`Can't unregister filter`, err));
  };
- }, [backgroundFilter, call, start]);
+ }, [call, start, backgroundFilter]);

  return children;
  };

- const useRenderer = (tfLite: TFLite, call: Call | undefined) => {
- const { backgroundFilter, backgroundBlurLevel, backgroundImage } =
- useBackgroundFilters();
+ const useRenderer = (
+ tfLite: TFLite | undefined,
+ call: Call | undefined,
+ engine: FilterEngine,
+ ) => {
+ const {
+ backgroundFilter,
+ backgroundBlurLevel,
+ backgroundImage,
+ modelFilePath,
+ basePath,
+ } = useBackgroundFilters();
+
  const videoRef = useRef<HTMLVideoElement>(null);
  const canvasRef = useRef<HTMLCanvasElement>(null);
  const bgImageRef = useRef<HTMLImageElement>(null);
@@ -265,8 +547,13 @@ const useRenderer = (tfLite: TFLite, call: Call | undefined) => {
  );

  const start = useCallback(
- (ms: MediaStream, onError?: (error: any) => void) => {
+ (
+ ms: MediaStream,
+ onError?: (error: any) => void,
+ onStats?: (stats: PerformanceStats) => void,
+ ) => {
  let outputStream: MediaStream | undefined;
+ let processor: VirtualBackground | undefined;
  let renderer: Renderer | undefined;

  const output = new Promise<MediaStream>((resolve, reject) => {
@@ -279,58 +566,116 @@ const useRenderer = (tfLite: TFLite, call: Call | undefined) => {
  const canvasEl = canvasRef.current;
  const bgImageEl = bgImageRef.current;

- if (!videoEl || !canvasEl || (backgroundImage && !bgImageEl)) {
- // You should start renderer in effect or event handlers
- reject(new Error('Renderer started before elements are ready'));
+ const [track] = ms.getVideoTracks();
+
+ if (!track) {
+ reject(new Error('No video tracks in input media stream'));
  return;
  }

- videoEl.srcObject = ms;
- videoEl.play().then(
- () => {
- const [track] = ms.getVideoTracks();
-
- if (!track) {
- reject(new Error('No video tracks in input media stream'));
- return;
- }
-
- const trackSettings = track.getSettings();
- flushSync(() =>
- setVideoSize({
- width: trackSettings.width ?? 0,
- height: trackSettings.height ?? 0,
- }),
- );
- call?.tracer.trace('backgroundFilters.enable', {
- backgroundFilter,
+ if (engine === FilterEngine.MEDIA_PIPE) {
+ call?.tracer.trace('backgroundFilters.enable', {
+ backgroundFilter,
+ backgroundBlurLevel,
+ backgroundImage,
+ engine,
+ });
+
+ if (!videoEl) {
+ reject(new Error('Renderer started before elements are ready'));
+ return;
+ }
+
+ const trackSettings = track.getSettings();
+ flushSync(() =>
+ setVideoSize({
+ width: trackSettings.width ?? 0,
+ height: trackSettings.height ?? 0,
+ }),
+ );
+
+ processor = new VirtualBackground(
+ track,
+ {
+ basePath: basePath,
+ modelPath: modelFilePath,
  backgroundBlurLevel,
  backgroundImage,
+ backgroundFilter,
+ },
+ { onError, onStats },
+ );
+ processor
+ .start()
+ .then((processedTrack) => {
+ outputStream = new MediaStream([processedTrack]);
+ resolve(outputStream);
+ })
+ .catch((error) => {
+ reject(error);
  });
- renderer = createRenderer(
- tfLite,
- videoEl,
- canvasEl,
- {
+
+ return;
+ }
+
+ if (engine === FilterEngine.TF) {
+ if (!videoEl || !canvasEl || (backgroundImage && !bgImageEl)) {
+ reject(new Error('Renderer started before elements are ready'));
+ return;
+ }
+
+ videoEl.srcObject = ms;
+ videoEl.play().then(
+ () => {
+ const trackSettings = track.getSettings();
+ flushSync(() =>
+ setVideoSize({
+ width: trackSettings.width ?? 0,
+ height: trackSettings.height ?? 0,
+ }),
+ );
+ call?.tracer.trace('backgroundFilters.enable', {
  backgroundFilter,
  backgroundBlurLevel,
- backgroundImage: bgImageEl ?? undefined,
- },
- onError,
- );
- outputStream = canvasEl.captureStream();
- resolve(outputStream);
- },
- () => {
- reject(new Error('Could not play the source video stream'));
- },
- );
+ backgroundImage,
+ engine,
+ });
+
+ if (!tfLite) {
+ reject(new Error('TensorFlow Lite not loaded'));
+ return;
+ }
+
+ renderer = createRenderer(
+ tfLite,
+ videoEl,
+ canvasEl,
+ {
+ backgroundFilter,
+ backgroundBlurLevel,
+ backgroundImage: bgImageEl ?? undefined,
+ },
+ onError,
+ );
+ outputStream = canvasEl.captureStream();
+
+ resolve(outputStream);
+ },
+ () => {
+ reject(new Error('Could not play the source video stream'));
+ },
+ );
+ return;
+ }
+
+ reject(new Error('No supported engine available'));
  });

  return {
  output,
  stop: () => {
  call?.tracer.trace('backgroundFilters.disable', null);
+ processor?.stop();
  renderer?.dispose();
  if (videoRef.current) videoRef.current.srcObject = null;
  if (outputStream) disposeOfMediaStream(outputStream);
@@ -343,6 +688,9 @@ const useRenderer = (tfLite: TFLite, call: Call | undefined) => {
  backgroundImage,
  call?.tracer,
  tfLite,
+ engine,
+ modelFilePath,
+ basePath,
  ],
  );

@@ -378,8 +726,5 @@ const useRenderer = (tfLite: TFLite, call: Call | undefined) => {
  </div>
  );

- return {
- start,
- children,
- };
+ return { start, children };
  };
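
To put the provider changes above in context, here is a minimal consumer-side sketch. It assumes BackgroundFiltersProvider and useBackgroundFilters remain re-exported from @stream-io/video-react-sdk and that the provider is rendered inside an active call; the banner component and the threshold values are illustrative, not part of the SDK:

import { ReactNode } from 'react';
import {
  BackgroundFiltersProvider,
  useBackgroundFilters,
} from '@stream-io/video-react-sdk';

// Illustrative consumer component: surfaces the new performance info.
const FilterPerformanceBanner = () => {
  const { isReady, performance } = useBackgroundFilters();
  if (!isReady || !performance.degraded) return null;
  return (
    <div className="my-filter-warning">
      Background filter performance is degraded: {performance.reason?.join(', ')}
    </div>
  );
};

// Illustrative wrapper, assumed to be rendered inside an active call.
const MyCallFilters = ({ children }: { children: ReactNode }) => (
  <BackgroundFiltersProvider
    useLegacyFilter={false} // MediaPipe engine (default); set true to force the legacy TFLite path
    performanceThresholds={{
      fpsWarningThresholdLower: 20,
      fpsWarningThresholdUpper: 24,
    }}
  >
    <FilterPerformanceBanner />
    {children}
  </BackgroundFiltersProvider>
);
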
@@ -1,6 +1,7 @@
  import { useCallStateHooks } from '@stream-io/video-react-bindings';
  import { DeviceSelector } from './DeviceSelector';
  import { AudioVolumeIndicator } from './AudioVolumeIndicator';
+ import { SpeakerTest } from './SpeakerTest';

  export type DeviceSelectorAudioInputProps = {
  title?: string;
@@ -41,11 +42,15 @@ export const DeviceSelectorAudioInput = ({
  export type DeviceSelectorAudioOutputProps = {
  title?: string;
  visualType?: 'list' | 'dropdown';
+ speakerTestVisible?: boolean;
+ speakerTestAudioUrl?: string;
  };

  export const DeviceSelectorAudioOutput = ({
  title,
  visualType,
+ speakerTestVisible = true,
+ speakerTestAudioUrl,
  }: DeviceSelectorAudioOutputProps) => {
  const { useSpeakerState } = useCallStateHooks();
  const { speaker, selectedDevice, devices, isDeviceSelectionSupported } =
@@ -64,6 +69,13 @@ export const DeviceSelectorAudioOutput = ({
  title={title}
  visualType={visualType}
  icon="speaker"
- />
+ >
+ {speakerTestVisible && (
+ <>
+ <hr className="str-video__device-settings__separator" />
+ <SpeakerTest audioUrl={speakerTestAudioUrl} />
+ </>
+ )}
+ </DeviceSelector>
  );
  };
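
The second file adds an optional speaker test to the audio output selector, enabled by default. A short usage sketch; the wrapper component is illustrative and the audio URL is a placeholder you would host yourself:

import { DeviceSelectorAudioOutput } from '@stream-io/video-react-sdk';

// Keep the built-in speaker test, but point it at a self-hosted test clip.
export const SpeakerSettings = () => (
  <DeviceSelectorAudioOutput
    title="Speakers"
    visualType="list"
    speakerTestVisible
    speakerTestAudioUrl="/sounds/speaker-test.mp3" // placeholder path
  />
);

// Or hide the test entirely:
// <DeviceSelectorAudioOutput speakerTestVisible={false} />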