@stream-io/video-react-sdk 1.27.2 → 1.28.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,6 +2,10 @@ import { ReactNode } from 'react';
2
2
  export type CallStatsProps = {
3
3
  latencyLowBound?: number;
4
4
  latencyHighBound?: number;
5
+ audioJitterLowBound?: number;
6
+ audioJitterHighBound?: number;
7
+ videoJitterLowBound?: number;
8
+ videoJitterHighBound?: number;
5
9
  showCodecInfo?: boolean;
6
10
  LatencyChartSuspenseFallback?: ReactNode;
7
11
  };
@@ -11,6 +11,15 @@ export type LivestreamLayoutProps = {
11
11
  * Whether to show the participant count. Defaults to `true`.
12
12
  */
13
13
  showParticipantCount?: boolean;
14
+ /**
15
+ * Whether to humanize the participant count. Defaults to `true`.
16
+ * @example
17
+ * 1000 participants -> 1k
18
+ * 1500 participants -> 1.5k
19
+ * 10_000 participants -> 10k
20
+ * 100_000 participants -> 100k
21
+ */
22
+ humanizeParticipantCount?: boolean;
14
23
  /**
15
24
  * Whether to enable fullscreen mode. Defaults to `true`.
16
25
  */
@@ -27,6 +36,10 @@ export type LivestreamLayoutProps = {
27
36
  * Whether to show the speaker name. Defaults to `false`.
28
37
  */
29
38
  showSpeakerName?: boolean;
39
+ /**
40
+ * Whether to show the mute button. Defaults to `true`.
41
+ */
42
+ showMuteButton?: boolean;
30
43
  /**
31
44
  * When set to `false` disables mirroring of the local participant's video.
32
45
  * @default true
@@ -59,6 +72,15 @@ export type BackstageLayoutProps = {
59
72
  * the livestream went live. Defaults to `true`.
60
73
  */
61
74
  showEarlyParticipantCount?: boolean;
75
+ /**
76
+ * Show the participant count in a humanized format. Defaults to `true`.
77
+ * @example
78
+ * 1000 participants -> 1k
79
+ * 1500 participants -> 1.5k
80
+ * 10_000 participants -> 10k
81
+ 100_000 participants -> 100k
82
+ */
83
+ humanizeParticipantCount?: boolean;
62
84
  };
63
85
  export declare const BackstageLayout: {
64
86
  (props: BackstageLayoutProps): import("react/jsx-runtime").JSX.Element;
@@ -3,3 +3,4 @@ export * from './usePersistedDevicePreferences';
3
3
  export * from './useScrollPosition';
4
4
  export * from './useRequestPermission';
5
5
  export * from './useDeviceList';
6
+ export * from './useModeration';
@@ -0,0 +1,8 @@
1
+ export interface ModerationOptions {
2
+ /**
3
+ * How long the moderation effect should stay active before being disabled.
4
+ * Set to `0` to keep it active indefinitely. Defaults to 5000 ms.
5
+ */
6
+ duration?: number;
7
+ }
8
+ export declare const useModeration: (options?: ModerationOptions) => void;
package/index.ts CHANGED
@@ -14,6 +14,7 @@ export {
14
14
  useRequestPermission,
15
15
  usePersistedDevicePreferences,
16
16
  useDeviceList,
17
+ useModeration,
17
18
  } from './src/hooks';
18
19
  export { applyFilter, type Filter } from './src/utilities/filter';
19
20
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@stream-io/video-react-sdk",
3
- "version": "1.27.2",
3
+ "version": "1.28.1",
4
4
  "main": "./dist/index.cjs.js",
5
5
  "module": "./dist/index.es.js",
6
6
  "types": "./dist/index.d.ts",
@@ -31,9 +31,9 @@
31
31
  ],
32
32
  "dependencies": {
33
33
  "@floating-ui/react": "^0.27.6",
34
- "@stream-io/video-client": "1.37.3",
35
- "@stream-io/video-filters-web": "0.5.1",
36
- "@stream-io/video-react-bindings": "1.11.4",
34
+ "@stream-io/video-client": "1.38.1",
35
+ "@stream-io/video-filters-web": "0.6.0",
36
+ "@stream-io/video-react-bindings": "1.12.1",
37
37
  "chart.js": "^4.4.4",
38
38
  "clsx": "^2.0.0",
39
39
  "react-chartjs-2": "^5.3.0"
@@ -47,7 +47,7 @@
47
47
  "@rollup/plugin-replace": "^6.0.2",
48
48
  "@rollup/plugin-typescript": "^12.1.4",
49
49
  "@stream-io/audio-filters-web": "^0.6.1",
50
- "@stream-io/video-styling": "^1.8.0",
50
+ "@stream-io/video-styling": "^1.9.0",
51
51
  "@types/react": "~19.1.17",
52
52
  "@types/react-dom": "~19.1.11",
53
53
  "react": "19.1.0",
@@ -20,6 +20,10 @@ enum Status {
20
20
  export type CallStatsProps = {
21
21
  latencyLowBound?: number;
22
22
  latencyHighBound?: number;
23
+ audioJitterLowBound?: number;
24
+ audioJitterHighBound?: number;
25
+ videoJitterLowBound?: number;
26
+ videoJitterHighBound?: number;
23
27
  showCodecInfo?: boolean;
24
28
  LatencyChartSuspenseFallback?: ReactNode;
25
29
  };
@@ -28,6 +32,10 @@ export const CallStats = (props: CallStatsProps) => {
28
32
  const {
29
33
  latencyLowBound = 75,
30
34
  latencyHighBound = 400,
35
+ audioJitterLowBound = 10,
36
+ audioJitterHighBound = 30,
37
+ videoJitterLowBound = 20,
38
+ videoJitterHighBound = 50,
31
39
  showCodecInfo = false,
32
40
  LatencyChartSuspenseFallback = null,
33
41
  } = props;
@@ -41,6 +49,8 @@ export const CallStats = (props: CallStatsProps) => {
41
49
  const { t } = useI18n();
42
50
  const [publishBitrate, setPublishBitrate] = useState('-');
43
51
  const [subscribeBitrate, setSubscribeBitrate] = useState('-');
52
+ const [publishAudioBitrate, setPublishAudioBitrate] = useState('-');
53
+ const [subscribeAudioBitrate, setSubscribeAudioBitrate] = useState('-');
44
54
  const previousStats = useRef<CallStatsReport>(undefined);
45
55
  const { useCallStatsReport } = useCallStateHooks();
46
56
  const callStatsReport = useCallStatsReport();
@@ -61,12 +71,26 @@ export const CallStats = (props: CallStatsProps) => {
61
71
  callStatsReport,
62
72
  );
63
73
  });
74
+ setPublishAudioBitrate(() => {
75
+ return calculatePublishAudioBitrate(
76
+ previousCallStatsReport,
77
+ callStatsReport,
78
+ );
79
+ });
80
+ setSubscribeAudioBitrate(() => {
81
+ return calculateSubscribeAudioBitrate(
82
+ previousCallStatsReport,
83
+ callStatsReport,
84
+ );
85
+ });
64
86
 
65
87
  setLatencyBuffer((latencyBuf) => {
66
88
  const newLatencyBuffer = latencyBuf.slice(-19);
67
89
  newLatencyBuffer.push({
68
90
  x: callStatsReport.timestamp,
69
- y: callStatsReport.publisherStats.averageRoundTripTimeInMs,
91
+ y:
92
+ callStatsReport.publisherStats.averageRoundTripTimeInMs ||
93
+ callStatsReport.publisherAudioStats.averageRoundTripTimeInMs,
70
94
  });
71
95
  return newLatencyBuffer;
72
96
  });
@@ -80,6 +104,16 @@ export const CallStats = (props: CallStatsProps) => {
80
104
  value: callStatsReport?.publisherStats.averageRoundTripTimeInMs || 0,
81
105
  };
82
106
 
107
+ const audioJitterComparison = {
108
+ lowBound: audioJitterLowBound,
109
+ highBound: audioJitterHighBound,
110
+ };
111
+
112
+ const videoJitterComparison = {
113
+ lowBound: videoJitterLowBound,
114
+ highBound: videoJitterHighBound,
115
+ };
116
+
83
117
  return (
84
118
  <div className="str-video__call-stats">
85
119
  {callStatsReport && (
@@ -111,7 +145,7 @@ export const CallStats = (props: CallStatsProps) => {
111
145
  className="str-video__call-stats__icon"
112
146
  icon="network-quality"
113
147
  />
114
- {t('Call performance')}
148
+ {t('Video performance')}
115
149
  </h3>
116
150
  <p className="str-video__call-stats__description">
117
151
  {t('Review the key data points below to assess call performance')}
@@ -129,7 +163,7 @@ export const CallStats = (props: CallStatsProps) => {
129
163
  label={t('Receive jitter')}
130
164
  value={`${callStatsReport.subscriberStats.averageJitterInMs} ms.`}
131
165
  comparison={{
132
- ...latencyComparison,
166
+ ...videoJitterComparison,
133
167
  value: callStatsReport.subscriberStats.averageJitterInMs,
134
168
  }}
135
169
  />
@@ -137,7 +171,7 @@ export const CallStats = (props: CallStatsProps) => {
137
171
  label={t('Publish jitter')}
138
172
  value={`${callStatsReport.publisherStats.averageJitterInMs} ms.`}
139
173
  comparison={{
140
- ...latencyComparison,
174
+ ...videoJitterComparison,
141
175
  value: callStatsReport.publisherStats.averageJitterInMs,
142
176
  }}
143
177
  />
@@ -160,6 +194,54 @@ export const CallStats = (props: CallStatsProps) => {
160
194
  <StatCard label={t('Publish bitrate')} value={publishBitrate} />
161
195
  <StatCard label={t('Receiving bitrate')} value={subscribeBitrate} />
162
196
  </div>
197
+
198
+ <div className="str-video__call-stats__header">
199
+ <h3 className="str-video__call-stats__heading">
200
+ <Icon className="str-video__call-stats__icon" icon="mic" />
201
+ {t('Audio Performance')}
202
+ </h3>
203
+ <p className="str-video__call-stats__description">
204
+ {t(
205
+ 'Review the key audio data points below to assess audio performance',
206
+ )}
207
+ </p>
208
+ </div>
209
+
210
+ <div className="str-video__call-stats__card-container">
211
+ <StatCard
212
+ label={t('Latency')}
213
+ value={`${callStatsReport.publisherAudioStats.averageRoundTripTimeInMs} ms.`}
214
+ comparison={latencyComparison}
215
+ />
216
+ <StatCard
217
+ label={t('Audio bitrate (publish)')}
218
+ value={publishAudioBitrate}
219
+ />
220
+ <StatCard
221
+ label={t('Audio bitrate (receive)')}
222
+ value={subscribeAudioBitrate}
223
+ />
224
+ <StatCard
225
+ label={t('Audio jitter (publish)')}
226
+ value={`${callStatsReport.publisherAudioStats.averageJitterInMs} ms.`}
227
+ comparison={{
228
+ ...audioJitterComparison,
229
+ value: callStatsReport.publisherAudioStats.averageJitterInMs,
230
+ }}
231
+ />
232
+ <StatCard
233
+ label={t('Audio jitter (receive)')}
234
+ value={`${callStatsReport.subscriberAudioStats.averageJitterInMs} ms.`}
235
+ comparison={{
236
+ ...audioJitterComparison,
237
+ value: callStatsReport.subscriberAudioStats.averageJitterInMs,
238
+ }}
239
+ />
240
+ <StatCard
241
+ label={t('Audio codec')}
242
+ value={formatAudioCodec(callStatsReport)}
243
+ />
244
+ </div>
163
245
  </>
164
246
  )}
165
247
  </div>
@@ -279,6 +361,15 @@ const formatCodec = (callStatsReport: CallStatsReport): string => {
279
361
  return name ? ` (${name})` : '';
280
362
  };
281
363
 
364
+ const formatAudioCodec = (callStatsReport: CallStatsReport): string => {
365
+ const { codecPerTrackType } = callStatsReport.publisherAudioStats;
366
+ if (!codecPerTrackType || !codecPerTrackType[SfuModels.TrackType.AUDIO]) {
367
+ return '';
368
+ }
369
+ const [, name] = codecPerTrackType[SfuModels.TrackType.AUDIO].split('/');
370
+ return name ?? '';
371
+ };
372
+
282
373
  const calculatePublishBitrate = (
283
374
  previousCallStatsReport: CallStatsReport,
284
375
  callStatsReport: CallStatsReport,
@@ -318,3 +409,31 @@ const calculateSubscribeBitrate = (
318
409
  const timeElapsed = timestamp - previousTimestamp;
319
410
  return `${((bytesReceived * 8) / timeElapsed).toFixed(2)} kbps`;
320
411
  };
412
+
413
+ const calculatePublishAudioBitrate = (
414
+ previousCallStatsReport: CallStatsReport,
415
+ callStatsReport: CallStatsReport,
416
+ ) => {
417
+ const previousAudioStats = previousCallStatsReport.publisherAudioStats;
418
+ const audioStats = callStatsReport.publisherAudioStats;
419
+
420
+ const bytesSent =
421
+ audioStats.totalBytesSent - previousAudioStats.totalBytesSent;
422
+ const timeElapsed = audioStats.timestamp - previousAudioStats.timestamp;
423
+
424
+ return `${((bytesSent * 8) / timeElapsed).toFixed(2)} kbps`;
425
+ };
426
+
427
+ const calculateSubscribeAudioBitrate = (
428
+ previousCallStatsReport: CallStatsReport,
429
+ callStatsReport: CallStatsReport,
430
+ ) => {
431
+ const previousAudioStats = previousCallStatsReport.subscriberAudioStats;
432
+ const audioStats = callStatsReport.subscriberAudioStats;
433
+
434
+ const bytesReceived =
435
+ audioStats.totalBytesReceived - previousAudioStats.totalBytesReceived;
436
+ const timeElapsed = audioStats.timestamp - previousAudioStats.timestamp;
437
+
438
+ return `${((bytesReceived * 8) / timeElapsed).toFixed(2)} kbps`;
439
+ };
@@ -11,7 +11,7 @@ import {
11
11
  useCallStateHooks,
12
12
  useI18n,
13
13
  } from '@stream-io/video-react-bindings';
14
- import { hasScreenShare } from '@stream-io/video-client';
14
+ import { hasScreenShare, humanize } from '@stream-io/video-client';
15
15
  import { ParticipantView, useParticipantViewContext } from '../ParticipantView';
16
16
  import { ParticipantsAudio } from '../Audio';
17
17
  import {
@@ -33,6 +33,16 @@ export type LivestreamLayoutProps = {
33
33
  */
34
34
  showParticipantCount?: boolean;
35
35
 
36
+ /**
37
+ * Whether to humanize the participant count. Defaults to `true`.
38
+ * @example
39
+ * 1000 participants -> 1k
40
+ * 1500 participants -> 1.5k
41
+ * 10_000 participants -> 10k
42
+ * 100_000 participants -> 100k
43
+ */
44
+ humanizeParticipantCount?: boolean;
45
+
36
46
  /**
37
47
  * Whether to enable fullscreen mode. Defaults to `true`.
38
48
  */
@@ -53,6 +63,11 @@ export type LivestreamLayoutProps = {
53
63
  */
54
64
  showSpeakerName?: boolean;
55
65
 
66
+ /**
67
+ * Whether to show the mute button. Defaults to `true`.
68
+ */
69
+ showMuteButton?: boolean;
70
+
56
71
  /**
57
72
  * When set to `false` disables mirroring of the local participant's video.
58
73
  * @default true
@@ -94,6 +109,7 @@ export const LivestreamLayout = (props: LivestreamLayoutProps) => {
94
109
  showParticipantCount={props.showParticipantCount}
95
110
  showDuration={props.showDuration}
96
111
  showLiveBadge={props.showLiveBadge}
112
+ showMuteButton={props.showMuteButton}
97
113
  showSpeakerName={props.showSpeakerName}
98
114
  enableFullScreen={props.enableFullScreen}
99
115
  />
@@ -160,10 +176,21 @@ export type BackstageLayoutProps = {
160
176
  * the livestream went live. Defaults to `true`.
161
177
  */
162
178
  showEarlyParticipantCount?: boolean;
179
+
180
+ /**
181
+ * Show the participant count in a humanized format. Defaults to `true`.
182
+ * @example
183
+ * 1000 participants -> 1k
184
+ * 1500 participants -> 1.5k
185
+ * 10_000 participants -> 10k
186
+ * 100_000 participants -> 100k
187
+ */
188
+ humanizeParticipantCount?: boolean;
163
189
  };
164
190
 
165
191
  export const BackstageLayout = (props: BackstageLayoutProps) => {
166
- const { showEarlyParticipantCount = true } = props;
192
+ const { showEarlyParticipantCount = true, humanizeParticipantCount = true } =
193
+ props;
167
194
  const { useParticipantCount, useCallStartsAt } = useCallStateHooks();
168
195
  const participantCount = useParticipantCount();
169
196
  const startsAt = useCallStartsAt();
@@ -182,7 +209,9 @@ export const BackstageLayout = (props: BackstageLayoutProps) => {
182
209
  {showEarlyParticipantCount && (
183
210
  <span className="str-video__livestream-layout__early-viewers-count">
184
211
  {t('{{ count }} participants joined early', {
185
- count: participantCount,
212
+ count: humanizeParticipantCount
213
+ ? humanize(participantCount)
214
+ : participantCount,
186
215
  })}
187
216
  </span>
188
217
  )}
@@ -196,15 +225,19 @@ BackstageLayout.displayName = 'BackstageLayout';
196
225
  const ParticipantOverlay = (props: {
197
226
  enableFullScreen?: boolean;
198
227
  showParticipantCount?: boolean;
228
+ humanizeParticipantCount?: boolean;
199
229
  showDuration?: boolean;
200
230
  showLiveBadge?: boolean;
201
231
  showSpeakerName?: boolean;
232
+ showMuteButton?: boolean;
202
233
  }) => {
203
234
  const {
204
235
  enableFullScreen = true,
205
236
  showParticipantCount = true,
237
+ humanizeParticipantCount = true,
206
238
  showDuration = true,
207
239
  showLiveBadge = true,
240
+ showMuteButton = true,
208
241
  showSpeakerName = false,
209
242
  } = props;
210
243
  const overlayBarVisible =
@@ -212,12 +245,15 @@ const ParticipantOverlay = (props: {
212
245
  showParticipantCount ||
213
246
  showDuration ||
214
247
  showLiveBadge ||
248
+ showMuteButton ||
215
249
  showSpeakerName;
216
250
  const { participant } = useParticipantViewContext();
217
- const { useParticipantCount } = useCallStateHooks();
251
+ const { useParticipantCount, useSpeakerState } = useCallStateHooks();
218
252
  const participantCount = useParticipantCount();
219
253
  const duration = useUpdateCallDuration();
220
254
  const toggleFullScreen = useToggleFullScreen();
255
+ const { speaker, volume } = useSpeakerState();
256
+ const isSpeakerMuted = volume === 0;
221
257
  const { t } = useI18n();
222
258
  return (
223
259
  <div className="str-video__livestream-layout__overlay">
@@ -230,7 +266,9 @@ const ParticipantOverlay = (props: {
230
266
  )}
231
267
  {showParticipantCount && (
232
268
  <span className="str-video__livestream-layout__viewers-count">
233
- {participantCount}
269
+ {humanizeParticipantCount
270
+ ? humanize(participantCount)
271
+ : participantCount}
234
272
  </span>
235
273
  )}
236
274
  {showSpeakerName && (
@@ -246,6 +284,16 @@ const ParticipantOverlay = (props: {
246
284
  {formatDuration(duration)}
247
285
  </span>
248
286
  )}
287
+ {showMuteButton && (
288
+ <span
289
+ className={clsx(
290
+ 'str-video__livestream-layout__mute-button',
291
+ isSpeakerMuted &&
292
+ 'str-video__livestream-layout__mute-button--muted',
293
+ )}
294
+ onClick={() => speaker.setVolume(isSpeakerMuted ? 1 : 0)}
295
+ />
296
+ )}
249
297
  {enableFullScreen && (
250
298
  <span
251
299
  className="str-video__livestream-layout__go-fullscreen"
@@ -3,3 +3,4 @@ export * from './usePersistedDevicePreferences';
3
3
  export * from './useScrollPosition';
4
4
  export * from './useRequestPermission';
5
5
  export * from './useDeviceList';
6
+ export * from './useModeration';
@@ -0,0 +1,167 @@
1
+ import { useCallback, useEffect, useRef } from 'react';
2
+ import { useCall } from '@stream-io/video-react-bindings';
3
+
4
+ type FullScreenBlurType =
5
+ typeof import('@stream-io/video-filters-web').FullScreenBlur;
6
+
7
+ const isFullScreenBlurPlatformSupported = (): boolean => {
8
+ if (
9
+ typeof window === 'undefined' ||
10
+ typeof OffscreenCanvas === 'undefined' ||
11
+ typeof VideoFrame === 'undefined' ||
12
+ !window.WebGL2RenderingContext
13
+ ) {
14
+ return false;
15
+ }
16
+
17
+ try {
18
+ const canvas = new OffscreenCanvas(1, 1);
19
+ return !!canvas.getContext('webgl2', {
20
+ alpha: false,
21
+ antialias: false,
22
+ desynchronized: true,
23
+ });
24
+ } catch {
25
+ return false;
26
+ }
27
+ };
28
+
29
+ export interface ModerationOptions {
30
+ /**
31
+ * How long the moderation effect should stay active before being disabled.
32
+ * Set to `0` to keep it active indefinitely. Defaults to 5000 ms.
33
+ */
34
+ duration?: number;
35
+ }
36
+
37
+ export const useModeration = (options?: ModerationOptions) => {
38
+ const { duration = 5000 } = options || {};
39
+
40
+ const call = useCall();
41
+
42
+ const timeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);
43
+ const processorRef = useRef<InstanceType<FullScreenBlurType> | null>(null);
44
+ const unregisterRef = useRef<(() => Promise<void>) | null>(null);
45
+
46
+ const blurModulePromise = useRef<Promise<FullScreenBlurType> | null>(null);
47
+
48
+ /**
49
+ * Lazily loads and caches the video-filters-web module.
50
+ */
51
+ const loadVideoFiltersWebModule = useCallback(() => {
52
+ if (!blurModulePromise.current) {
53
+ blurModulePromise.current = import('@stream-io/video-filters-web')
54
+ .then((module) => module.FullScreenBlur)
55
+ .catch((error) => {
56
+ console.error('[moderation] Failed to import blur module:', error);
57
+ throw error;
58
+ });
59
+ }
60
+
61
+ return blurModulePromise.current;
62
+ }, []);
63
+
64
+ const disableBlur = useCallback(() => {
65
+ if (timeoutRef.current) {
66
+ clearTimeout(timeoutRef.current);
67
+ timeoutRef.current = null;
68
+ }
69
+
70
+ unregisterRef
71
+ .current?.()
72
+ .catch((err) => console.error('[moderation] unregister error:', err));
73
+
74
+ unregisterRef.current = null;
75
+ }, []);
76
+
77
+ const handleFallback = useCallback(async () => {
78
+ try {
79
+ await call?.camera.disable();
80
+ } catch (error) {
81
+ console.error('[moderation] Failed to disable camera:', error);
82
+ }
83
+ }, [call]);
84
+
85
+ useEffect(() => {
86
+ if (!call) return;
87
+
88
+ return call.on('call.moderation_warning', async () => {
89
+ try {
90
+ await loadVideoFiltersWebModule();
91
+ } catch (importErr) {
92
+ console.error('[moderation] Failed to import blur module:', importErr);
93
+ }
94
+ });
95
+ }, [call, loadVideoFiltersWebModule]);
96
+
97
+ useEffect(() => {
98
+ if (!call) return;
99
+
100
+ return call.on('call.moderation_blur', async () => {
101
+ if (unregisterRef.current) return;
102
+
103
+ let FullScreenBlurClass: FullScreenBlurType;
104
+
105
+ try {
106
+ FullScreenBlurClass = await loadVideoFiltersWebModule();
107
+ } catch (importErr) {
108
+ console.error('[moderation] Failed to import blur module:', importErr);
109
+ await handleFallback();
110
+ return;
111
+ }
112
+
113
+ if (!isFullScreenBlurPlatformSupported()) {
114
+ console.warn('[moderation] Blur not supported on this platform');
115
+ await handleFallback();
116
+ return;
117
+ }
118
+
119
+ const { unregister } = call.camera.registerFilter((inputStream) => {
120
+ unregisterRef.current = unregister;
121
+
122
+ const outputPromise = new Promise<MediaStream>(
123
+ async (resolve, reject) => {
124
+ const [track] = inputStream.getVideoTracks();
125
+
126
+ let processor: InstanceType<FullScreenBlurType>;
127
+
128
+ try {
129
+ processor = new FullScreenBlurClass(track);
130
+ processorRef.current = processor;
131
+
132
+ const result = await processor.start();
133
+ const output = new MediaStream([result]);
134
+ resolve(output);
135
+
136
+ if (duration > 0) {
137
+ timeoutRef.current = setTimeout(disableBlur, duration);
138
+ }
139
+ } catch (error) {
140
+ reject(error);
141
+ console.error('[moderation] Processor init failed:', error);
142
+
143
+ await unregisterRef.current?.();
144
+ unregisterRef.current = null;
145
+ processorRef.current = null;
146
+
147
+ await handleFallback();
148
+ return;
149
+ }
150
+ },
151
+ );
152
+
153
+ return {
154
+ output: outputPromise,
155
+ stop: () => {
156
+ if (processorRef.current) {
157
+ processorRef.current.stop();
158
+ processorRef.current = null;
159
+ }
160
+ },
161
+ };
162
+ });
163
+ });
164
+ }, [call, loadVideoFiltersWebModule, disableBlur, handleFallback, duration]);
165
+
166
+ useEffect(() => disableBlur, [disableBlur]);
167
+ };