@stream-io/video-react-sdk 0.3.47 → 0.4.1
This diff compares publicly available versions of the package as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- package/CHANGELOG.md +22 -0
- package/README.md +3 -3
- package/dist/index.cjs.js +329 -883
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.es.js +330 -868
- package/dist/index.es.js.map +1 -1
- package/dist/src/components/Notification/SpeakingWhileMutedNotification.d.ts +3 -0
- package/dist/src/components/{Video → VideoPreview}/VideoPreview.d.ts +7 -11
- package/dist/src/components/index.d.ts +1 -1
- package/dist/src/core/components/ParticipantView/ParticipantView.d.ts +3 -9
- package/dist/src/core/components/ParticipantView/ParticipantViewContext.d.ts +9 -0
- package/dist/src/core/components/ParticipantView/index.d.ts +1 -0
- package/dist/src/core/components/StreamCall/StreamCall.d.ts +2 -11
- package/dist/src/core/hooks/index.d.ts +0 -2
- package/dist/src/core/hooks/useDevices.d.ts +0 -99
- package/dist/src/core/index.d.ts +0 -1
- package/dist/src/hooks/index.d.ts +1 -3
- package/dist/src/hooks/usePersistedDevicePreferences.d.ts +13 -0
- package/dist/src/translations/index.d.ts +4 -0
- package/index.ts +2 -2
- package/package.json +3 -3
- package/src/components/CallControls/CallControls.tsx +6 -8
- package/src/components/CallControls/ScreenShareButton.tsx +14 -10
- package/src/components/CallControls/ToggleAudioButton.tsx +21 -24
- package/src/components/CallControls/ToggleAudioOutputButton.tsx +1 -1
- package/src/components/CallControls/ToggleVideoButton.tsx +21 -22
- package/src/components/CallParticipantsList/CallParticipantsList.tsx +1 -1
- package/src/components/DeviceSettings/DeviceSelectorAudio.tsx +20 -26
- package/src/components/DeviceSettings/DeviceSelectorVideo.tsx +9 -8
- package/src/components/Icon/Icon.tsx +1 -1
- package/src/components/Notification/SpeakingWhileMutedNotification.tsx +5 -49
- package/src/components/VideoPreview/VideoPreview.tsx +78 -0
- package/src/components/index.ts +1 -1
- package/src/core/components/CallLayout/PaginatedGridLayout.tsx +2 -5
- package/src/core/components/ParticipantView/DefaultParticipantViewUI.tsx +7 -6
- package/src/core/components/ParticipantView/ParticipantView.tsx +2 -19
- package/src/core/components/ParticipantView/ParticipantViewContext.tsx +17 -0
- package/src/core/components/ParticipantView/index.ts +1 -0
- package/src/core/components/StreamCall/StreamCall.tsx +2 -28
- package/src/core/hooks/index.ts +0 -2
- package/src/core/hooks/useDevices.ts +0 -195
- package/src/core/index.ts +0 -1
- package/src/hooks/index.ts +1 -3
- package/src/hooks/usePersistedDevicePreferences.ts +118 -0
- package/src/translations/en.json +5 -0
- package/dist/src/core/contexts/MediaDevicesContext.d.ts +0 -180
- package/dist/src/core/contexts/index.d.ts +0 -1
- package/dist/src/core/hooks/useAudioPublisher.d.ts +0 -12
- package/dist/src/core/hooks/useVideoPublisher.d.ts +0 -12
- package/dist/src/hooks/useToggleAudioMuteState.d.ts +0 -4
- package/dist/src/hooks/useToggleScreenShare.d.ts +0 -5
- package/dist/src/hooks/useToggleVideoMuteState.d.ts +0 -4
- package/src/components/Video/VideoPreview.tsx +0 -152
- package/src/core/contexts/MediaDevicesContext.tsx +0 -416
- package/src/core/contexts/index.ts +0 -1
- package/src/core/hooks/useAudioPublisher.ts +0 -146
- package/src/core/hooks/useVideoPublisher.ts +0 -177
- package/src/hooks/useToggleAudioMuteState.ts +0 -34
- package/src/hooks/useToggleScreenShare.ts +0 -43
- package/src/hooks/useToggleVideoMuteState.ts +0 -34
- /package/dist/src/components/{Video → VideoPreview}/index.d.ts +0 -0
- /package/src/components/{Video → VideoPreview}/index.ts +0 -0
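Taken together, the file list shows the device-management layer moving off the removed `MediaDevicesContext` / `useToggle*MuteState` hooks and onto the call-state device hooks used throughout the new code below. A minimal sketch of the new mute-toggle pattern, assuming `useCallStateHooks` is re-exported from the SDK entry point (the component name is illustrative):

```tsx
import { useCallStateHooks } from '@stream-io/video-react-sdk';

// Illustrative replacement for the removed useToggleAudioMuteState() hook:
// mute state and the toggle now live on the microphone controller.
export const MuteMicButton = () => {
  const { useMicrophoneState } = useCallStateHooks();
  const { microphone, isMute } = useMicrophoneState();
  return (
    <button onClick={() => microphone.toggle()}>
      {isMute ? 'Unmute' : 'Mute'}
    </button>
  );
};
```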
package/dist/index.cjs.js
CHANGED
@@ -5,8 +5,6 @@ var videoReactBindings = require('@stream-io/video-react-bindings');
 var jsxRuntime = require('react/jsx-runtime');
 var react = require('react');
 var clsx = require('clsx');
-var rxjs = require('rxjs');
-var operators = require('rxjs/operators');
 var react$1 = require('@floating-ui/react');
 var line = require('@nivo/line');
 
@@ -198,377 +196,6 @@ const useHasBrowserPermissions = (permissionName) => {
     }, [permissionName]);
     return canSubscribe;
 };
-/**
- * Observes changes in connected devices and maintains an up-to-date array of connected MediaDeviceInfo objects.
- * @param observeDevices
- * @category Device Management
- */
-const useDevices = (observeDevices) => {
-    const [devices, setDevices] = react.useState([]);
-    react.useEffect(() => {
-        const subscription = observeDevices().subscribe(setDevices);
-        return () => {
-            subscription.unsubscribe();
-        };
-    }, [observeDevices]);
-    return devices;
-};
-/**
- * Observes changes and maintains an array of connected video input devices
- * @category Device Management
- */
-const useVideoDevices = () => useDevices(videoClient.getVideoDevices);
-/**
- * Observes changes and maintains an array of connected audio input devices
- * @category Device Management
- */
-const useAudioInputDevices = () => useDevices(videoClient.getAudioDevices);
-/**
- * Observes changes and maintains an array of connected audio output devices
- * @category Device Management
- */
-const useAudioOutputDevices = () => useDevices(videoClient.getAudioOutputDevices);
-/**
- * Verifies that newly selected device id exists among the registered devices.
- * If the selected device id is not found among existing devices, switches to the default device.
- * The media devices are observed only if a given permission ('camera' resp. 'microphone') is granted in browser.
- * Regardless of current permissions settings, an intent to observe devices will take place in Firefox.
- * This is due to the fact that Firefox does not allow to query for 'camera' and 'microphone' permissions.
- * @param canObserve
- * @param devices$
- * @param switchToDefaultDevice
- * @param selectedDeviceId
- * @category Device Management
- */
-const useDeviceFallback = (canObserve, devices$, switchToDefaultDevice, selectedDeviceId) => {
-    react.useEffect(() => {
-        if (!canObserve)
-            return;
-        const validateDeviceId = devices$.pipe().subscribe((devices) => {
-            const deviceFound = devices.find((device) => device.deviceId === selectedDeviceId);
-            if (!deviceFound)
-                switchToDefaultDevice();
-        });
-        return () => {
-            validateDeviceId.unsubscribe();
-        };
-    }, [canObserve, devices$, selectedDeviceId, switchToDefaultDevice]);
-};
-/**
- * Verifies that newly selected video device id exists among the registered devices.
- * If the selected device id is not found among existing devices, switches to the default video device.
- * The media devices are observed only if 'camera' permission is granted in browser.
- * It is integrators responsibility to instruct users how to enable required permissions.
- * Regardless of current permissions settings, an intent to observe devices will take place in Firefox.
- * This is due to the fact that Firefox does not allow to query for 'camera' and 'microphone' permissions.
- * @param switchToDefaultDevice
- * @param canObserve
- * @param selectedDeviceId
- * @category Device Management
- */
-const useVideoDeviceFallback = (switchToDefaultDevice, canObserve, selectedDeviceId) => useDeviceFallback(canObserve, videoClient.getVideoDevices(), switchToDefaultDevice, selectedDeviceId);
-/**
- * Verifies that newly selected audio input device id exists among the registered devices.
- * If the selected device id is not found among existing devices, switches to the default audio input device.
- * The media devices are observed only if 'microphone' permission is granted in browser.
- * It is integrators responsibility to instruct users how to enable required permissions.
- * Regardless of current permissions settings, an intent to observe devices will take place in Firefox.
- * This is due to the fact that Firefox does not allow to query for 'camera' and 'microphone' permissions.
- * @param switchToDefaultDevice
- * @param canObserve
- * @param selectedDeviceId
- * @category Device Management
- */
-const useAudioInputDeviceFallback = (switchToDefaultDevice, canObserve, selectedDeviceId) => useDeviceFallback(canObserve, videoClient.getAudioDevices(), switchToDefaultDevice, selectedDeviceId);
-/**
- * Verifies that newly selected audio output device id exists among the registered devices.
- * If the selected device id is not found among existing devices, switches to the default audio output device.
- * The media devices are observed only if 'microphone' permission is granted in browser.
- * It is integrators responsibility to instruct users how to enable required permissions.
- * Regardless of current permissions settings, an intent to observe devices will take place in Firefox.
- * This is due to the fact that Firefox does not allow to query for 'camera' and 'microphone' permissions.
- * @param switchToDefaultDevice
- * @param canObserve
- * @param selectedDeviceId
- * @category Device Management
- */
-const useAudioOutputDeviceFallback = (switchToDefaultDevice, canObserve, selectedDeviceId) => useDeviceFallback(canObserve, videoClient.getAudioOutputDevices(), switchToDefaultDevice, selectedDeviceId);
-/**
- * Observes devices of certain kind are made unavailable and executes onDisconnect callback.
- * @param observeDevices
- * @param onDisconnect
- * @category Device Management
- */
-const useOnUnavailableDevices = (observeDevices, onDisconnect) => {
-    react.useEffect(() => {
-        const subscription = observeDevices
-            .pipe(rxjs.pairwise())
-            .subscribe(([prev, current]) => {
-            if (prev.length > 0 && current.length === 0)
-                onDisconnect();
-        });
-        return () => subscription.unsubscribe();
-    }, [observeDevices, onDisconnect]);
-};
-/**
- * Observes disconnect of all video devices and executes onDisconnect callback.
- * @param onDisconnect
- * @category Device Management
- */
-const useOnUnavailableVideoDevices = (onDisconnect) => useOnUnavailableDevices(videoClient.getVideoDevices(), onDisconnect);
-/**
- * Observes disconnect of all audio input devices and executes onDisconnect callback.
- * @param onDisconnect
- * @category Device Management
- */
-const useOnUnavailableAudioInputDevices = (onDisconnect) => useOnUnavailableDevices(videoClient.getAudioDevices(), onDisconnect);
-/**
- * Observes disconnect of all audio output devices and executes onDisconnect callback.
- * @param onDisconnect
- * @category Device Management
- */
-const useOnUnavailableAudioOutputDevices = (onDisconnect) => useOnUnavailableDevices(videoClient.getAudioOutputDevices(), onDisconnect);
-
-/**
- * @internal
- * @category Device Management
- */
-const useAudioPublisher = ({ initialAudioMuted, audioDeviceId, }) => {
-    const call = videoReactBindings.useCall();
-    const { useCallState, useCallCallingState, useLocalParticipant } = videoReactBindings.useCallStateHooks();
-    const callState = useCallState();
-    const callingState = useCallCallingState();
-    const participant = useLocalParticipant();
-    const hasBrowserPermissionAudioInput = useHasBrowserPermissions('microphone');
-    const { localParticipant$ } = callState;
-    const isPublishingAudio = participant?.publishedTracks.includes(videoClient.SfuModels.TrackType.AUDIO);
-    const publishAudioStream = react.useCallback(async () => {
-        if (!call)
-            return;
-        if (!call.permissionsContext.hasPermission(videoClient.OwnCapability.SEND_AUDIO)) {
-            throw new Error(`No permission to publish audio`);
-        }
-        try {
-            const audioStream = await videoClient.getAudioStream({
-                deviceId: audioDeviceId,
-            });
-            await call.publishAudioStream(audioStream);
-        }
-        catch (e) {
-            console.log('Failed to publish audio stream', e);
-        }
-    }, [audioDeviceId, call]);
-    const lastAudioDeviceId = react.useRef(audioDeviceId);
-    react.useEffect(() => {
-        if (callingState === videoClient.CallingState.JOINED &&
-            audioDeviceId !== lastAudioDeviceId.current) {
-            lastAudioDeviceId.current = audioDeviceId;
-            publishAudioStream().catch((e) => {
-                console.error('Failed to publish audio stream', e);
-            });
-        }
-    }, [audioDeviceId, callingState, publishAudioStream]);
-    const initialPublishRun = react.useRef(false);
-    react.useEffect(() => {
-        if (callingState === videoClient.CallingState.JOINED &&
-            !initialPublishRun.current &&
-            !initialAudioMuted) {
-            // automatic publishing should happen only when joining the call
-            // from the lobby, and the audio is not muted
-            publishAudioStream().catch((e) => {
-                console.error('Failed to publish audio stream', e);
-            });
-            initialPublishRun.current = true;
-        }
-    }, [callingState, initialAudioMuted, publishAudioStream]);
-    react.useEffect(() => {
-        if (!localParticipant$ || !hasBrowserPermissionAudioInput)
-            return;
-        const subscription = videoClient.watchForDisconnectedAudioDevice(localParticipant$.pipe(rxjs.map((p) => p?.audioDeviceId))).subscribe(async () => {
-            if (!call)
-                return;
-            call.setAudioDevice(undefined);
-            await call.stopPublish(videoClient.SfuModels.TrackType.AUDIO);
-        });
-        return () => {
-            subscription.unsubscribe();
-        };
-    }, [hasBrowserPermissionAudioInput, localParticipant$, call]);
-    react.useEffect(() => {
-        if (!participant?.audioStream || !call || !isPublishingAudio)
-            return;
-        const [track] = participant.audioStream.getAudioTracks();
-        const selectedAudioDeviceId = track.getSettings().deviceId;
-        const republishDefaultDevice = videoClient.watchForAddedDefaultAudioDevice().subscribe(async () => {
-            if (!(call &&
-                participant.audioStream &&
-                selectedAudioDeviceId === 'default'))
-                return;
-            // We need to stop the original track first in order
-            // we can retrieve the new default device stream
-            track.stop();
-            const audioStream = await videoClient.getAudioStream({
-                deviceId: 'default',
-            });
-            await call.publishAudioStream(audioStream);
-        });
-        const handleTrackEnded = async () => {
-            if (selectedAudioDeviceId === audioDeviceId) {
-                const audioStream = await videoClient.getAudioStream({
-                    deviceId: audioDeviceId,
-                });
-                await call.publishAudioStream(audioStream);
-            }
-        };
-        track.addEventListener('ended', handleTrackEnded);
-        return () => {
-            track.removeEventListener('ended', handleTrackEnded);
-            republishDefaultDevice.unsubscribe();
-        };
-    }, [audioDeviceId, call, participant?.audioStream, isPublishingAudio]);
-    return publishAudioStream;
-};
-
-const useQueryParams = () => {
-    return react.useMemo(() => typeof window === 'undefined'
-        ? null
-        : new URLSearchParams(window.location.search), []);
-};
-/**
- * Internal purpose hook. Enables certain development mode tools.
- */
-const useIsDebugMode = () => {
-    const params = useQueryParams();
-    return !!params?.get('debug');
-};
-const useDebugPreferredVideoCodec = () => {
-    const params = useQueryParams();
-    return params?.get('video_codec');
-};
-
-/**
- * @internal
- * @category Device Management
- */
-const useVideoPublisher = ({ initialVideoMuted, videoDeviceId, }) => {
-    const call = videoReactBindings.useCall();
-    const { useCallState, useCallCallingState, useLocalParticipant, useCallSettings, } = videoReactBindings.useCallStateHooks();
-    const callState = useCallState();
-    const callingState = useCallCallingState();
-    const participant = useLocalParticipant();
-    const hasBrowserPermissionVideoInput = useHasBrowserPermissions('camera');
-    const { localParticipant$ } = callState;
-    const preferredCodec = useDebugPreferredVideoCodec();
-    const isPublishingVideo = participant?.publishedTracks.includes(videoClient.SfuModels.TrackType.VIDEO);
-    const settings = useCallSettings();
-    const videoSettings = settings?.video;
-    const targetResolution = videoSettings?.target_resolution;
-    const publishVideoStream = react.useCallback(async () => {
-        if (!call)
-            return;
-        if (!call.permissionsContext.hasPermission(videoClient.OwnCapability.SEND_VIDEO)) {
-            throw new Error(`No permission to publish video`);
-        }
-        try {
-            const videoStream = await videoClient.getVideoStream({
-                deviceId: videoDeviceId,
-                width: targetResolution?.width,
-                height: targetResolution?.height,
-                facingMode: toFacingMode(videoSettings?.camera_facing),
-            });
-            await call.publishVideoStream(videoStream, { preferredCodec });
-        }
-        catch (e) {
-            console.log('Failed to publish video stream', e);
-        }
-    }, [
-        call,
-        preferredCodec,
-        targetResolution?.height,
-        targetResolution?.width,
-        videoDeviceId,
-        videoSettings?.camera_facing,
-    ]);
-    const lastVideoDeviceId = react.useRef(videoDeviceId);
-    react.useEffect(() => {
-        if (callingState === videoClient.CallingState.JOINED &&
-            videoDeviceId !== lastVideoDeviceId.current) {
-            lastVideoDeviceId.current = videoDeviceId;
-            publishVideoStream().catch((e) => {
-                console.error('Failed to publish video stream', e);
-            });
-        }
-    }, [publishVideoStream, videoDeviceId, callingState]);
-    const initialPublishRun = react.useRef(false);
-    react.useEffect(() => {
-        if (callingState === videoClient.CallingState.JOINED &&
-            !initialPublishRun.current &&
-            !initialVideoMuted) {
-            // automatic publishing should happen only when joining the call
-            // from the lobby, and the video is not muted
-            publishVideoStream().catch((e) => {
-                console.error('Failed to publish video stream', e);
-            });
-            initialPublishRun.current = true;
-        }
-    }, [callingState, initialVideoMuted, publishVideoStream]);
-    react.useEffect(() => {
-        if (!localParticipant$ || !hasBrowserPermissionVideoInput)
-            return;
-        const subscription = videoClient.watchForDisconnectedVideoDevice(localParticipant$.pipe(operators.map((p) => p?.videoDeviceId))).subscribe(async () => {
-            if (!call)
-                return;
-            call.setVideoDevice(undefined);
-            await call.stopPublish(videoClient.SfuModels.TrackType.VIDEO);
-        });
-        return () => {
-            subscription.unsubscribe();
-        };
-    }, [hasBrowserPermissionVideoInput, localParticipant$, call]);
-    react.useEffect(() => {
-        if (!participant?.videoStream || !call || !isPublishingVideo)
-            return;
-        const [track] = participant.videoStream.getVideoTracks();
-        const selectedVideoDeviceId = track.getSettings().deviceId;
-        const republishDefaultDevice = videoClient.watchForAddedDefaultVideoDevice().subscribe(async () => {
-            if (!(call &&
-                participant.videoStream &&
-                selectedVideoDeviceId === 'default'))
-                return;
-            // We need to stop the original track first in order
-            // we can retrieve the new default device stream
-            track.stop();
-            const videoStream = await videoClient.getVideoStream({
-                deviceId: 'default',
-            });
-            await call.publishVideoStream(videoStream);
-        });
-        const handleTrackEnded = async () => {
-            if (selectedVideoDeviceId === videoDeviceId) {
-                const videoStream = await videoClient.getVideoStream({
-                    deviceId: videoDeviceId,
-                });
-                await call.publishVideoStream(videoStream);
-            }
-        };
-        track.addEventListener('ended', handleTrackEnded);
-        return () => {
-            track.removeEventListener('ended', handleTrackEnded);
-            republishDefaultDevice.unsubscribe();
-        };
-    }, [videoDeviceId, call, participant?.videoStream, isPublishingVideo]);
-    return publishVideoStream;
-};
-const toFacingMode = (value) => {
-    switch (value) {
-        case videoClient.VideoSettingsCameraFacingEnum.FRONT:
-            return 'user';
-        case videoClient.VideoSettingsCameraFacingEnum.BACK:
-            return 'environment';
-        default:
-            return undefined;
-    }
-};
 
 const useTrackElementVisibility = ({ trackedElement, dynascaleManager: propsDynascaleManager, sessionId, trackType, }) => {
     const call = videoReactBindings.useCall();
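The removed `useVideoDevices` / `useAudioInputDevices` / `useAudioOutputDevices` hooks have no one-to-one replacement in this bundle; device lists are now read from the camera, microphone, and speaker state, as the rewritten `DeviceSelector*` components further down in this diff show. A hedged sketch for listing and switching cameras (the component name is illustrative):

```tsx
import { useCallStateHooks } from '@stream-io/video-react-sdk';

// Illustrative camera picker: devices and the selected device come from
// useCameraState(), and switching goes through camera.select().
export const CameraPicker = () => {
  const { useCameraState } = useCallStateHooks();
  const { camera, devices, selectedDevice } = useCameraState();
  return (
    <select
      value={selectedDevice ?? ''}
      onChange={(e) => camera.select(e.target.value)}
    >
      {(devices ?? []).map((device) => (
        <option key={device.deviceId} value={device.deviceId}>
          {device.label}
        </option>
      ))}
    </select>
  );
};
```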
@@ -619,6 +246,106 @@ const useFloatingUIPreset = ({ placement, strategy, }) => {
     return { refs, x, y, domReference, floating, strategy };
 };
 
+/**
+ * This hook will persist the device settings to local storage.
+ *
+ * @param key the key to use for local storage.
+ */
+const usePersistDevicePreferences = (key) => {
+    const { useMicrophoneState, useCameraState, useSpeakerState } = videoReactBindings.useCallStateHooks();
+    const mic = useMicrophoneState();
+    const camera = useCameraState();
+    const speaker = useSpeakerState();
+    react.useEffect(() => {
+        try {
+            const defaultDevice = 'default';
+            const preferences = {
+                mic: {
+                    selectedDeviceId: mic.selectedDevice || defaultDevice,
+                    muted: mic.isMute,
+                },
+                camera: {
+                    selectedDeviceId: camera.selectedDevice || defaultDevice,
+                    muted: camera.isMute,
+                },
+                speaker: {
+                    selectedDeviceId: speaker.selectedDevice || defaultDevice,
+                    muted: false,
+                },
+            };
+            window.localStorage.setItem(key, JSON.stringify(preferences));
+        }
+        catch (err) {
+            console.warn('Failed to save device preferences', err);
+        }
+    }, [
+        camera.isMute,
+        camera.selectedDevice,
+        key,
+        mic.isMute,
+        mic.selectedDevice,
+        speaker.selectedDevice,
+    ]);
+};
+/**
+ * This hook will apply the device settings from local storage.
+ *
+ * @param key the key to use for local storage.
+ */
+const useApplyDevicePreferences = (key) => {
+    const call = videoReactBindings.useCall();
+    react.useEffect(() => {
+        if (!call)
+            return;
+        const apply = async () => {
+            const initMic = async (setting) => {
+                await call.microphone.select(setting.selectedDeviceId);
+                if (setting.muted) {
+                    await call.microphone.disable();
+                }
+                else {
+                    await call.microphone.enable();
+                }
+            };
+            const initCamera = async (setting) => {
+                await call.camera.select(setting.selectedDeviceId);
+                if (setting.muted) {
+                    await call.camera.disable();
+                }
+                else {
+                    await call.camera.enable();
+                }
+            };
+            const initSpeaker = (setting) => {
+                call.speaker.select(setting.selectedDeviceId);
+            };
+            try {
+                const preferences = JSON.parse(window.localStorage.getItem(key));
+                if (preferences) {
+                    await initMic(preferences.mic);
+                    await initCamera(preferences.camera);
+                    initSpeaker(preferences.speaker);
+                }
+            }
+            catch (err) {
+                console.warn('Failed to load device preferences', err);
+            }
+        };
+        apply().catch((err) => {
+            console.warn('Failed to apply device preferences', err);
+        });
+    }, [call, key]);
+};
+/**
+ * This hook will apply and persist the device preferences from local storage.
+ *
+ * @param key the key to use for local storage.
+ */
+const usePersistedDevicePreferences = (key = '@stream-io/device-preferences') => {
+    useApplyDevicePreferences(key);
+    usePersistDevicePreferences(key);
+};
+
 
 const SCROLL_THRESHOLD = 10;
 /**
  * Hook which observes element's scroll position and returns text value based on the
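The `usePersistedDevicePreferences` hook added above applies any saved preferences once the call is available and then keeps localStorage in sync as devices are switched or muted. A minimal usage sketch, assuming the hook is exported from the package entry point (the `hooks/index.ts` change in the file list suggests it is) and that the component renders inside `<StreamCall>`:

```tsx
import { usePersistedDevicePreferences } from '@stream-io/video-react-sdk';

// Illustrative wrapper; must be rendered inside <StreamCall> so the hook
// can reach the call's microphone, camera, and speaker controllers.
export const PersistedDeviceSettings = () => {
  // Omit the argument to fall back to the default '@stream-io/device-preferences' key.
  usePersistedDevicePreferences('my-app/device-preferences');
  return null; // render your own call UI here
};
```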
@@ -681,6 +408,39 @@ const useHorizontalScrollPosition = (scrollElement, threshold = SCROLL_THRESHOLD
     return scrollPosition;
 };
 
+const useToggleCallRecording = () => {
+    const call = videoReactBindings.useCall();
+    const { useIsCallRecordingInProgress } = videoReactBindings.useCallStateHooks();
+    const isCallRecordingInProgress = useIsCallRecordingInProgress();
+    const [isAwaitingResponse, setIsAwaitingResponse] = react.useState(false);
+    // TODO: add permissions
+    react.useEffect(() => {
+        // we wait until call.recording_started/stopped event to flips the
+        // `isCallRecordingInProgress` state variable.
+        // Once the flip happens, we remove the loading indicator
+        setIsAwaitingResponse((isAwaiting) => {
+            if (isAwaiting)
+                return false;
+            return isAwaiting;
+        });
+    }, [isCallRecordingInProgress]);
+    const toggleCallRecording = react.useCallback(async () => {
+        try {
+            setIsAwaitingResponse(true);
+            if (isCallRecordingInProgress) {
+                await call?.stopRecording();
+            }
+            else {
+                await call?.startRecording();
+            }
+        }
+        catch (e) {
+            console.error(`Failed start recording`, e);
+        }
+    }, [call, isCallRecordingInProgress]);
+    return { toggleCallRecording, isAwaitingResponse, isCallRecordingInProgress };
+};
+
 const useRequestPermission = (permission) => {
     const call = videoReactBindings.useCall();
     const hasPermission = videoReactBindings.useHasPermissions(permission);
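`useToggleCallRecording` now sits alongside the other call-state hooks instead of the removed toggle hooks further below. A sketch of a custom record button built on it, assuming the hook stays part of the public exports:

```tsx
import { useToggleCallRecording } from '@stream-io/video-react-sdk';

// Illustrative custom record button; the built-in RecordCallButton later in
// this diff uses the same hook.
export const MyRecordButton = () => {
  const { toggleCallRecording, isAwaitingResponse, isCallRecordingInProgress } =
    useToggleCallRecording();
  return (
    <button disabled={isAwaitingResponse} onClick={toggleCallRecording}>
      {isCallRecordingInProgress ? 'Stop recording' : 'Start recording'}
    </button>
  );
};
```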
@@ -716,104 +476,6 @@ const useRequestPermission = (permission) => {
     };
 };
 
-const useToggleAudioMuteState = () => {
-    const { publishAudioStream, stopPublishingAudio } = useMediaDevices();
-    const { useLocalParticipant } = videoReactBindings.useCallStateHooks();
-    const localParticipant = useLocalParticipant();
-    const { isAwaitingPermission, requestPermission } = useRequestPermission(videoClient.OwnCapability.SEND_AUDIO);
-    // to keep the toggle function as stable as possible
-    const isAudioMutedReference = react.useRef(false);
-    isAudioMutedReference.current = !localParticipant?.publishedTracks.includes(videoClient.SfuModels.TrackType.AUDIO);
-    const toggleAudioMuteState = react.useCallback(async () => {
-        if (isAudioMutedReference.current) {
-            const canPublish = await requestPermission();
-            if (canPublish)
-                return publishAudioStream();
-        }
-        if (!isAudioMutedReference.current)
-            await stopPublishingAudio();
-    }, [publishAudioStream, requestPermission, stopPublishingAudio]);
-    return { toggleAudioMuteState, isAwaitingPermission };
-};
-
-const useToggleVideoMuteState = () => {
-    const { publishVideoStream, stopPublishingVideo } = useMediaDevices();
-    const { useLocalParticipant } = videoReactBindings.useCallStateHooks();
-    const localParticipant = useLocalParticipant();
-    const { isAwaitingPermission, requestPermission } = useRequestPermission(videoClient.OwnCapability.SEND_VIDEO);
-    // to keep the toggle function as stable as possible
-    const isVideoMutedReference = react.useRef(false);
-    isVideoMutedReference.current = !localParticipant?.publishedTracks.includes(videoClient.SfuModels.TrackType.VIDEO);
-    const toggleVideoMuteState = react.useCallback(async () => {
-        if (isVideoMutedReference.current) {
-            const canPublish = await requestPermission();
-            if (canPublish)
-                return publishVideoStream();
-        }
-        if (!isVideoMutedReference.current)
-            await stopPublishingVideo();
-    }, [publishVideoStream, requestPermission, stopPublishingVideo]);
-    return { toggleVideoMuteState, isAwaitingPermission };
-};
-
-const useToggleScreenShare = () => {
-    const { useLocalParticipant } = videoReactBindings.useCallStateHooks();
-    const localParticipant = useLocalParticipant();
-    const call = videoReactBindings.useCall();
-    const isScreenSharingReference = react.useRef(false);
-    const { isAwaitingPermission, requestPermission } = useRequestPermission(videoClient.OwnCapability.SCREENSHARE);
-    const isScreenSharing = !!localParticipant?.publishedTracks.includes(videoClient.SfuModels.TrackType.SCREEN_SHARE);
-    isScreenSharingReference.current = isScreenSharing;
-    const toggleScreenShare = react.useCallback(async () => {
-        if (!isScreenSharingReference.current) {
-            const canPublish = await requestPermission();
-            if (!canPublish)
-                return;
-            const stream = await videoClient.getScreenShareStream().catch((e) => {
-                console.log(`Can't share screen: ${e}`);
-            });
-            if (stream) {
-                return call?.publishScreenShareStream(stream);
-            }
-        }
-        await call?.stopPublish(videoClient.SfuModels.TrackType.SCREEN_SHARE);
-    }, [call, requestPermission]);
-    return { toggleScreenShare, isAwaitingPermission, isScreenSharing };
-};
-
-const useToggleCallRecording = () => {
-    const call = videoReactBindings.useCall();
-    const { useIsCallRecordingInProgress } = videoReactBindings.useCallStateHooks();
-    const isCallRecordingInProgress = useIsCallRecordingInProgress();
-    const [isAwaitingResponse, setIsAwaitingResponse] = react.useState(false);
-    // TODO: add permissions
-    react.useEffect(() => {
-        // we wait until call.recording_started/stopped event to flips the
-        // `isCallRecordingInProgress` state variable.
-        // Once the flip happens, we remove the loading indicator
-        setIsAwaitingResponse((isAwaiting) => {
-            if (isAwaiting)
-                return false;
-            return isAwaiting;
-        });
-    }, [isCallRecordingInProgress]);
-    const toggleCallRecording = react.useCallback(async () => {
-        try {
-            setIsAwaitingResponse(true);
-            if (isCallRecordingInProgress) {
-                await call?.stopRecording();
-            }
-            else {
-                await call?.startRecording();
-            }
-        }
-        catch (e) {
-            console.error(`Failed start recording`, e);
-        }
-    }, [call, isCallRecordingInProgress]);
-    return { toggleCallRecording, isAwaitingResponse, isCallRecordingInProgress };
-};
-
 const MenuToggle = ({ ToggleButton, placement = 'top-start', strategy = 'absolute', children, }) => {
     const [menuShown, setMenuShown] = react.useState(false);
     const { floating, domReference, refs, x, y } = useFloatingUIPreset({
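The removed `useToggleScreenShare` (and the audio/video mute toggles above it) are superseded by the controllers on the call state; the rewritten `ScreenShareButton` later in this diff uses `useScreenShareState().screenShare.toggle()`. A hedged sketch with permission handling omitted:

```tsx
import { useCallStateHooks } from '@stream-io/video-react-sdk';

// Illustrative replacement for the removed useToggleScreenShare() hook.
// Permission checks (OwnCapability.SCREENSHARE) are omitted; see the
// rewritten ScreenShareButton further down for the full pattern.
export const MyScreenShareToggle = () => {
  const { useScreenShareState } = useCallStateHooks();
  const { screenShare } = useScreenShareState();
  return (
    <button onClick={() => screenShare.toggle()}>Toggle screen share</button>
  );
};
```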
@@ -858,7 +520,7 @@ const GenericMenuButtonItem = ({ children, ...rest }) => {
     return (jsxRuntime.jsx("li", { className: "str-video__generic-menu--item", children: jsxRuntime.jsx("button", { ...rest, children: children }) }));
 };
 
-const Icon = ({ icon }) => (jsxRuntime.jsx("span", { className: clsx
+const Icon = ({ icon }) => (jsxRuntime.jsx("span", { className: clsx('str-video__icon', icon && `str-video__icon--${icon}`) }));
 
 const IconButton = react.forwardRef((props, ref) => {
     const { icon, enabled, variant, onClick, className, ...rest } = props;
@@ -1015,50 +677,32 @@ const PermissionNotification = (props) => {
 };
 
 const SpeakingWhileMutedNotification = ({ children, text, }) => {
-    const {
-    const
-    const { getAudioStream } = useMediaDevices();
+    const { useMicrophoneState } = videoReactBindings.useCallStateHooks();
+    const { isSpeakingWhileMuted } = useMicrophoneState();
     const { t } = videoReactBindings.useI18n();
     const message = text ?? t('You are muted. Unmute to speak.');
-    const isAudioMute = !localParticipant?.publishedTracks.includes(videoClient.SfuModels.TrackType.AUDIO);
-    const audioDeviceId = localParticipant?.audioDeviceId;
-    const [isSpeakingWhileMuted, setIsSpeakingWhileMuted] = react.useState(false);
-    react.useEffect(() => {
-        // do nothing when not muted
-        if (!isAudioMute)
-            return;
-        const disposeSoundDetector = getAudioStream({
-            deviceId: audioDeviceId,
-        }).then((audioStream) => videoClient.createSoundDetector(audioStream, ({ isSoundDetected }) => {
-            setIsSpeakingWhileMuted((isNotified) => isNotified ? isNotified : isSoundDetected);
-        }));
-        disposeSoundDetector.catch((err) => {
-            console.error('Error while creating sound detector', err);
-        });
-        return () => {
-            disposeSoundDetector
-                .then((dispose) => dispose())
-                .catch((err) => {
-                console.error('Error while disposing sound detector', err);
-            });
-            setIsSpeakingWhileMuted(false);
-        };
-    }, [audioDeviceId, getAudioStream, isAudioMute]);
-    react.useEffect(() => {
-        if (!isSpeakingWhileMuted)
-            return;
-        const timeout = setTimeout(() => {
-            setIsSpeakingWhileMuted(false);
-        }, 3500);
-        return () => {
-            clearTimeout(timeout);
-            setIsSpeakingWhileMuted(false);
-        };
-    }, [isSpeakingWhileMuted]);
     return (jsxRuntime.jsx(Notification, { message: message, isVisible: isSpeakingWhileMuted, children: children }));
 };
 
-const
+const LoadingIndicator = ({ className, type = 'spinner', text, tooltip, }) => {
+    return (jsxRuntime.jsxs("div", { className: clsx('str-video__loading-indicator', className), title: tooltip, children: [jsxRuntime.jsx("div", { className: clsx('str-video__loading-indicator__icon', type) }), text && jsxRuntime.jsx("p", { className: "str-video__loading-indicator-text", children: text })] }));
+};
+
+const RecordCallButton = ({ caption = 'Record', }) => {
+    const call = videoReactBindings.useCall();
+    const { t } = videoReactBindings.useI18n();
+    const { toggleCallRecording, isAwaitingResponse, isCallRecordingInProgress } = useToggleCallRecording();
+    return (jsxRuntime.jsx(videoReactBindings.Restricted, { requiredGrants: [
+            videoClient.OwnCapability.START_RECORD_CALL,
+            videoClient.OwnCapability.STOP_RECORD_CALL,
+        ], children: jsxRuntime.jsx(CompositeButton, { active: isCallRecordingInProgress, caption: caption, children: isAwaitingResponse ? (jsxRuntime.jsx(LoadingIndicator, { tooltip: isCallRecordingInProgress
+                ? t('Waiting for recording to stop...')
+                : t('Waiting for recording to start...') })) : (jsxRuntime.jsx(IconButton
+        // FIXME OL: sort out this ambiguity
+        , {
+            // FIXME OL: sort out this ambiguity
+            enabled: !!call, disabled: !call, icon: isCallRecordingInProgress ? 'recording-on' : 'recording-off', title: t('Record call'), onClick: toggleCallRecording })) }) }));
+};
 
 const CallStatsLatencyChart = (props) => {
     const { values } = props;
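`SpeakingWhileMutedNotification` now reads `isSpeakingWhileMuted` directly from the microphone state instead of creating its own sound detector, so it no longer depends on `useMediaDevices`. Wrapping the publishing toggle the same way the new `CallControls` below does; this assumes both components remain exported from the SDK entry point:

```tsx
import {
  SpeakingWhileMutedNotification,
  ToggleAudioPublishingButton,
} from '@stream-io/video-react-sdk';

// Illustrative: show the "You are muted" hint around the mic toggle.
export const MicWithMutedHint = () => (
  <SpeakingWhileMutedNotification>
    <ToggleAudioPublishingButton />
  </SpeakingWhileMutedNotification>
);
```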
@@ -1166,6 +810,132 @@ const calculateSubscribeBitrate = (previousCallStatsReport, callStatsReport) =>
 const CallStatsButton = () => (jsxRuntime.jsx(MenuToggle, { placement: "top-end", ToggleButton: ToggleMenuButton$1, children: jsxRuntime.jsx(CallStats, {}) }));
 const ToggleMenuButton$1 = react.forwardRef(({ menuShown }, ref) => (jsxRuntime.jsx(CompositeButton, { ref: ref, active: menuShown, caption: 'Stats', children: jsxRuntime.jsx(IconButton, { icon: "stats", title: "Statistics" }) })));
 
+const ScreenShareButton = (props) => {
+    const { t } = videoReactBindings.useI18n();
+    const { caption = t('Screen Share') } = props;
+    const { useHasOngoingScreenShare, useScreenShareState } = videoReactBindings.useCallStateHooks();
+    const isSomeoneScreenSharing = useHasOngoingScreenShare();
+    const { hasPermission, requestPermission, isAwaitingPermission } = useRequestPermission(videoClient.OwnCapability.SCREENSHARE);
+    const { screenShare, isMute: isScreenSharing } = useScreenShareState();
+    return (jsxRuntime.jsx(videoReactBindings.Restricted, { requiredGrants: [videoClient.OwnCapability.SCREENSHARE], children: jsxRuntime.jsx(PermissionNotification, { permission: videoClient.OwnCapability.SCREENSHARE, isAwaitingApproval: isAwaitingPermission, messageApproved: t('You can now share your screen.'), messageAwaitingApproval: t('Awaiting for an approval to share screen.'), messageRevoked: t('You can no longer share your screen.'), children: jsxRuntime.jsx(CompositeButton, { active: isSomeoneScreenSharing, caption: caption, children: jsxRuntime.jsx(IconButton, { icon: isScreenSharing ? 'screen-share-on' : 'screen-share-off', title: t('Share screen'), disabled: !isScreenSharing && isSomeoneScreenSharing, onClick: async () => {
+            if (!hasPermission) {
+                await requestPermission();
+            }
+            else {
+                await screenShare.toggle();
+            }
+        } }) }) }) }));
+};
+
+const DeviceSelectorOption = ({ disabled, id, label, onChange, name, selected, defaultChecked, value, }) => {
+    return (jsxRuntime.jsxs("label", { className: clsx('str-video__device-settings__option', {
+            'str-video__device-settings__option--selected': selected,
+            'str-video__device-settings__option--disabled': disabled,
+        }), htmlFor: id, children: [jsxRuntime.jsx("input", { type: "radio", name: name, onChange: onChange, value: value, id: id, checked: selected, defaultChecked: defaultChecked, disabled: disabled }), label] }));
+};
+const DeviceSelector = (props) => {
+    const { devices = [], selectedDeviceId: selectedDeviceFromProps, title, onChange, } = props;
+    const inputGroupName = title.replace(' ', '-').toLowerCase();
+    // sometimes the browser (Chrome) will report the system-default device
+    // with an id of 'default'. In case when it doesn't, we'll select the first
+    // available device.
+    let selectedDeviceId = selectedDeviceFromProps;
+    if (devices.length > 0 &&
+        !devices.find((d) => d.deviceId === selectedDeviceId)) {
+        selectedDeviceId = devices[0].deviceId;
+    }
+    return (jsxRuntime.jsxs("div", { className: "str-video__device-settings__device-kind", children: [jsxRuntime.jsx("div", { className: "str-video__device-settings__device-selector-title", children: title }), !devices.length ? (jsxRuntime.jsx(DeviceSelectorOption, { id: `${inputGroupName}--default`, label: "Default", name: inputGroupName, defaultChecked: true, value: "default" })) : (devices.map((device) => {
+            return (jsxRuntime.jsx(DeviceSelectorOption, { id: `${inputGroupName}--${device.deviceId}`, value: device.deviceId, label: device.label, onChange: (e) => {
+                    onChange?.(e.target.value);
+                }, name: inputGroupName, selected: device.deviceId === selectedDeviceId || devices.length === 1 }, device.deviceId));
+        }))] }));
+};
+
+const DeviceSelectorAudioInput = ({ title, }) => {
+    const { t } = videoReactBindings.useI18n();
+    const { useMicrophoneState } = videoReactBindings.useCallStateHooks();
+    const { microphone, selectedDevice, devices } = useMicrophoneState();
+    return (jsxRuntime.jsx(DeviceSelector, { devices: devices || [], selectedDeviceId: selectedDevice, onChange: async (deviceId) => {
+            await microphone.select(deviceId);
+        }, title: title || t('Select a Mic') }));
+};
+const DeviceSelectorAudioOutput = ({ title, }) => {
+    const { t } = videoReactBindings.useI18n();
+    const { useSpeakerState } = videoReactBindings.useCallStateHooks();
+    const { speaker, selectedDevice, devices, isDeviceSelectionSupported } = useSpeakerState();
+    if (!isDeviceSelectionSupported)
+        return null;
+    return (jsxRuntime.jsx(DeviceSelector, { devices: devices, selectedDeviceId: selectedDevice, onChange: (deviceId) => {
+            speaker.select(deviceId);
+        }, title: title || t('Select Speakers') }));
+};
+
+const DeviceSelectorVideo = ({ title }) => {
+    const { t } = videoReactBindings.useI18n();
+    const { useCameraState } = videoReactBindings.useCallStateHooks();
+    const { camera, devices, selectedDevice } = useCameraState();
+    return (jsxRuntime.jsx(DeviceSelector, { devices: devices || [], selectedDeviceId: selectedDevice, onChange: async (deviceId) => {
+            await camera.select(deviceId);
+        }, title: title || t('Select a Camera') }));
+};
+
+const DeviceSettings = () => {
+    return (jsxRuntime.jsx(MenuToggle, { placement: "bottom-end", ToggleButton: ToggleMenuButton, children: jsxRuntime.jsx(Menu, {}) }));
+};
+const Menu = () => (jsxRuntime.jsxs("div", { className: "str-video__device-settings", children: [jsxRuntime.jsx(DeviceSelectorVideo, {}), jsxRuntime.jsx(DeviceSelectorAudioInput, {}), jsxRuntime.jsx(DeviceSelectorAudioOutput, {})] }));
+const ToggleMenuButton = react.forwardRef(({ menuShown }, ref) => {
+    const { t } = videoReactBindings.useI18n();
+    return (jsxRuntime.jsx(IconButton, { className: clsx('str-video__device-settings__button', {
+            'str-video__device-settings__button--active': menuShown,
+        }), title: t('Toggle device menu'), icon: "device-settings", ref: ref }));
+});
+
+const ToggleAudioPreviewButton = (props) => {
+    const { t } = videoReactBindings.useI18n();
+    const { caption = t('Mic'), Menu = DeviceSelectorAudioInput } = props;
+    const { useMicrophoneState } = videoReactBindings.useCallStateHooks();
+    const { microphone, isMute } = useMicrophoneState();
+    return (jsxRuntime.jsx(CompositeButton, { Menu: Menu, active: isMute, caption: caption || t('Mic'), children: jsxRuntime.jsx(IconButton, { icon: !isMute ? 'mic' : 'mic-off', onClick: () => microphone.toggle() }) }));
+};
+const ToggleAudioPublishingButton = (props) => {
+    const { t } = videoReactBindings.useI18n();
+    const { caption = t('Mic'), Menu = DeviceSelectorAudioInput } = props;
+    const { hasPermission, requestPermission, isAwaitingPermission } = useRequestPermission(videoClient.OwnCapability.SEND_AUDIO);
+    const { useMicrophoneState } = videoReactBindings.useCallStateHooks();
+    const { microphone, isMute } = useMicrophoneState();
+    return (jsxRuntime.jsx(videoReactBindings.Restricted, { requiredGrants: [videoClient.OwnCapability.SEND_AUDIO], children: jsxRuntime.jsx(PermissionNotification, { permission: videoClient.OwnCapability.SEND_AUDIO, isAwaitingApproval: isAwaitingPermission, messageApproved: t('You can now speak.'), messageAwaitingApproval: t('Awaiting for an approval to speak.'), messageRevoked: t('You can no longer speak.'), children: jsxRuntime.jsx(CompositeButton, { Menu: Menu, active: isMute, caption: caption, children: jsxRuntime.jsx(IconButton, { icon: isMute ? 'mic-off' : 'mic', onClick: async () => {
+            if (!hasPermission) {
+                await requestPermission();
+            }
+            else {
+                await microphone.toggle();
+            }
+        } }) }) }) }));
+};
+
+const ToggleVideoPreviewButton = (props) => {
+    const { t } = videoReactBindings.useI18n();
+    const { caption = t('Video'), Menu = DeviceSelectorVideo } = props;
+    const { useCameraState } = videoReactBindings.useCallStateHooks();
+    const { camera, isMute } = useCameraState();
+    return (jsxRuntime.jsx(CompositeButton, { Menu: Menu, active: isMute, caption: caption, children: jsxRuntime.jsx(IconButton, { icon: !isMute ? 'camera' : 'camera-off', onClick: () => camera.toggle() }) }));
+};
+const ToggleVideoPublishingButton = (props) => {
+    const { t } = videoReactBindings.useI18n();
+    const { caption = t('Video'), Menu = DeviceSelectorVideo } = props;
+    const { hasPermission, requestPermission, isAwaitingPermission } = useRequestPermission(videoClient.OwnCapability.SEND_VIDEO);
+    const { useCameraState } = videoReactBindings.useCallStateHooks();
+    const { camera, isMute } = useCameraState();
+    return (jsxRuntime.jsx(videoReactBindings.Restricted, { requiredGrants: [videoClient.OwnCapability.SEND_VIDEO], children: jsxRuntime.jsx(PermissionNotification, { permission: videoClient.OwnCapability.SEND_VIDEO, isAwaitingApproval: isAwaitingPermission, messageApproved: t('You can now share your video.'), messageAwaitingApproval: t('Awaiting for an approval to share your video.'), messageRevoked: t('You can no longer share your video.'), children: jsxRuntime.jsx(CompositeButton, { Menu: Menu, active: isMute, caption: caption, children: jsxRuntime.jsx(IconButton, { icon: isMute ? 'camera-off' : 'camera', onClick: async () => {
+            if (!hasPermission) {
+                await requestPermission();
+            }
+            else {
+                await camera.toggle();
+            }
+        } }) }) }) }));
+};
+
 const CancelCallButton = ({ disabled, onClick, onLeave, }) => {
     const call = videoReactBindings.useCall();
     const handleClick = react.useCallback(async (e) => {
@@ -1180,6 +950,8 @@ const CancelCallButton = ({ disabled, onClick, onLeave, }) => {
     return (jsxRuntime.jsx(IconButton, { disabled: disabled, icon: "call-end", variant: "danger", onClick: handleClick }));
 };
 
+const CallControls = ({ onLeave }) => (jsxRuntime.jsxs("div", { className: "str-video__call-controls", children: [jsxRuntime.jsx(RecordCallButton, {}), jsxRuntime.jsx(CallStatsButton, {}), jsxRuntime.jsx(ScreenShareButton, {}), jsxRuntime.jsx(SpeakingWhileMutedNotification, { children: jsxRuntime.jsx(ToggleAudioPublishingButton, {}) }), jsxRuntime.jsx(ToggleVideoPublishingButton, {}), jsxRuntime.jsx(CancelCallButton, { onLeave: onLeave })] }));
+
 const defaultEmojiReactionMap = {
     ':like:': '👍',
     ':raise-hand:': '✋',
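The new `CallControls` component composes the buttons introduced in this version (record, stats, screen share, audio/video publishing toggles, cancel). A usage sketch, assuming it is exported from the SDK entry point; `onLeave` is forwarded to the `CancelCallButton` shown above:

```tsx
import { CallControls } from '@stream-io/video-react-sdk';

// Illustrative active-call footer; StreamVideo/StreamCall providers are
// assumed to be mounted higher in the tree.
export const ActiveCallFooter = ({ onHangUp }: { onHangUp: () => void }) => (
  <CallControls onLeave={onHangUp} />
);
```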
@@ -1246,132 +1018,10 @@ const DefaultReactionsMenu = ({ reactions, }) => {
         }, children: reaction.emoji_code && defaultEmojiReactionMap[reaction.emoji_code] }, reaction.emoji_code))) }));
 };
 
-const LoadingIndicator = ({ className, type = 'spinner', text, tooltip, }) => {
-    return (jsxRuntime.jsxs("div", { className: clsx('str-video__loading-indicator', className), title: tooltip, children: [jsxRuntime.jsx("div", { className: clsx('str-video__loading-indicator__icon', type) }), text && jsxRuntime.jsx("p", { className: "str-video__loading-indicator-text", children: text })] }));
-};
-
-const RecordCallButton = ({ caption = 'Record', }) => {
-    const call = videoReactBindings.useCall();
-    const { t } = videoReactBindings.useI18n();
-    const { toggleCallRecording, isAwaitingResponse, isCallRecordingInProgress } = useToggleCallRecording();
-    return (jsxRuntime.jsx(videoReactBindings.Restricted, { requiredGrants: [
-            videoClient.OwnCapability.START_RECORD_CALL,
-            videoClient.OwnCapability.STOP_RECORD_CALL,
-        ], children: jsxRuntime.jsx(CompositeButton, { active: isCallRecordingInProgress, caption: caption, children: isAwaitingResponse ? (jsxRuntime.jsx(LoadingIndicator, { tooltip: isCallRecordingInProgress
-                ? t('Waiting for recording to stop...')
-                : t('Waiting for recording to start...') })) : (jsxRuntime.jsx(IconButton
-        // FIXME OL: sort out this ambiguity
-        , {
-            // FIXME OL: sort out this ambiguity
-            enabled: !!call, disabled: !call, icon: isCallRecordingInProgress ? 'recording-on' : 'recording-off', title: t('Record call'), onClick: toggleCallRecording })) }) }));
-};
-
-const ScreenShareButton = (props) => {
-    const call = videoReactBindings.useCall();
-    const { useHasOngoingScreenShare } = videoReactBindings.useCallStateHooks();
-    const isSomeoneScreenSharing = useHasOngoingScreenShare();
-    const { t } = videoReactBindings.useI18n();
-    const { caption = t('Screen Share') } = props;
-    const { toggleScreenShare, isAwaitingPermission, isScreenSharing } = useToggleScreenShare();
-    return (jsxRuntime.jsx(videoReactBindings.Restricted, { requiredGrants: [videoClient.OwnCapability.SCREENSHARE], children: jsxRuntime.jsx(PermissionNotification, { permission: videoClient.OwnCapability.SCREENSHARE, isAwaitingApproval: isAwaitingPermission, messageApproved: t('You can now share your screen.'), messageAwaitingApproval: t('Awaiting for an approval to share screen.'), messageRevoked: t('You can no longer share your screen.'), children: jsxRuntime.jsx(CompositeButton, { active: isSomeoneScreenSharing, caption: caption, children: jsxRuntime.jsx(IconButton, { icon: isScreenSharing ? 'screen-share-on' : 'screen-share-off', title: t('Share screen'), disabled: (!isScreenSharing && isSomeoneScreenSharing) || !call, onClick: toggleScreenShare }) }) }) }));
-};
-
-const DeviceSelectorOption = ({ disabled, id, label, onChange, name, selected, defaultChecked, value, }) => {
-    return (jsxRuntime.jsxs("label", { className: clsx('str-video__device-settings__option', {
-            'str-video__device-settings__option--selected': selected,
-            'str-video__device-settings__option--disabled': disabled,
-        }), htmlFor: id, children: [jsxRuntime.jsx("input", { type: "radio", name: name, onChange: onChange, value: value, id: id, checked: selected, defaultChecked: defaultChecked, disabled: disabled }), label] }));
-};
-const DeviceSelector = (props) => {
-    const { devices = [], selectedDeviceId: selectedDeviceFromProps, title, onChange, } = props;
-    const inputGroupName = title.replace(' ', '-').toLowerCase();
-    // sometimes the browser (Chrome) will report the system-default device
-    // with an id of 'default'. In case when it doesn't, we'll select the first
-    // available device.
-    let selectedDeviceId = selectedDeviceFromProps;
-    if (devices.length > 0 &&
-        !devices.find((d) => d.deviceId === selectedDeviceId)) {
-        selectedDeviceId = devices[0].deviceId;
-    }
-    return (jsxRuntime.jsxs("div", { className: "str-video__device-settings__device-kind", children: [jsxRuntime.jsx("div", { className: "str-video__device-settings__device-selector-title", children: title }), !devices.length ? (jsxRuntime.jsx(DeviceSelectorOption, { id: `${inputGroupName}--default`, label: "Default", name: inputGroupName, defaultChecked: true, value: "default" })) : (devices.map((device) => {
-            return (jsxRuntime.jsx(DeviceSelectorOption, { id: `${inputGroupName}--${device.deviceId}`, value: device.deviceId, label: device.label, onChange: (e) => {
-                    onChange?.(e.target.value);
-                }, name: inputGroupName, selected: device.deviceId === selectedDeviceId || devices.length === 1 }, device.deviceId));
-        }))] }));
-};
-
-const DeviceSelectorAudioInput = ({ title = 'Select a Mic', }) => {
-    const { selectedAudioInputDeviceId, switchDevice } = useMediaDevices();
-    const audioInputDevices = useAudioInputDevices();
-    return (jsxRuntime.jsx(DeviceSelector, { devices: audioInputDevices, selectedDeviceId: selectedAudioInputDeviceId, onChange: (deviceId) => {
-            switchDevice('audioinput', deviceId);
-        }, title: title }));
-};
-const DeviceSelectorAudioOutput = ({ title = 'Select Speakers', }) => {
-    const { isAudioOutputChangeSupported, selectedAudioOutputDeviceId, switchDevice, } = useMediaDevices();
-    const audioOutputDevices = useAudioOutputDevices();
-    if (!isAudioOutputChangeSupported)
-        return null;
-    return (jsxRuntime.jsx(DeviceSelector, { devices: audioOutputDevices, selectedDeviceId: selectedAudioOutputDeviceId, onChange: (deviceId) => {
-            switchDevice('audiooutput', deviceId);
-        }, title: title }));
-};
-
-const DeviceSelectorVideo = ({ title }) => {
-    const { selectedVideoDeviceId, switchDevice } = useMediaDevices();
-    const videoDevices = useVideoDevices();
-    return (jsxRuntime.jsx(DeviceSelector, { devices: videoDevices, selectedDeviceId: selectedVideoDeviceId, onChange: (deviceId) => {
-            switchDevice('videoinput', deviceId);
-        }, title: title || 'Select a Camera' }));
-};
-
-const DeviceSettings = () => {
-    return (jsxRuntime.jsx(MenuToggle, { placement: "bottom-end", ToggleButton: ToggleMenuButton, children: jsxRuntime.jsx(Menu, {}) }));
-};
-const Menu = () => (jsxRuntime.jsxs("div", { className: "str-video__device-settings", children: [jsxRuntime.jsx(DeviceSelectorVideo, {}), jsxRuntime.jsx(DeviceSelectorAudioInput, {}), jsxRuntime.jsx(DeviceSelectorAudioOutput, {})] }));
-const ToggleMenuButton = react.forwardRef(({ menuShown }, ref) => {
-    const { t } = videoReactBindings.useI18n();
-    return (jsxRuntime.jsx(IconButton, { className: clsx('str-video__device-settings__button', {
-            'str-video__device-settings__button--active': menuShown,
-        }), title: t('Toggle device menu'), icon: "device-settings", ref: ref }));
-});
-
-const ToggleAudioPreviewButton = (props) => {
-    const { initialAudioEnabled, toggleInitialAudioMuteState } = useMediaDevices();
-    const { t } = videoReactBindings.useI18n();
-    const { caption = t('Mic'), Menu = DeviceSelectorAudioInput } = props;
-    return (jsxRuntime.jsx(CompositeButton, { Menu: Menu, active: !initialAudioEnabled, caption: caption || t('Mic'), children: jsxRuntime.jsx(IconButton, { icon: initialAudioEnabled ? 'mic' : 'mic-off', onClick: toggleInitialAudioMuteState }) }));
-};
-const ToggleAudioPublishingButton = (props) => {
-    const { useLocalParticipant } = videoReactBindings.useCallStateHooks();
-    const localParticipant = useLocalParticipant();
-    const { t } = videoReactBindings.useI18n();
-    const { caption = t('Mic'), Menu = DeviceSelectorAudioInput } = props;
-    const isAudioMute = !localParticipant?.publishedTracks.includes(videoClient.SfuModels.TrackType.AUDIO);
-    const { toggleAudioMuteState: handleClick, isAwaitingPermission } = useToggleAudioMuteState();
-    return (jsxRuntime.jsx(videoReactBindings.Restricted, { requiredGrants: [videoClient.OwnCapability.SEND_AUDIO], children: jsxRuntime.jsx(PermissionNotification, { permission: videoClient.OwnCapability.SEND_AUDIO, isAwaitingApproval: isAwaitingPermission, messageApproved: t('You can now speak.'), messageAwaitingApproval: t('Awaiting for an approval to speak.'), messageRevoked: t('You can no longer speak.'), children: jsxRuntime.jsx(CompositeButton, { Menu: Menu, active: isAudioMute, caption: caption, children: jsxRuntime.jsx(IconButton, { icon: isAudioMute ? 'mic-off' : 'mic', onClick: handleClick }) }) }) }));
-};
|
|
1354
|
-
|
|
1355
1021
|
const ToggleAudioOutputButton = (props) => {
|
|
1356
1022
|
const { t } = videoReactBindings.useI18n();
|
|
1357
1023
|
const { caption = t('Speakers'), Menu = DeviceSelectorAudioOutput } = props;
|
|
1358
|
-
return (jsxRuntime.jsx(CompositeButton, { Menu: Menu,
|
|
1359
|
-
};
|
|
1360
|
-
|
|
1361
|
-
const ToggleVideoPreviewButton = (props) => {
|
|
1362
|
-
const { toggleInitialVideoMuteState, initialVideoState } = useMediaDevices();
|
|
1363
|
-
const { t } = videoReactBindings.useI18n();
|
|
1364
|
-
const { caption = t('Video'), Menu = DeviceSelectorVideo } = props;
|
|
1365
|
-
return (jsxRuntime.jsx(CompositeButton, { Menu: Menu, active: !initialVideoState.enabled, caption: caption, children: jsxRuntime.jsx(IconButton, { icon: initialVideoState.enabled ? 'camera' : 'camera-off', onClick: toggleInitialVideoMuteState }) }));
|
|
1366
|
-
};
|
|
1367
|
-
const ToggleVideoPublishingButton = (props) => {
|
|
1368
|
-
const { useLocalParticipant } = videoReactBindings.useCallStateHooks();
|
|
1369
|
-
const localParticipant = useLocalParticipant();
|
|
1370
|
-
const { t } = videoReactBindings.useI18n();
|
|
1371
|
-
const { caption = t('Video'), Menu = DeviceSelectorVideo } = props;
|
|
1372
|
-
const isVideoMute = !localParticipant?.publishedTracks.includes(videoClient.SfuModels.TrackType.VIDEO);
|
|
1373
|
-
const { toggleVideoMuteState: handleClick, isAwaitingPermission } = useToggleVideoMuteState();
|
|
1374
|
-
return (jsxRuntime.jsx(videoReactBindings.Restricted, { requiredGrants: [videoClient.OwnCapability.SEND_VIDEO], children: jsxRuntime.jsx(PermissionNotification, { permission: videoClient.OwnCapability.SEND_VIDEO, isAwaitingApproval: isAwaitingPermission, messageApproved: t('You can now share your video.'), messageAwaitingApproval: t('Awaiting for an approval to share your video.'), messageRevoked: t('You can no longer share your video.'), children: jsxRuntime.jsx(CompositeButton, { Menu: Menu, active: isVideoMute, caption: caption, children: jsxRuntime.jsx(IconButton, { icon: isVideoMute ? 'camera-off' : 'camera', onClick: handleClick }) }) }) }));
|
|
1024
|
+
return (jsxRuntime.jsx(CompositeButton, { Menu: Menu, caption: caption, children: jsxRuntime.jsx(IconButton, { icon: "speaker" }) }));
|
|
1375
1025
|
};
|
|
1376
1026
|
|
|
1377
1027
|
const BlockedUserListing = ({ data }) => {
|
|
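
The device selector components and preview toggle buttons removed above were all built on the useMediaDevices context. In 0.4.x the equivalent information is read from call state (see the useCameraState() destructuring in the reworked VideoPreview further down in this diff), and device switching goes through the call object's device controllers. A minimal sketch, assuming the bindings hooks are re-exported from the package entry point and that the camera controller exposes a select(deviceId) method; only the screenShare controller's disable() is visible in this diff, so treat the select call as an assumption:

    import { useCall, useCallStateHooks } from '@stream-io/video-react-sdk';

    // Illustrative replacement for the removed DeviceSelectorVideo/useMediaDevices pair.
    export const CameraSelector = () => {
      const call = useCall();
      const { useCameraState } = useCallStateHooks();
      // `devices` is part of the camera state destructured by the new VideoPreview below.
      const { devices } = useCameraState();
      return (
        <select
          // `call.camera.select(deviceId)` is assumed here and not shown in this diff.
          onChange={(e) => {
            call?.camera.select(e.target.value);
          }}
        >
          {(devices ?? []).map((d) => (
            <option key={d.deviceId} value={d.deviceId}>
              {d.label}
            </option>
          ))}
        </select>
      );
    };
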
@@ -1640,7 +1290,7 @@ const BlockedUsersSearchResults = ({ blockedUsersSearchFn: blockedUsersSearchFnF
 const ToggleButton$1 = react.forwardRef((props, ref) => {
 return jsxRuntime.jsx(IconButton, { enabled: props.menuShown, icon: "filter", ref: ref });
 });
-const InviteLinkButton = react.forwardRef(({ className, ...props }, ref) => (jsxRuntime.jsxs("button", { ...props, className: clsx
+const InviteLinkButton = react.forwardRef(({ className, ...props }, ref) => (jsxRuntime.jsxs("button", { ...props, className: clsx('str-video__invite-link-button', className), ref: ref, children: [jsxRuntime.jsx("div", { className: "str-video__invite-participant-icon" }), jsxRuntime.jsx("div", { className: "str-video__invite-link-button__text", children: "Invite Link" })] })));

 const CallPreview = (props) => {
 const { className, style } = props;
@@ -1831,74 +1481,31 @@ const StreamTheme = ({ as: Component = 'div', className, children, ...props }) =
 };

 const DefaultDisabledVideoPreview = () => {
-
+const { t } = videoReactBindings.useI18n();
+return jsxRuntime.jsx("div", { children: t('Video is disabled') });
 };
 const DefaultNoCameraPreview = () => {
-
-};
-
-
-};
-const
-const [stream, setStream] = react.useState();
-const { selectedVideoDeviceId, getVideoStream, initialVideoState, setInitialVideoState, } = useMediaDevices();
-// When there are 0 video devices (e.g. when laptop lid closed),
-// we do not restart the video automatically when the device is again available,
-// but rather leave turning the video on manually to the user.
-useOnUnavailableVideoDevices(() => setInitialVideoState(DEVICE_STATE.stopped));
-const videoDevices = useVideoDevices();
-react.useEffect(() => {
-if (!initialVideoState.enabled)
-return;
-getVideoStream({ deviceId: selectedVideoDeviceId })
-.then((s) => {
-setStream((previousStream) => {
-if (previousStream) {
-videoClient.disposeOfMediaStream(previousStream);
-}
-return s;
-});
-})
-.catch((e) => setInitialVideoState({
-...DEVICE_STATE.error,
-message: e.message,
-}));
-return () => {
-setStream(undefined);
-};
-}, [
-initialVideoState,
-getVideoStream,
-selectedVideoDeviceId,
-setInitialVideoState,
-videoDevices.length,
-]);
-react.useEffect(() => {
-if (initialVideoState.type === 'stopped') {
-setStream(undefined);
-}
-}, [initialVideoState]);
-const handleOnPlay = react.useCallback(() => {
-setInitialVideoState(DEVICE_STATE.playing);
-}, [setInitialVideoState]);
+const { t } = videoReactBindings.useI18n();
+return jsxRuntime.jsx("div", { children: t('No camera found') });
+};
+const VideoPreview = ({ className, mirror = true, DisabledVideoPreview = DefaultDisabledVideoPreview, NoCameraPreview = DefaultNoCameraPreview, StartingCameraPreview = LoadingIndicator, }) => {
+const { useCameraState } = videoReactBindings.useCallStateHooks();
+const { devices, status, isMute, mediaStream } = useCameraState();
 let contents;
-if (
-contents = jsxRuntime.jsx(VideoErrorPreview, {});
-}
-else if (initialVideoState.type === 'stopped' && !videoDevices.length) {
+if (isMute && devices?.length === 0) {
 contents = jsxRuntime.jsx(NoCameraPreview, {});
 }
-else if (
-const loading =
-contents = (jsxRuntime.jsxs(jsxRuntime.Fragment, { children: [
+else if (status === 'enabled') {
+const loading = !mediaStream;
+contents = (jsxRuntime.jsxs(jsxRuntime.Fragment, { children: [mediaStream && (jsxRuntime.jsx(BaseVideo, { stream: mediaStream, className: clsx('str-video__video-preview', {
 'str-video__video-preview--mirror': mirror,
 'str-video__video-preview--loading': loading,
-})
+}) })), loading && jsxRuntime.jsx(StartingCameraPreview, {})] }));
 }
 else {
 contents = jsxRuntime.jsx(DisabledVideoPreview, {});
 }
-return (jsxRuntime.jsx("div", { className: clsx('str-video__video-preview-container'), children: contents }));
+return (jsxRuntime.jsx("div", { className: clsx('str-video__video-preview-container', className), children: contents }));
 };

 const DebugParticipantPublishQuality = (props) => {
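
The reworked VideoPreview above no longer acquires and disposes its own MediaStream; it renders whatever the camera state reports and keeps the 0.3.x override slots. A usage sketch based only on the props destructured in this hunk (component and class names are illustrative):

    import { VideoPreview } from '@stream-io/video-react-sdk';

    // Any React component works for the NoCameraPreview slot.
    const NoCamera = () => <div>No camera detected</div>;

    export const LobbyPreview = () => (
      // mirror, className and NoCameraPreview are the props shown in the
      // 0.4.x VideoPreview signature; the stream itself now comes from
      // the call's camera state rather than from component-local state.
      <VideoPreview
        mirror
        className="my-lobby-preview"
        NoCameraPreview={NoCamera}
      />
    );
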
@@ -2010,15 +1617,32 @@ const unwrapStats = (rawStats) => {
 return decodedStats;
 };

+const useQueryParams = () => {
+return react.useMemo(() => typeof window === 'undefined'
+? null
+: new URLSearchParams(window.location.search), []);
+};
+/**
+* Internal purpose hook. Enables certain development mode tools.
+*/
+const useIsDebugMode = () => {
+const params = useQueryParams();
+return !!params?.get('debug');
+};
+
+const ParticipantViewContext = react.createContext(undefined);
+const useParticipantViewContext = () => react.useContext(ParticipantViewContext);
+
 const ToggleButton = react.forwardRef((props, ref) => {
 return jsxRuntime.jsx(IconButton, { enabled: props.menuShown, icon: "ellipsis", ref: ref });
 });
 const DefaultScreenShareOverlay = () => {
 const call = videoReactBindings.useCall();
+const { t } = videoReactBindings.useI18n();
 const stopScreenShare = () => {
-call?.
+call?.screenShare.disable();
 };
-return (jsxRuntime.jsxs("div", { className: "str-video__screen-share-overlay", children: [jsxRuntime.jsx(Icon, { icon: "screen-share-off" }), jsxRuntime.jsx("span", { className: "str-video__screen-share-overlay__title", children:
+return (jsxRuntime.jsxs("div", { className: "str-video__screen-share-overlay", children: [jsxRuntime.jsx(Icon, { icon: "screen-share-off" }), jsxRuntime.jsx("span", { className: "str-video__screen-share-overlay__title", children: t('You are presenting your screen') }), jsxRuntime.jsxs("button", { onClick: stopScreenShare, className: "str-video__screen-share-overlay__button", children: [jsxRuntime.jsx(Icon, { icon: "close" }), " ", t('Stop Screen Sharing')] })] }));
 };
 const DefaultParticipantViewUI = ({ indicatorsVisible = true, menuPlacement = 'bottom-end', showMenuButton = true, }) => {
 const { participant, participantViewElement, trackType, videoElement } = useParticipantViewContext();
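
ParticipantViewContext and useParticipantViewContext now sit next to these UI components and are exported from the bundle (see the exports hunk at the end of this file), so a custom ParticipantViewUI can read the same values DefaultParticipantViewUI destructures above. A sketch limited to fields visible in this diff; the participant type is assumed to be re-exported from the package entry point:

    import {
      ParticipantView,
      useParticipantViewContext,
      type StreamVideoParticipant,
    } from '@stream-io/video-react-sdk';

    // Reads the context fields shown above: participant and trackType.
    const NameOnlyUI = () => {
      const { participant, trackType } = useParticipantViewContext();
      return (
        <span>
          {participant.name || participant.userId} ({trackType})
        </span>
      );
    };

    // Passed through the ParticipantViewUI prop from the ParticipantView signature below.
    export const MinimalTile = (props: { participant: StreamVideoParticipant }) => (
      <ParticipantView participant={props.participant} ParticipantViewUI={<NameOnlyUI />} />
    );
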
@@ -2042,13 +1666,11 @@ const ParticipantDetails = ({ indicatorsVisible = true, }) => {
 const canUnpin = !!pin && pin.isLocalPin;
 const isDebugMode = useIsDebugMode();
 return (jsxRuntime.jsxs("div", { className: "str-video__participant-details", children: [jsxRuntime.jsxs("span", { className: "str-video__participant-details__name", children: [name || userId, indicatorsVisible && isDominantSpeaker && (jsxRuntime.jsx("span", { className: "str-video__participant-details__name--dominant_speaker", title: "Dominant speaker" })), indicatorsVisible && (jsxRuntime.jsx(Notification, { isVisible: isLocalParticipant &&
-connectionQuality === videoClient.SfuModels.ConnectionQuality.POOR, message: "Poor connection quality. Please check your internet connection.", children: connectionQualityAsString && (jsxRuntime.jsx("span", { className: clsx
+connectionQuality === videoClient.SfuModels.ConnectionQuality.POOR, message: "Poor connection quality. Please check your internet connection.", children: connectionQualityAsString && (jsxRuntime.jsx("span", { className: clsx('str-video__participant-details__connection-quality', `str-video__participant-details__connection-quality--${connectionQualityAsString}`), title: connectionQualityAsString })) })), indicatorsVisible && !hasAudio && (jsxRuntime.jsx("span", { className: "str-video__participant-details__name--audio-muted" })), indicatorsVisible && !hasVideo && (jsxRuntime.jsx("span", { className: "str-video__participant-details__name--video-muted" })), indicatorsVisible && canUnpin && (
 // TODO: remove this monstrosity once we have a proper design
 jsxRuntime.jsx("span", { title: "Unpin", onClick: () => call?.unpin(sessionId), style: { cursor: 'pointer' }, className: "str-video__participant-details__name--pinned" }))] }), isDebugMode && (jsxRuntime.jsxs(jsxRuntime.Fragment, { children: [jsxRuntime.jsx(DebugParticipantPublishQuality, { participant: participant, call: call }), jsxRuntime.jsx(DebugStatsView, { call: call, sessionId: sessionId, userId: userId, mediaStream: videoStream })] }))] }));
 };

-const ParticipantViewContext = react.createContext(undefined);
-const useParticipantViewContext = () => react.useContext(ParticipantViewContext);
 const ParticipantView = react.forwardRef(({ participant, trackType = 'videoTrack', muteAudio, refs: { setVideoElement, setVideoPlaceholderElement } = {}, className, VideoPlaceholder, ParticipantViewUI = DefaultParticipantViewUI, }, ref) => {
 const { isLocalParticipant, isSpeaking, isDominantSpeaker, publishedTracks, sessionId, } = participant;
 const hasAudio = publishedTracks.includes(videoClient.SfuModels.TrackType.AUDIO);
@@ -2092,172 +1714,8 @@ const ParticipantView = react.forwardRef(({ participant, trackType = 'videoTrack
 }, className: clsx('str-video__participant-view', isDominantSpeaker && 'str-video__participant-view--dominant-speaker', isSpeaking && 'str-video__participant-view--speaking', !hasVideo && 'str-video__participant-view--no-video', !hasAudio && 'str-video__participant-view--no-audio', className), children: jsxRuntime.jsxs(ParticipantViewContext.Provider, { value: participantViewContextValue, children: [!isLocalParticipant && !muteAudio && (jsxRuntime.jsxs(jsxRuntime.Fragment, { children: [hasAudio && (jsxRuntime.jsx(Audio, { participant: participant, trackType: "audioTrack" })), hasScreenShareAudio && (jsxRuntime.jsx(Audio, { participant: participant, trackType: "screenShareAudioTrack" }))] })), jsxRuntime.jsx(Video$1, { VideoPlaceholder: VideoPlaceholder, participant: participant, trackType: trackType, refs: videoRefs, autoPlay: true }), isComponentType(ParticipantViewUI) ? (jsxRuntime.jsx(ParticipantViewUI, {})) : (ParticipantViewUI)] }) }));
 });

-
-
-playing: 'stopped',
-stopped: 'starting',
-uninitialized: 'starting',
-error: 'starting',
-};
-/**
-* Exclude types from documentation site, but we should still add doc comments
-* @internal
-*/
-const DEVICE_STATE = {
-starting: { type: 'starting', enabled: true },
-playing: { type: 'playing', enabled: true },
-stopped: { type: 'stopped', enabled: false },
-uninitialized: { type: 'uninitialized', enabled: false },
-error: { type: 'error', message: '', enabled: false },
-};
-const DEFAULT_DEVICE_ID = 'default';
-const MediaDevicesContext = react.createContext(null);
-/**
-* Context provider that internally puts in place mechanisms that:
-* 1. fall back to selecting a default device when trying to switch to a non-existent device
-* 2. fall back to a default device when an active device is disconnected
-* 3. stop publishing a media stream when a non-default device is disconnected
-* 4. republish a media stream from the newly connected default device
-* 5. republish a media stream when a new device is selected
-*
-* Provides `MediaDevicesContextAPI` that allow the integrators to handle:
-* 1. the initial device state enablement (for example apt for lobby scenario)
-* 2. media stream retrieval and disposal
-* 3. media stream publishing
-* 4. specific device selection
-* @param params
-* @returns
-*
-* @category Device Management
-*/
-const MediaDevicesProvider = ({ children, initialAudioEnabled, initialVideoEnabled, initialVideoInputDeviceId = DEFAULT_DEVICE_ID, initialAudioOutputDeviceId = DEFAULT_DEVICE_ID, initialAudioInputDeviceId = DEFAULT_DEVICE_ID, }) => {
-const call = videoReactBindings.useCall();
-const { useCallCallingState, useCallState, useCallSettings } = videoReactBindings.useCallStateHooks();
-const callingState = useCallCallingState();
-const callState = useCallState();
-const { localParticipant$ } = callState;
-const hasBrowserPermissionVideoInput = useHasBrowserPermissions('camera');
-const hasBrowserPermissionAudioInput = useHasBrowserPermissions('microphone');
-const [selectedAudioInputDeviceId, selectAudioInputDeviceId] = react.useState(initialAudioInputDeviceId);
-const [selectedAudioOutputDeviceId, selectAudioOutputDeviceId] = react.useState(initialAudioOutputDeviceId);
-const [selectedVideoDeviceId, selectVideoDeviceId] = react.useState(initialVideoInputDeviceId);
-const [isAudioOutputChangeSupported] = react.useState(() => videoClient.checkIfAudioOutputChangeSupported());
-const [initAudioEnabled, setInitialAudioEnabled] = react.useState(!!initialAudioEnabled);
-const [initialVideoState, setInitialVideoState] = react.useState(() => initialVideoEnabled ? DEVICE_STATE.starting : DEVICE_STATE.uninitialized);
-const settings = useCallSettings();
-react.useEffect(() => {
-if (!settings)
-return;
-const { audio, video } = settings;
-if (typeof initialAudioEnabled === 'undefined' && audio.mic_default_on) {
-setInitialAudioEnabled(audio.mic_default_on);
-}
-if (typeof initialVideoEnabled === 'undefined' && video.camera_default_on) {
-setInitialVideoState(DEVICE_STATE.starting);
-}
-}, [initialAudioEnabled, initialVideoEnabled, settings]);
-const publishVideoStream = useVideoPublisher({
-initialVideoMuted: !initialVideoState.enabled,
-videoDeviceId: selectedVideoDeviceId,
-});
-const publishAudioStream = useAudioPublisher({
-initialAudioMuted: !initAudioEnabled,
-audioDeviceId: selectedAudioInputDeviceId,
-});
-const stopPublishingAudio = react.useCallback(async () => {
-if (callingState === videoClient.CallingState.IDLE ||
-callingState === videoClient.CallingState.RINGING) {
-setInitialAudioEnabled(false);
-}
-else {
-call?.stopPublish(videoClient.SfuModels.TrackType.AUDIO);
-}
-}, [call, callingState]);
-const stopPublishingVideo = react.useCallback(async () => {
-if (callingState === videoClient.CallingState.IDLE ||
-callingState === videoClient.CallingState.RINGING) {
-setInitialVideoState(DEVICE_STATE.stopped);
-}
-else {
-call?.stopPublish(videoClient.SfuModels.TrackType.VIDEO);
-}
-}, [call, callingState]);
-const toggleInitialAudioMuteState = react.useCallback(() => setInitialAudioEnabled((prev) => !prev), []);
-const toggleInitialVideoMuteState = react.useCallback(() => setInitialVideoState((prev) => {
-const newType = DEVICE_STATE_TOGGLE[prev.type];
-return DEVICE_STATE[newType];
-}), []);
-const switchDevice = react.useCallback((kind, deviceId) => {
-if (kind === 'videoinput') {
-selectVideoDeviceId(deviceId);
-}
-if (kind === 'audioinput') {
-selectAudioInputDeviceId(deviceId);
-}
-if (kind === 'audiooutput') {
-selectAudioOutputDeviceId(deviceId);
-}
-}, []);
-useAudioInputDeviceFallback(() => switchDevice('audioinput', DEFAULT_DEVICE_ID), hasBrowserPermissionAudioInput, selectedAudioInputDeviceId);
-useAudioOutputDeviceFallback(() => switchDevice('audiooutput', DEFAULT_DEVICE_ID),
-// audiooutput devices can be enumerated only with microphone permissions
-hasBrowserPermissionAudioInput, selectedAudioOutputDeviceId);
-useVideoDeviceFallback(() => switchDevice('videoinput', DEFAULT_DEVICE_ID), hasBrowserPermissionVideoInput, selectedVideoDeviceId);
-react.useEffect(() => {
-if (!call || callingState !== videoClient.CallingState.JOINED)
-return;
-call.setAudioOutputDevice(selectedAudioOutputDeviceId);
-}, [call, callingState, selectedAudioOutputDeviceId]);
-react.useEffect(() => {
-// audiooutput devices can be enumerated only with microphone permissions
-if (!localParticipant$ || !hasBrowserPermissionAudioInput)
-return;
-const subscription = videoClient.watchForDisconnectedAudioOutputDevice(localParticipant$.pipe(rxjs.map((p) => p?.audioOutputDeviceId))).subscribe(async () => {
-selectAudioOutputDeviceId(DEFAULT_DEVICE_ID);
-});
-return () => {
-subscription.unsubscribe();
-};
-}, [hasBrowserPermissionAudioInput, localParticipant$]);
-const contextValue = {
-disposeOfMediaStream: videoClient.disposeOfMediaStream,
-getAudioStream: videoClient.getAudioStream,
-getVideoStream: videoClient.getVideoStream,
-isAudioOutputChangeSupported,
-selectedAudioInputDeviceId,
-selectedAudioOutputDeviceId,
-selectedVideoDeviceId,
-switchDevice,
-initialAudioEnabled: initAudioEnabled,
-initialVideoState,
-setInitialAudioEnabled,
-setInitialVideoState,
-toggleInitialAudioMuteState,
-toggleInitialVideoMuteState,
-publishAudioStream,
-publishVideoStream,
-stopPublishingAudio,
-stopPublishingVideo,
-};
-return (jsxRuntime.jsx(MediaDevicesContext.Provider, { value: contextValue, children: children }));
-};
-/**
-* Context consumer retrieving MediaDevicesContextAPI.
-* @returns
-*
-* @category Device Management
-*/
-const useMediaDevices = () => {
-const value = react.useContext(MediaDevicesContext);
-if (!value) {
-console.warn(`Null MediaDevicesContext`);
-}
-return value;
-};
-
-const StreamCall = ({ children, call, mediaDevicesProviderProps, }) => {
-return (jsxRuntime.jsx(videoReactBindings.StreamCallProvider, { call: call, children: jsxRuntime.jsx(MediaDevicesProvider, { ...mediaDevicesProviderProps, children: children }) }));
-};
+// re-exporting the StreamCallProvider as StreamCall
+const StreamCall = videoReactBindings.StreamCallProvider;

 var Joining = "Joining";
 var Mic = "Mic";
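
With StreamCall reduced to a plain re-export of StreamCallProvider, the component no longer mounts a MediaDevicesProvider and the mediaDevicesProviderProps prop is gone. A minimal sketch of the 0.4.x wrapper, assuming StreamCall remains exported from the package entry point; MyCallUI stands in for application code:

    import { StreamCall, type Call } from '@stream-io/video-react-sdk';

    // Hypothetical placeholder for application-specific call UI.
    const MyCallUI = () => <div>call UI goes here</div>;

    // 0.4.x: StreamCall is StreamCallProvider, so only `call` (and children) remain;
    // the 0.3.x `mediaDevicesProviderProps` prop shown in the removed code no longer exists.
    export const CallScreen = ({ call }: { call: Call }) => (
      <StreamCall call={call}>
        <MyCallUI />
      </StreamCall>
    );
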
@@ -2319,6 +1777,8 @@ var en = {
 Me: Me,
 Unknown: Unknown,
 "Toggle device menu": "Toggle device menu",
+"You are presenting your screen": "You are presenting your screen",
+"Stop Screen Sharing": "Stop Screen Sharing",
 Allow: Allow,
 Revoke: Revoke,
 Dismiss: Dismiss,
@@ -2326,6 +1786,8 @@ var en = {
 "Microphone off": "Microphone off",
 "Camera on": "Camera on",
 "Camera off": "Camera off",
+"No camera found": "No camera found",
+"Video is disabled": "Video is disabled",
 Pinned: Pinned,
 Unpin: Unpin,
 Pin: Pin,
@@ -2635,7 +2097,7 @@ const VerticalScrollButtons = ({ scrollWrapper, }) => {
 };
 const hasScreenShare = (p) => !!p?.publishedTracks.includes(videoClient.SfuModels.TrackType.SCREEN_SHARE);

-const [major, minor, patch] = ("0.
+const [major, minor, patch] = ("0.4.1" ).split('.');
 videoClient.setSdkInfo({
 type: videoClient.SfuModels.SdkType.REACT,
 major,
@@ -2661,7 +2123,6 @@ exports.CancelCallButton = CancelCallButton;
 exports.CompositeButton = CompositeButton;
 exports.CopyToClipboardButton = CopyToClipboardButton;
 exports.CopyToClipboardButtonWithPopup = CopyToClipboardButtonWithPopup;
-exports.DEVICE_STATE = DEVICE_STATE;
 exports.DefaultParticipantViewUI = DefaultParticipantViewUI;
 exports.DefaultReactionsMenu = DefaultReactionsMenu;
 exports.DefaultScreenShareOverlay = DefaultScreenShareOverlay;
@@ -2679,13 +2140,13 @@ exports.IconButton = IconButton;
 exports.LivestreamLayout = LivestreamLayout;
 exports.LoadingCallRecordingListing = LoadingCallRecordingListing;
 exports.LoadingIndicator = LoadingIndicator;
-exports.MediaDevicesProvider = MediaDevicesProvider;
 exports.MenuToggle = MenuToggle;
 exports.Notification = Notification;
 exports.PaginatedGridLayout = PaginatedGridLayout;
 exports.ParticipantActionsContextMenu = ParticipantActionsContextMenu;
 exports.ParticipantDetails = ParticipantDetails;
 exports.ParticipantView = ParticipantView;
+exports.ParticipantViewContext = ParticipantViewContext;
 exports.ParticipantsAudio = ParticipantsAudio;
 exports.PermissionNotification = PermissionNotification;
 exports.PermissionRequestList = PermissionRequestList;
@@ -2714,28 +2175,13 @@ exports.VideoPreview = VideoPreview;
 exports.WithTooltip = WithTooltip;
 exports.defaultReactions = defaultReactions;
 exports.translations = translations;
-exports.useAudioInputDeviceFallback = useAudioInputDeviceFallback;
-exports.useAudioInputDevices = useAudioInputDevices;
-exports.useAudioOutputDeviceFallback = useAudioOutputDeviceFallback;
-exports.useAudioOutputDevices = useAudioOutputDevices;
-exports.useAudioPublisher = useAudioPublisher;
-exports.useDeviceFallback = useDeviceFallback;
-exports.useDevices = useDevices;
 exports.useHasBrowserPermissions = useHasBrowserPermissions;
 exports.useHorizontalScrollPosition = useHorizontalScrollPosition;
-exports.useMediaDevices = useMediaDevices;
-exports.useOnUnavailableAudioInputDevices = useOnUnavailableAudioInputDevices;
-exports.useOnUnavailableAudioOutputDevices = useOnUnavailableAudioOutputDevices;
-exports.useOnUnavailableDevices = useOnUnavailableDevices;
-exports.useOnUnavailableVideoDevices = useOnUnavailableVideoDevices;
 exports.useParticipantViewContext = useParticipantViewContext;
-exports.
-exports.
+exports.usePersistedDevicePreferences = usePersistedDevicePreferences;
+exports.useRequestPermission = useRequestPermission;
 exports.useTrackElementVisibility = useTrackElementVisibility;
 exports.useVerticalScrollPosition = useVerticalScrollPosition;
-exports.useVideoDeviceFallback = useVideoDeviceFallback;
-exports.useVideoDevices = useVideoDevices;
-exports.useVideoPublisher = useVideoPublisher;
 Object.keys(videoClient).forEach(function (k) {
 if (k !== 'default' && !Object.prototype.hasOwnProperty.call(exports, k)) Object.defineProperty(exports, k, {
 enumerable: true,
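
Taken together, the exports hunks above summarize the public-API shift in this release: the MediaDevices-era hooks (useMediaDevices, useDevices, the publisher and fallback hooks) are no longer exported from the CJS bundle, while ParticipantViewContext, usePersistedDevicePreferences and useRequestPermission are new. Migrating import statements is mostly mechanical; a sketch using only names visible in the exports list above:

    // Available in 0.4.1 (all present in the exports list above):
    import {
      ParticipantView,
      ParticipantViewContext,
      VideoPreview,
      useParticipantViewContext,
      usePersistedDevicePreferences,
      useRequestPermission,
    } from '@stream-io/video-react-sdk';

    // Removed in 0.4.1 - these imports no longer resolve:
    // import { useMediaDevices, useVideoDevices, MediaDevicesProvider } from '@stream-io/video-react-sdk';
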