@selfcommunity/react-ui 0.11.0-alpha.29 → 0.11.0-alpha.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -17,6 +17,8 @@ const track_processors_1 = require("@livekit/track-processors");
 const LiveStreamSettingsMenu_1 = tslib_1.__importDefault(require("./LiveStreamSettingsMenu"));
 const utils_1 = require("@selfcommunity/utils");
 const LiveStream_1 = require("../../../constants/LiveStream");
+const react_intl_1 = require("react-intl");
+const notistack_1 = require("notistack");
 /** @alpha */
 function usePreviewTracks(options, onError) {
     const [tracks, setTracks] = React.useState();
@@ -175,6 +177,7 @@ function PreJoin(_a) {
         preventSave: !persistUserChoices,
         preventLoad: !persistUserChoices
     });
+    const { enqueueSnackbar } = (0, notistack_1.useSnackbar)();
     // Initialize device settings
     const [audioEnabled, setAudioEnabled] = React.useState(initialUserChoices.audioEnabled && canUseAudio);
     const [videoEnabled, setVideoEnabled] = React.useState(initialUserChoices.videoEnabled && canUseVideo);
@@ -182,7 +185,7 @@ function PreJoin(_a) {
     const [videoDeviceId, setVideoDeviceId] = React.useState(initialUserChoices.videoDeviceId);
     const [username, setUsername] = React.useState(initialUserChoices.username);
     // Processors
-    const [blurEnabled, setBlurEnabled] = React.useState((0, utils_1.isClientSideRendering)() ? Boolean((_b = window === null || window === void 0 ? void 0 : window.localStorage) === null || _b === void 0 ? void 0 : _b.getItem(LiveStream_1.CHOICE_VIDEO_BLUR_EFFECT)) || false : false);
+    const [blurEnabled, setBlurEnabled] = React.useState((0, utils_1.isClientSideRendering)() ? ((_b = window === null || window === void 0 ? void 0 : window.localStorage) === null || _b === void 0 ? void 0 : _b.getItem(LiveStream_1.CHOICE_VIDEO_BLUR_EFFECT)) === 'true' : false);
     const [processorPending, setProcessorPending] = React.useState(false);
     // Save user choices to persistent storage.
     React.useEffect(() => {
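The hunk above swaps `Boolean(localStorage.getItem(...)) || false` for a strict `=== 'true'` comparison. `localStorage` only stores strings, so the previous check treated a persisted `"false"` as truthy and re-enabled the blur effect. A minimal sketch of the corrected read pattern (the `readBoolFlag` helper is illustrative, not part of the package):

```ts
// Read a boolean preference that was persisted to localStorage as a string.
// getItem() returns string | null; only the literal string 'true' counts as enabled,
// so a stored 'false' no longer coerces to true the way Boolean('false') did.
function readBoolFlag(key: string): boolean {
  if (typeof window === 'undefined' || !window.localStorage) {
    return false; // SSR or storage unavailable
  }
  return window.localStorage.getItem(key) === 'true';
}

// Usage (constant name taken from the diff):
// const blurEnabled = readBoolFlag(CHOICE_VIDEO_BLUR_EFFECT);
```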
@@ -240,8 +243,9 @@ function PreJoin(_a) {
     }, [onValidate]);
     const handleBlur = React.useCallback(() => {
         var _a;
-        setBlurEnabled((enabled) => !enabled);
-        (_a = window === null || window === void 0 ? void 0 : window.localStorage) === null || _a === void 0 ? void 0 : _a.setItem(LiveStream_1.CHOICE_VIDEO_BLUR_EFFECT, (!blurEnabled).toString());
+        const _blur = !blurEnabled;
+        setBlurEnabled(_blur);
+        (_a = window === null || window === void 0 ? void 0 : window.localStorage) === null || _a === void 0 ? void 0 : _a.setItem(LiveStream_1.CHOICE_VIDEO_BLUR_EFFECT, _blur.toString());
     }, [setBlurEnabled, blurEnabled]);
     (0, react_1.useEffect)(() => {
         const newUserChoices = {
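`handleBlur` now derives the next value once and feeds it to both the state update and the persisted string, rather than mixing a functional updater with a read of the closed-over `blurEnabled`. A sketch of that toggle-and-persist shape as a standalone hook (the `usePersistedToggle` name is hypothetical):

```ts
import { useCallback, useState } from 'react';

// Hypothetical hook illustrating the pattern used in handleBlur above:
// compute the next value once, then apply it to React state and to
// localStorage so the two cannot drift apart within a single toggle.
function usePersistedToggle(key: string, initial: boolean) {
  const [enabled, setEnabled] = useState(initial);
  const toggle = useCallback(() => {
    const next = !enabled;
    setEnabled(next);
    window?.localStorage?.setItem(key, next.toString());
  }, [enabled, key]);
  return [enabled, toggle] as const;
}
```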
@@ -255,6 +259,7 @@ function PreJoin(_a) {
         setIsValid(handleValidation(newUserChoices));
     }, [username, scUserContext.user, videoEnabled, handleValidation, audioEnabled, audioDeviceId, videoDeviceId]);
     (0, react_1.useEffect)(() => {
+        var _a;
         if (videoTrack && videoEnabled) {
             setProcessorPending(true);
             try {
@@ -265,6 +270,15 @@ function PreJoin(_a) {
                     videoTrack.stopProcessor();
                 }
             }
+            catch (e) {
+                console.log(e);
+                setBlurEnabled(false);
+                (_a = window === null || window === void 0 ? void 0 : window.localStorage) === null || _a === void 0 ? void 0 : _a.setItem(LiveStream_1.CHOICE_VIDEO_BLUR_EFFECT, false.toString());
+                enqueueSnackbar((0, jsx_runtime_1.jsx)(react_intl_1.FormattedMessage, { id: "ui.liveStreamRoom.errorApplyVideoEffect", defaultMessage: "ui.contributionActionMenu.errorApplyVideoEffect" }), {
+                    variant: 'warning',
+                    autoHideDuration: 3000
+                });
+            }
             finally {
                 setProcessorPending(false);
             }
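The new `catch` block rolls the preference back (`setBlurEnabled(false)` plus persisting `'false'`) and raises a transient warning through the two newly imported packages, notistack and react-intl. In source form this roughly corresponds to the sketch below (assuming a `SnackbarProvider` and `IntlProvider` ancestor; the `defaultMessage` string is copied verbatim from the diff):

```tsx
import { useSnackbar } from 'notistack';
import { FormattedMessage } from 'react-intl';

// Sketch of the warning raised when applying/removing the blur processor fails.
// Requires <SnackbarProvider> and <IntlProvider> higher in the component tree.
function useVideoEffectWarning() {
  const { enqueueSnackbar } = useSnackbar();
  return () =>
    enqueueSnackbar(
      <FormattedMessage
        id="ui.liveStreamRoom.errorApplyVideoEffect"
        defaultMessage="ui.contributionActionMenu.errorApplyVideoEffect"
      />,
      { variant: 'warning', autoHideDuration: 3000 } // auto-dismiss after 3 s
    );
}
```

The same catch/rollback/notify block is added to the VideoConference component and mirrored in the ESM build, as the remaining hunks show.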
@@ -23,6 +23,8 @@ const track_processors_1 = require("@livekit/track-processors");
 const utils_1 = require("@selfcommunity/utils");
 const LiveStream_1 = require("../../../constants/LiveStream");
 const Icon_1 = tslib_1.__importDefault(require("@mui/material/Icon"));
+const notistack_1 = require("notistack");
+const react_intl_1 = require("react-intl");
 const PREFIX = 'SCVideoConference';
 const classes = {
     root: `${PREFIX}-root`
@@ -60,7 +62,7 @@ function VideoConference(inProps) {
     const lastAutoFocusedScreenShareTrack = React.useRef(null);
     // HOOKS
     const scUserContext = (0, react_core_1.useSCUser)();
-    const [blurEnabled, setBlurEnabled] = React.useState((0, utils_1.isClientSideRendering)() ? Boolean((_a = window === null || window === void 0 ? void 0 : window.localStorage) === null || _a === void 0 ? void 0 : _a.getItem(LiveStream_1.CHOICE_VIDEO_BLUR_EFFECT)) || false : false);
+    const [blurEnabled, setBlurEnabled] = React.useState((0, utils_1.isClientSideRendering)() ? ((_a = window === null || window === void 0 ? void 0 : window.localStorage) === null || _a === void 0 ? void 0 : _a.getItem(LiveStream_1.CHOICE_VIDEO_BLUR_EFFECT)) === 'true' : false);
     const [processorPending, setProcessorPending] = React.useState(false);
     const tracks = (0, components_react_1.useTracks)([
         { source: livekit_client_1.Track.Source.Camera, withPlaceholder: true },
@@ -88,6 +90,7 @@ function VideoConference(inProps) {
     const focusTrack = (_b = (0, components_react_1.usePinnedTracks)(layoutContext)) === null || _b === void 0 ? void 0 : _b[0];
     const carouselTracks = tracks.filter((track) => !(0, components_core_1.isEqualTrackRef)(track, focusTrack));
     const { cameraTrack } = (0, components_react_1.useLocalParticipant)();
+    const { enqueueSnackbar } = (0, notistack_1.useSnackbar)();
     (0, useLiveStreamCheck_1.useLivestreamCheck)();
     /**
      * widgetUpdate
@@ -167,6 +170,7 @@ function VideoConference(inProps) {
         }
     }, [tracks, participants, speakerFocused]);
     (0, react_1.useEffect)(() => {
+        var _a;
         const localCamTrack = cameraTrack === null || cameraTrack === void 0 ? void 0 : cameraTrack.track;
         if (localCamTrack) {
             setProcessorPending(true);
@@ -178,6 +182,15 @@ function VideoConference(inProps) {
                     localCamTrack.stopProcessor();
                 }
             }
+            catch (e) {
+                console.log(e);
+                setBlurEnabled(false);
+                (_a = window === null || window === void 0 ? void 0 : window.localStorage) === null || _a === void 0 ? void 0 : _a.setItem(LiveStream_1.CHOICE_VIDEO_BLUR_EFFECT, false.toString());
+                enqueueSnackbar((0, jsx_runtime_1.jsx)(react_intl_1.FormattedMessage, { id: "ui.liveStreamRoom.errorApplyVideoEffect", defaultMessage: "ui.contributionActionMenu.errorApplyVideoEffect" }), {
+                    variant: 'warning',
+                    autoHideDuration: 3000
+                });
+            }
             finally {
                 setProcessorPending(false);
             }
@@ -14,6 +14,8 @@ import { BackgroundBlur } from '@livekit/track-processors';
 import LiveStreamSettingsMenu from './LiveStreamSettingsMenu';
 import { isClientSideRendering } from '@selfcommunity/utils';
 import { CHOICE_VIDEO_BLUR_EFFECT } from '../../../constants/LiveStream';
+import { FormattedMessage } from 'react-intl';
+import { useSnackbar } from 'notistack';
 /** @alpha */
 export function usePreviewTracks(options, onError) {
     const [tracks, setTracks] = React.useState();
@@ -170,6 +172,7 @@ export function PreJoin(_a) {
         preventSave: !persistUserChoices,
         preventLoad: !persistUserChoices
     });
+    const { enqueueSnackbar } = useSnackbar();
     // Initialize device settings
     const [audioEnabled, setAudioEnabled] = React.useState(initialUserChoices.audioEnabled && canUseAudio);
     const [videoEnabled, setVideoEnabled] = React.useState(initialUserChoices.videoEnabled && canUseVideo);
@@ -177,7 +180,7 @@ export function PreJoin(_a) {
     const [videoDeviceId, setVideoDeviceId] = React.useState(initialUserChoices.videoDeviceId);
     const [username, setUsername] = React.useState(initialUserChoices.username);
     // Processors
-    const [blurEnabled, setBlurEnabled] = React.useState(isClientSideRendering() ? Boolean((_b = window === null || window === void 0 ? void 0 : window.localStorage) === null || _b === void 0 ? void 0 : _b.getItem(CHOICE_VIDEO_BLUR_EFFECT)) || false : false);
+    const [blurEnabled, setBlurEnabled] = React.useState(isClientSideRendering() ? ((_b = window === null || window === void 0 ? void 0 : window.localStorage) === null || _b === void 0 ? void 0 : _b.getItem(CHOICE_VIDEO_BLUR_EFFECT)) === 'true' : false);
     const [processorPending, setProcessorPending] = React.useState(false);
     // Save user choices to persistent storage.
     React.useEffect(() => {
@@ -235,8 +238,9 @@ export function PreJoin(_a) {
     }, [onValidate]);
     const handleBlur = React.useCallback(() => {
         var _a;
-        setBlurEnabled((enabled) => !enabled);
-        (_a = window === null || window === void 0 ? void 0 : window.localStorage) === null || _a === void 0 ? void 0 : _a.setItem(CHOICE_VIDEO_BLUR_EFFECT, (!blurEnabled).toString());
+        const _blur = !blurEnabled;
+        setBlurEnabled(_blur);
+        (_a = window === null || window === void 0 ? void 0 : window.localStorage) === null || _a === void 0 ? void 0 : _a.setItem(CHOICE_VIDEO_BLUR_EFFECT, _blur.toString());
     }, [setBlurEnabled, blurEnabled]);
     useEffect(() => {
         const newUserChoices = {
@@ -250,6 +254,7 @@ export function PreJoin(_a) {
         setIsValid(handleValidation(newUserChoices));
     }, [username, scUserContext.user, videoEnabled, handleValidation, audioEnabled, audioDeviceId, videoDeviceId]);
     useEffect(() => {
+        var _a;
         if (videoTrack && videoEnabled) {
             setProcessorPending(true);
             try {
@@ -260,6 +265,15 @@ export function PreJoin(_a) {
                     videoTrack.stopProcessor();
                 }
             }
+            catch (e) {
+                console.log(e);
+                setBlurEnabled(false);
+                (_a = window === null || window === void 0 ? void 0 : window.localStorage) === null || _a === void 0 ? void 0 : _a.setItem(CHOICE_VIDEO_BLUR_EFFECT, false.toString());
+                enqueueSnackbar(_jsx(FormattedMessage, { id: "ui.liveStreamRoom.errorApplyVideoEffect", defaultMessage: "ui.contributionActionMenu.errorApplyVideoEffect" }), {
+                    variant: 'warning',
+                    autoHideDuration: 3000
+                });
+            }
             finally {
                 setProcessorPending(false);
             }
@@ -20,6 +20,8 @@ import { BackgroundBlur } from '@livekit/track-processors';
 import { isClientSideRendering } from '@selfcommunity/utils';
 import { CHOICE_VIDEO_BLUR_EFFECT } from '../../../constants/LiveStream';
 import Icon from '@mui/material/Icon';
+import { useSnackbar } from 'notistack';
+import { FormattedMessage } from 'react-intl';
 const PREFIX = 'SCVideoConference';
 const classes = {
     root: `${PREFIX}-root`
@@ -57,7 +59,7 @@ export function VideoConference(inProps) {
     const lastAutoFocusedScreenShareTrack = React.useRef(null);
     // HOOKS
     const scUserContext = useSCUser();
-    const [blurEnabled, setBlurEnabled] = React.useState(isClientSideRendering() ? Boolean((_a = window === null || window === void 0 ? void 0 : window.localStorage) === null || _a === void 0 ? void 0 : _a.getItem(CHOICE_VIDEO_BLUR_EFFECT)) || false : false);
+    const [blurEnabled, setBlurEnabled] = React.useState(isClientSideRendering() ? ((_a = window === null || window === void 0 ? void 0 : window.localStorage) === null || _a === void 0 ? void 0 : _a.getItem(CHOICE_VIDEO_BLUR_EFFECT)) === 'true' : false);
     const [processorPending, setProcessorPending] = React.useState(false);
     const tracks = useTracks([
         { source: Track.Source.Camera, withPlaceholder: true },
@@ -85,6 +87,7 @@ export function VideoConference(inProps) {
     const focusTrack = (_b = usePinnedTracks(layoutContext)) === null || _b === void 0 ? void 0 : _b[0];
     const carouselTracks = tracks.filter((track) => !isEqualTrackRef(track, focusTrack));
     const { cameraTrack } = useLocalParticipant();
+    const { enqueueSnackbar } = useSnackbar();
     useLivestreamCheck();
     /**
      * widgetUpdate
@@ -164,6 +167,7 @@ export function VideoConference(inProps) {
         }
     }, [tracks, participants, speakerFocused]);
     useEffect(() => {
+        var _a;
         const localCamTrack = cameraTrack === null || cameraTrack === void 0 ? void 0 : cameraTrack.track;
         if (localCamTrack) {
             setProcessorPending(true);
@@ -175,6 +179,15 @@ export function VideoConference(inProps) {
                     localCamTrack.stopProcessor();
                 }
             }
+            catch (e) {
+                console.log(e);
+                setBlurEnabled(false);
+                (_a = window === null || window === void 0 ? void 0 : window.localStorage) === null || _a === void 0 ? void 0 : _a.setItem(CHOICE_VIDEO_BLUR_EFFECT, false.toString());
+                enqueueSnackbar(_jsx(FormattedMessage, { id: "ui.liveStreamRoom.errorApplyVideoEffect", defaultMessage: "ui.contributionActionMenu.errorApplyVideoEffect" }), {
+                    variant: 'warning',
+                    autoHideDuration: 3000
+                });
+            }
             finally {
                 setProcessorPending(false);
             }