@stream-io/video-react-sdk 0.6.15 → 0.6.17

This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,24 @@
 
  This file was generated using [@jscutlery/semver](https://github.com/jscutlery/semver).
 
+ ### [0.6.17](https://github.com/GetStream/stream-video-js/compare/@stream-io/video-react-sdk-0.6.16...@stream-io/video-react-sdk-0.6.17) (2024-05-03)
+
+ ### Dependency Updates
+
+ * `@stream-io/video-client` updated to version `0.7.11`
+ * `@stream-io/video-react-bindings` updated to version `0.4.22`
+
+ ### Bug Fixes
+
+ * **devices:** API to disable speaking while muted notifications ([#1335](https://github.com/GetStream/stream-video-js/issues/1335)) ([cdff0e0](https://github.com/GetStream/stream-video-js/commit/cdff0e036bf4afca763e4f7a1563c23e806be190)), closes [#1329](https://github.com/GetStream/stream-video-js/issues/1329)
+
+ ### [0.6.16](https://github.com/GetStream/stream-video-js/compare/@stream-io/video-react-sdk-0.6.15...@stream-io/video-react-sdk-0.6.16) (2024-05-02)
+
+
+ ### Bug Fixes
+
+ * synchronize background filter canvas capturing ([#1334](https://github.com/GetStream/stream-video-js/issues/1334)) ([eaead81](https://github.com/GetStream/stream-video-js/commit/eaead818f9f404cf647efdbc11707fc3b58b2459))
+
  ### [0.6.15](https://github.com/GetStream/stream-video-js/compare/@stream-io/video-react-sdk-0.6.14...@stream-io/video-react-sdk-0.6.15) (2024-04-30)
 
  ### Dependency Updates
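For context on the `devices` fix in 0.6.17: the changelog entry only names the new capability, so the snippet below is a hypothetical usage sketch. It assumes the microphone manager from `@stream-io/video-client` 0.7.11 gained a method along the lines of `disableSpeakingWhileMutedNotification()`; the exact name and shape are assumptions, so consult the released API reference before relying on it.

```tsx
import { useCallStateHooks } from '@stream-io/video-react-sdk';

// Hypothetical usage sketch for the 0.6.17 "devices" fix (#1335).
// The method name on the microphone manager is an assumption drawn
// from the changelog wording, not confirmed API.
export const SpeakingWhileMutedToggle = () => {
  const { useMicrophoneState } = useCallStateHooks();
  const { microphone } = useMicrophoneState();

  return (
    <button
      onClick={() => {
        // Assumed to exist as of @stream-io/video-client 0.7.11.
        (microphone as any).disableSpeakingWhileMutedNotification?.();
      }}
    >
      Disable "speaking while muted" notifications
    </button>
  );
};
```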
package/dist/index.cjs.js CHANGED
@@ -138,45 +138,56 @@ const BackgroundFilters = (props) => {
  return;
  const register = (unregister.current || Promise.resolve()).then(() => call.camera.registerFilter(async (ms) => {
  return new Promise((resolve) => {
- setMediaStream(ms);
  signalFilterReadyRef.current = resolve;
+ setMediaStream(ms);
  });
  }));
  return () => {
  unregister.current = register
  .then((unregisterFilter) => unregisterFilter())
+ .then(() => (signalFilterReadyRef.current = undefined))
  .then(() => setMediaStream(undefined))
  .catch((err) => console.error('Failed to unregister filter', err));
  };
  }, [backgroundFilter, call]);
+ const [isPlaying, setIsPlaying] = react.useState(false);
  react.useEffect(() => {
- if (!mediaStream || !videoRef || !canvasRef)
+ if (!mediaStream || !videoRef)
  return;
  const handleOnPlay = () => {
  const [track] = mediaStream.getVideoTracks();
- if (track) {
- const { width: w = 0, height: h = 0 } = track.getSettings();
- setWidth(w);
- setHeight(h);
- }
- const resolveFilter = signalFilterReadyRef.current;
- if (!resolveFilter)
+ if (!track)
  return;
- const filter = canvasRef.captureStream();
- resolveFilter(filter);
+ const { width: w = 0, height: h = 0 } = track.getSettings();
+ setWidth(w);
+ setHeight(h);
+ setIsPlaying(true);
  };
  videoRef.addEventListener('play', handleOnPlay);
  videoRef.srcObject = mediaStream;
- videoRef.play().catch((err) => console.error('Failed to play video', err));
+ videoRef.play().catch((err) => {
+ console.error('Failed to play video', err);
+ });
  return () => {
  videoRef.removeEventListener('play', handleOnPlay);
  videoRef.srcObject = null;
+ setIsPlaying(false);
+ };
+ }, [mediaStream, videoRef]);
+ react.useEffect(() => {
+ const resolveFilter = signalFilterReadyRef.current;
+ if (!canvasRef || !resolveFilter)
+ return;
+ const filter = canvasRef.captureStream();
+ resolveFilter(filter);
+ return () => {
+ videoClient.disposeOfMediaStream(filter);
  };
- }, [canvasRef, mediaStream, videoRef]);
+ }, [canvasRef]);
  return (jsxRuntime.jsxs("div", { className: "str-video__background-filters", style: {
  width: `${width}px`,
  height: `${height}px`,
- }, children: [mediaStream && (jsxRuntime.jsx(RenderPipeline, { tfLite: tfLite, videoRef: videoRef, canvasRef: canvasRef, backgroundImageRef: bgImageRef })), jsxRuntime.jsx("video", { className: clsx('str-video__background-filters__video', height > width && 'str-video__background-filters__video--tall'), ref: setVideoRef, autoPlay: true, playsInline: true, controls: false, width: width, height: height, muted: true, loop: true }), backgroundImage && (jsxRuntime.jsx("img", { className: "str-video__background-filters__background-image", alt: "Background", ref: setBgImageRef, src: backgroundImage, width: width, height: height }, backgroundImage)), jsxRuntime.jsx("canvas", { className: "str-video__background-filters__target-canvas", width: width, height: height, ref: setCanvasRef }, `key-${width}${height}`)] }));
+ }, children: [mediaStream && isPlaying && (jsxRuntime.jsx(RenderPipeline, { tfLite: tfLite, videoRef: videoRef, canvasRef: canvasRef, backgroundImageRef: bgImageRef })), jsxRuntime.jsx("video", { className: clsx('str-video__background-filters__video', height > width && 'str-video__background-filters__video--tall'), ref: setVideoRef, autoPlay: true, playsInline: true, controls: false, width: width, height: height, muted: true, loop: true }), backgroundImage && (jsxRuntime.jsx("img", { className: "str-video__background-filters__background-image", alt: "Background", ref: setBgImageRef, src: backgroundImage, width: width, height: height }, backgroundImage)), isPlaying && (jsxRuntime.jsx("canvas", { className: "str-video__background-filters__target-canvas", width: width, height: height, ref: setCanvasRef }))] }));
  };
  const RenderPipeline = (props) => {
  const { tfLite, videoRef, canvasRef, backgroundImageRef } = props;
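The hunk above reworks how the background-filter pipeline hands its output back to the camera: the registered filter's promise is now resolved with `canvas.captureStream()` only after the hidden `<video>` element has fired `play` and the target canvas has mounted, and the capture stream is disposed on cleanup. Below is a minimal, framework-free sketch of that ordering using only standard DOM APIs; names are illustrative and it is not the SDK's internal code.

```ts
// Simplified sketch of the "synchronize canvas capturing" pattern
// seen in the diff above; not the SDK's implementation.
function registerCanvasFilter(
  video: HTMLVideoElement,
  canvas: HTMLCanvasElement,
  sourceStream: MediaStream,
): Promise<MediaStream> {
  return new Promise((resolve) => {
    video.addEventListener(
      'play',
      () => {
        // Only start capturing once frames are actually flowing,
        // so the filtered stream does not begin with blank frames.
        resolve(canvas.captureStream());
      },
      { once: true },
    );
    video.srcObject = sourceStream;
    video.play().catch((err) => console.error('Failed to play video', err));
  });
}
```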
@@ -296,6 +307,8 @@ const useApplyDevicePreferences = (key) => {
  react.useEffect(() => {
  if (!call || !settings)
  return;
+ if (call.state.callingState === videoClient.CallingState.LEFT)
+ return;
  const apply = async () => {
  const initMic = async (setting) => {
  await call.microphone.select(setting.selectedDeviceId);
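The `useApplyDevicePreferences` hunk adds a guard so stored device preferences are not re-applied to a call the user has already left. The same check can be expressed with the SDK's public hooks; a brief sketch follows, where the hook name and prop are illustrative rather than part of the SDK.

```tsx
import { CallingState, useCall, useCallStateHooks } from '@stream-io/video-react-sdk';
import { useEffect } from 'react';

// Illustrative sketch: skip device side effects once the call has been
// left, mirroring the CallingState.LEFT guard added in the hunk above.
export const useApplyPreferredMicrophone = (deviceId?: string) => {
  const call = useCall();
  const { useCallCallingState } = useCallStateHooks();
  const callingState = useCallCallingState();

  useEffect(() => {
    if (!call || !deviceId) return;
    if (callingState === CallingState.LEFT) return;
    call.microphone.select(deviceId).catch((err) => {
      console.error('Failed to select microphone', err);
    });
  }, [call, callingState, deviceId]);
};
```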
@@ -2497,7 +2510,7 @@ const VerticalScrollButtons = ({ scrollWrapper, }) => {
  };
  const hasScreenShare = (p) => !!p?.publishedTracks.includes(videoClient.SfuModels.TrackType.SCREEN_SHARE);
 
- const [major, minor, patch] = ("0.6.15" ).split('.');
+ const [major, minor, patch] = ("0.6.17" ).split('.');
  videoClient.setSdkInfo({
  type: videoClient.SfuModels.SdkType.REACT,
  major,