@stream-io/video-react-native-sdk 1.30.4 → 1.31.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/CHANGELOG.md +18 -0
  2. package/android/src/main/java/com/streamvideo/reactnative/StreamVideoReactNativeModule.kt +85 -0
  3. package/android/src/main/java/com/streamvideo/reactnative/screenshare/ScreenAudioCapture.kt +111 -0
  4. package/dist/commonjs/components/Call/CallControls/ScreenShareToggleButton.js +3 -2
  5. package/dist/commonjs/components/Call/CallControls/ScreenShareToggleButton.js.map +1 -1
  6. package/dist/commonjs/hooks/index.js +11 -0
  7. package/dist/commonjs/hooks/index.js.map +1 -1
  8. package/dist/commonjs/hooks/useScreenShareAudioMixing.js +126 -0
  9. package/dist/commonjs/hooks/useScreenShareAudioMixing.js.map +1 -0
  10. package/dist/commonjs/hooks/useScreenShareButton.js +57 -3
  11. package/dist/commonjs/hooks/useScreenShareButton.js.map +1 -1
  12. package/dist/commonjs/modules/ScreenShareAudioManager.js +54 -0
  13. package/dist/commonjs/modules/ScreenShareAudioManager.js.map +1 -0
  14. package/dist/commonjs/providers/StreamCall/index.js +11 -1
  15. package/dist/commonjs/providers/StreamCall/index.js.map +1 -1
  16. package/dist/commonjs/version.js +1 -1
  17. package/dist/module/components/Call/CallControls/ScreenShareToggleButton.js +3 -2
  18. package/dist/module/components/Call/CallControls/ScreenShareToggleButton.js.map +1 -1
  19. package/dist/module/hooks/index.js +1 -0
  20. package/dist/module/hooks/index.js.map +1 -1
  21. package/dist/module/hooks/useScreenShareAudioMixing.js +119 -0
  22. package/dist/module/hooks/useScreenShareAudioMixing.js.map +1 -0
  23. package/dist/module/hooks/useScreenShareButton.js +57 -3
  24. package/dist/module/hooks/useScreenShareButton.js.map +1 -1
  25. package/dist/module/modules/ScreenShareAudioManager.js +47 -0
  26. package/dist/module/modules/ScreenShareAudioManager.js.map +1 -0
  27. package/dist/module/providers/StreamCall/index.js +11 -1
  28. package/dist/module/providers/StreamCall/index.js.map +1 -1
  29. package/dist/module/version.js +1 -1
  30. package/dist/typescript/components/Call/CallControls/ScreenShareToggleButton.d.ts +6 -1
  31. package/dist/typescript/components/Call/CallControls/ScreenShareToggleButton.d.ts.map +1 -1
  32. package/dist/typescript/hooks/index.d.ts +1 -0
  33. package/dist/typescript/hooks/index.d.ts.map +1 -1
  34. package/dist/typescript/hooks/useScreenShareAudioMixing.d.ts +14 -0
  35. package/dist/typescript/hooks/useScreenShareAudioMixing.d.ts.map +1 -0
  36. package/dist/typescript/hooks/useScreenShareButton.d.ts +39 -2
  37. package/dist/typescript/hooks/useScreenShareButton.d.ts.map +1 -1
  38. package/dist/typescript/modules/ScreenShareAudioManager.d.ts +28 -0
  39. package/dist/typescript/modules/ScreenShareAudioManager.d.ts.map +1 -0
  40. package/dist/typescript/providers/StreamCall/index.d.ts.map +1 -1
  41. package/dist/typescript/version.d.ts +1 -1
  42. package/ios/StreamVideoReactNative.m +93 -4
  43. package/package.json +7 -7
  44. package/src/components/Call/CallControls/ScreenShareToggleButton.tsx +11 -1
  45. package/src/hooks/index.ts +1 -0
  46. package/src/hooks/useScreenShareAudioMixing.ts +130 -0
  47. package/src/hooks/useScreenShareButton.ts +87 -2
  48. package/src/modules/ScreenShareAudioManager.ts +49 -0
  49. package/src/providers/StreamCall/index.tsx +11 -0
  50. package/src/version.ts +1 -1
@@ -1,8 +1,41 @@
1
1
  import React from 'react';
2
+ /**
3
+ * The type of screen sharing to use on iOS.
4
+ *
5
+ * - `'broadcast'` — Uses a Broadcast Upload Extension (RPSystemBroadcastPickerView).
6
+ * Captures the entire device screen, works across all apps. Requires an extension target.
7
+ * - `'inApp'` — Uses RPScreenRecorder.startCapture to capture the current app's screen.
8
+ * Only captures the current app. Supports `.audioApp` sample buffers for audio mixing.
9
+ *
10
+ * On Android, this option is ignored — the system screen capture dialog is always used.
11
+ */
12
+ export type ScreenShareType = 'broadcast' | 'inApp';
13
+ /**
14
+ * Options for screen share behavior.
15
+ */
16
+ export type ScreenShareOptions = {
17
+ /**
18
+ * The type of screen sharing on iOS. Default: `'broadcast'`.
19
+ * On Android this is ignored.
20
+ */
21
+ type?: ScreenShareType;
22
+ /**
23
+ * Whether to capture and mix system/app audio into the microphone audio track.
24
+ * When `true`, remote participants will hear media audio from the shared screen
25
+ * (e.g., YouTube video audio) mixed with the user's microphone.
26
+ *
27
+ * - iOS in-app: Audio captured from RPScreenRecorder `.audioApp` buffers.
28
+ * - iOS broadcast: Audio mixing is **not** currently supported.
29
+ * - Android: Audio captured via AudioPlaybackCaptureConfiguration (API 29+).
30
+ *
31
+ * Default: `false`.
32
+ */
33
+ includeAudio?: boolean;
34
+ };
2
35
  export declare const useScreenShareButton: (
3
36
  /**
4
37
  * Ref of the ScreenCapturePickerView component.
5
- *
38
+ * Required for iOS broadcast screen sharing. Can be `null` for in-app mode.
6
39
  */
7
40
  screenCapturePickerViewiOSRef: React.MutableRefObject<any>,
8
41
  /**
@@ -19,7 +52,11 @@ onScreenShareStoppedHandler?: () => void,
19
52
  * Handler to be called when the permissions to stream screen share media is missing
20
53
  *
21
54
  */
22
- onMissingScreenShareStreamPermission?: () => void) => {
55
+ onMissingScreenShareStreamPermission?: () => void,
56
+ /**
57
+ * Options for screen share behavior (type, includeAudio).
58
+ */
59
+ screenShareOptions?: ScreenShareOptions) => {
23
60
  onPress: undefined;
24
61
  hasPublishedScreenShare: boolean;
25
62
  } | {
@@ -1 +1 @@
1
- {"version":3,"file":"useScreenShareButton.d.ts","sourceRoot":"","sources":["../../../src/hooks/useScreenShareButton.ts"],"names":[],"mappings":"AAMA,OAAO,KAA4B,MAAM,OAAO,CAAC;AAWjD,eAAO,MAAM,oBAAoB;AAC/B;;;GAGG;AACH,+BAA+B,KAAK,CAAC,gBAAgB,CAAC,GAAG,CAAC;AAC1D;;;GAGG;AACH,8BAA8B,MAAM,IAAI;AACxC;;;GAGG;AACH,8BAA8B,MAAM,IAAI;AACxC;;;GAGG;AACH,uCAAuC,MAAM,IAAI;;;;;;CAuFlD,CAAC"}
1
+ {"version":3,"file":"useScreenShareButton.d.ts","sourceRoot":"","sources":["../../../src/hooks/useScreenShareButton.ts"],"names":[],"mappings":"AAMA,OAAO,KAA4B,MAAM,OAAO,CAAC;AAMjD;;;;;;;;;GASG;AACH,MAAM,MAAM,eAAe,GAAG,WAAW,GAAG,OAAO,CAAC;AAEpD;;GAEG;AACH,MAAM,MAAM,kBAAkB,GAAG;IAC/B;;;OAGG;IACH,IAAI,CAAC,EAAE,eAAe,CAAC;IACvB;;;;;;;;;;OAUG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;CACxB,CAAC;AAQF,eAAO,MAAM,oBAAoB;AAC/B;;;GAGG;AACH,+BAA+B,KAAK,CAAC,gBAAgB,CAAC,GAAG,CAAC;AAC1D;;;GAGG;AACH,8BAA8B,MAAM,IAAI;AACxC;;;GAGG;AACH,8BAA8B,MAAM,IAAI;AACxC;;;GAGG;AACH,uCAAuC,MAAM,IAAI;AACjD;;GAEG;AACH,qBAAqB,kBAAkB;;;;;;CAoIxC,CAAC"}
@@ -0,0 +1,28 @@
1
+ export declare class ScreenShareAudioManager {
2
+ /**
3
+ * Starts mixing screen share audio into the microphone audio track.
4
+ * On iOS, this enables audio buffer processing on the prepared mixer.
5
+ * On Android, this registers an audio processor that captures system media
6
+ * audio via AudioPlaybackCaptureConfiguration and mixes it into the mic buffer.
7
+ */
8
+ startScreenShareAudioMixing(): Promise<void>;
9
+ /**
10
+ * Stops mixing screen share audio into the microphone audio track
11
+ * and restores the original audio pipeline.
12
+ */
13
+ stopScreenShareAudioMixing(): Promise<void>;
14
+ /**
15
+ * Starts in-app screen capture using RPScreenRecorder (iOS only).
16
+ * Unlike broadcast screen sharing, in-app capture runs in the main app process
17
+ * and can directly provide `.audioApp` sample buffers for mixing.
18
+ *
19
+ * @param includeAudio Whether to capture and mix app audio.
20
+ */
21
+ startInAppScreenCapture(includeAudio: boolean): Promise<void>;
22
+ /**
23
+ * Stops in-app screen capture (iOS only).
24
+ */
25
+ stopInAppScreenCapture(): Promise<void>;
26
+ }
27
+ export declare const screenShareAudioMixingManager: ScreenShareAudioManager;
28
+ //# sourceMappingURL=ScreenShareAudioManager.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"ScreenShareAudioManager.d.ts","sourceRoot":"","sources":["../../../src/modules/ScreenShareAudioManager.ts"],"names":[],"mappings":"AAIA,qBAAa,uBAAuB;IAClC;;;;;OAKG;IACG,2BAA2B,IAAI,OAAO,CAAC,IAAI,CAAC;IAIlD;;;OAGG;IACG,0BAA0B,IAAI,OAAO,CAAC,IAAI,CAAC;IAIjD;;;;;;OAMG;IACG,uBAAuB,CAAC,YAAY,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAOnE;;OAEG;IACG,sBAAsB,IAAI,OAAO,CAAC,IAAI,CAAC;CAM9C;AAED,eAAO,MAAM,6BAA6B,yBAAgC,CAAC"}
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/providers/StreamCall/index.tsx"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,EAAE,KAAK,iBAAiB,EAAa,MAAM,OAAO,CAAC;AACjE,OAAO,EAAE,IAAI,EAAE,MAAM,yBAAyB,CAAC;AAY/C,MAAM,MAAM,eAAe,GAAG;IAC5B;;;OAGG;IACH,IAAI,EAAE,IAAI,CAAC;CACZ,CAAC;AACF;;;;;;GAMG;AACH,eAAO,MAAM,UAAU,GAAI,qBAGxB,iBAAiB,CAAC,eAAe,CAAC,sBAWpC,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/providers/StreamCall/index.tsx"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,EAAE,KAAK,iBAAiB,EAAa,MAAM,OAAO,CAAC;AACjE,OAAO,EAAE,IAAI,EAAE,MAAM,yBAAyB,CAAC;AAa/C,MAAM,MAAM,eAAe,GAAG;IAC5B;;;OAGG;IACH,IAAI,EAAE,IAAI,CAAC;CACZ,CAAC;AACF;;;;;;GAMG;AACH,eAAO,MAAM,UAAU,GAAI,qBAGxB,iBAAiB,CAAC,eAAe,CAAC,sBAYpC,CAAC"}
@@ -1,2 +1,2 @@
1
- export declare const version = "1.30.4";
1
+ export declare const version = "1.31.0";
2
2
  //# sourceMappingURL=version.d.ts.map
@@ -7,9 +7,17 @@
7
7
  #import "StreamVideoReactNative.h"
8
8
  #import "WebRTCModule.h"
9
9
  #import "WebRTCModuleOptions.h"
10
+ #import "InAppScreenCapturer.h"
10
11
  #import <AVFoundation/AVFoundation.h>
11
12
  #import <AudioToolbox/AudioToolbox.h>
12
13
 
14
+ // Import Swift-generated header for ScreenShareAudioMixer
15
+ #if __has_include(<stream_react_native_webrtc/stream_react_native_webrtc-Swift.h>)
16
+ #import <stream_react_native_webrtc/stream_react_native_webrtc-Swift.h>
17
+ #elif __has_include("stream_react_native_webrtc-Swift.h")
18
+ #import "stream_react_native_webrtc-Swift.h"
19
+ #endif
20
+
13
21
  // Do not change these consts, it is what is used react-native-webrtc
14
22
  NSNotificationName const kBroadcastStartedNotification = @"iOS_BroadcastStarted";
15
23
  NSNotificationName const kBroadcastStoppedNotification = @"iOS_BroadcastStopped";
@@ -626,22 +634,22 @@ RCT_EXPORT_METHOD(stopBusyTone:(RCTPromiseResolveBlock)resolve rejecter:(RCTProm
626
634
 
627
635
  - (void)audioSessionInterrupted:(NSNotification *)notification {
628
636
  AVAudioSessionInterruptionType interruptionType = [notification.userInfo[AVAudioSessionInterruptionTypeKey] unsignedIntegerValue];
629
-
637
+
630
638
  switch (interruptionType) {
631
639
  case AVAudioSessionInterruptionTypeBegan:
632
640
  if (_busyTonePlayer && _busyTonePlayer.isPlaying) {
633
641
  [_busyTonePlayer pause];
634
642
  }
635
643
  break;
636
-
644
+
637
645
  case AVAudioSessionInterruptionTypeEnded: {
638
646
  AVAudioSessionInterruptionOptions options = [notification.userInfo[AVAudioSessionInterruptionOptionKey] unsignedIntegerValue];
639
-
647
+
640
648
  if (options & AVAudioSessionInterruptionOptionShouldResume) {
641
649
  // Reactivate audio session
642
650
  NSError *error = nil;
643
651
  [[AVAudioSession sharedInstance] setActive:YES error:&error];
644
-
652
+
645
653
  if (!error && _busyTonePlayer) {
646
654
  [_busyTonePlayer play];
647
655
  } else if (error) {
@@ -653,4 +661,85 @@ RCT_EXPORT_METHOD(stopBusyTone:(RCTPromiseResolveBlock)resolve rejecter:(RCTProm
653
661
  }
654
662
  }
655
663
 
664
+ #pragma mark - In-App Screen Capture
665
+
666
+ RCT_EXPORT_METHOD(startInAppScreenCapture:(BOOL)includeAudio
667
+ resolve:(RCTPromiseResolveBlock)resolve
668
+ reject:(RCTPromiseRejectBlock)reject)
669
+ {
670
+ WebRTCModuleOptions *options = [WebRTCModuleOptions sharedInstance];
671
+ options.useInAppScreenCapture = YES;
672
+ options.includeScreenShareAudio = includeAudio;
673
+ resolve(nil);
674
+ }
675
+
676
+ RCT_EXPORT_METHOD(stopInAppScreenCapture:(RCTPromiseResolveBlock)resolve
677
+ reject:(RCTPromiseRejectBlock)reject)
678
+ {
679
+ WebRTCModuleOptions *options = [WebRTCModuleOptions sharedInstance];
680
+ options.useInAppScreenCapture = NO;
681
+ options.includeScreenShareAudio = NO;
682
+ resolve(nil);
683
+ }
684
+
685
+ #pragma mark - Screen Share Audio Mixing
686
+
687
+ RCT_EXPORT_METHOD(startScreenShareAudioMixing:(RCTPromiseResolveBlock)resolve
688
+ reject:(RCTPromiseRejectBlock)reject)
689
+ {
690
+ WebRTCModule *webrtcModule = [self.bridge moduleForClass:[WebRTCModule class]];
691
+ WebRTCModuleOptions *options = [WebRTCModuleOptions sharedInstance];
692
+
693
+ ScreenShareAudioMixer *mixer = webrtcModule.audioDeviceModule.screenShareAudioMixer;
694
+
695
+ // Wire mixer as capturePostProcessingDelegate on the audio processing module.
696
+ id<RTCAudioProcessingModule> apmId = options.audioProcessingModule;
697
+ if (apmId && [apmId isKindOfClass:[RTCDefaultAudioProcessingModule class]]) {
698
+ RTCDefaultAudioProcessingModule *apm = (RTCDefaultAudioProcessingModule *)apmId;
699
+ apm.capturePostProcessingDelegate = mixer;
700
+ NSLog(@"[SSAMixer] Set capturePostProcessingDelegate on APM");
701
+ } else {
702
+ NSLog(@"[SSAMixer] WARNING: No RTCDefaultAudioProcessingModule available, mixing will not work");
703
+ }
704
+
705
+ [mixer startMixing];
706
+
707
+ // Wire audio buffer handler on the active capturer → mixer.enqueue
708
+ InAppScreenCapturer *capturer = options.activeInAppScreenCapturer;
709
+ if (capturer) {
710
+ capturer.audioBufferHandler = ^(CMSampleBufferRef sampleBuffer) {
711
+ [mixer enqueue:sampleBuffer];
712
+ };
713
+ }
714
+
715
+ resolve(nil);
716
+ }
717
+
718
+ RCT_EXPORT_METHOD(stopScreenShareAudioMixing:(RCTPromiseResolveBlock)resolve
719
+ reject:(RCTPromiseRejectBlock)reject)
720
+ {
721
+ WebRTCModule *webrtcModule = [self.bridge moduleForClass:[WebRTCModule class]];
722
+ WebRTCModuleOptions *options = [WebRTCModuleOptions sharedInstance];
723
+
724
+ // Stop feeding audio to the mixer
725
+ InAppScreenCapturer *capturer = options.activeInAppScreenCapturer;
726
+ if (capturer) {
727
+ capturer.audioBufferHandler = nil;
728
+ }
729
+
730
+ // Stop mixing
731
+ ScreenShareAudioMixer *mixer = webrtcModule.audioDeviceModule.screenShareAudioMixer;
732
+ [mixer stopMixing];
733
+
734
+ // Clear capturePostProcessingDelegate
735
+ id<RTCAudioProcessingModule> apmId = options.audioProcessingModule;
736
+ if (apmId && [apmId isKindOfClass:[RTCDefaultAudioProcessingModule class]]) {
737
+ RTCDefaultAudioProcessingModule *apm = (RTCDefaultAudioProcessingModule *)apmId;
738
+ apm.capturePostProcessingDelegate = nil;
739
+ NSLog(@"[SSAMixer] Cleared capturePostProcessingDelegate on APM");
740
+ }
741
+
742
+ resolve(nil);
743
+ }
744
+
656
745
  @end
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@stream-io/video-react-native-sdk",
3
- "version": "1.30.4",
3
+ "version": "1.31.0",
4
4
  "description": "Stream Video SDK for React Native",
5
5
  "author": "https://getstream.io",
6
6
  "homepage": "https://getstream.io/video/docs/react-native/",
@@ -50,8 +50,8 @@
50
50
  "!**/.*"
51
51
  ],
52
52
  "dependencies": {
53
- "@stream-io/video-client": "1.44.4",
54
- "@stream-io/video-react-bindings": "1.13.13",
53
+ "@stream-io/video-client": "1.44.5",
54
+ "@stream-io/video-react-bindings": "1.13.14",
55
55
  "intl-pluralrules": "2.0.1",
56
56
  "react-native-url-polyfill": "^3.0.0",
57
57
  "rxjs": "~7.8.2",
@@ -64,7 +64,7 @@
64
64
  "@react-native-firebase/app": ">=17.5.0",
65
65
  "@react-native-firebase/messaging": ">=17.5.0",
66
66
  "@stream-io/noise-cancellation-react-native": ">=0.1.0",
67
- "@stream-io/react-native-webrtc": ">=137.1.0",
67
+ "@stream-io/react-native-webrtc": ">=137.1.3",
68
68
  "@stream-io/video-filters-react-native": ">=0.1.0",
69
69
  "expo": ">=47.0.0",
70
70
  "expo-build-properties": "*",
@@ -129,9 +129,9 @@
129
129
  "@react-native-firebase/app": "^23.4.0",
130
130
  "@react-native-firebase/messaging": "^23.4.0",
131
131
  "@react-native/babel-preset": "^0.81.5",
132
- "@stream-io/noise-cancellation-react-native": "^0.5.1",
133
- "@stream-io/react-native-webrtc": "137.1.0",
134
- "@stream-io/video-filters-react-native": "^0.10.1",
132
+ "@stream-io/noise-cancellation-react-native": "^0.6.0",
133
+ "@stream-io/react-native-webrtc": "137.1.3",
134
+ "@stream-io/video-filters-react-native": "^0.11.0",
135
135
  "@testing-library/jest-native": "^5.4.3",
136
136
  "@testing-library/react-native": "13.3.3",
137
137
  "@tsconfig/node18": "^18.2.4",
@@ -5,7 +5,10 @@ import { ScreenShare } from '../../../icons/ScreenShare';
5
5
  import { StopScreenShare } from '../../../icons/StopScreenShare';
6
6
  import { CallControlsButton } from './CallControlsButton';
7
7
  import { useTheme } from '../../../contexts/ThemeContext';
8
- import { useScreenShareButton } from '../../../hooks/useScreenShareButton';
8
+ import {
9
+ useScreenShareButton,
10
+ type ScreenShareOptions,
11
+ } from '../../../hooks/useScreenShareButton';
9
12
  import { IconWrapper } from '../../../icons';
10
13
 
11
14
  /**
@@ -22,6 +25,10 @@ export type ScreenShareToggleButtonProps = {
22
25
  *
23
26
  */
24
27
  onScreenShareStoppedHandler?: () => void;
28
+ /**
29
+ * Options for screen share behavior (type, includeAudio).
30
+ */
31
+ screenShareOptions?: ScreenShareOptions;
25
32
  };
26
33
 
27
34
  /**
@@ -31,6 +38,7 @@ export type ScreenShareToggleButtonProps = {
31
38
  export const ScreenShareToggleButton = ({
32
39
  onScreenShareStartedHandler,
33
40
  onScreenShareStoppedHandler,
41
+ screenShareOptions,
34
42
  }: ScreenShareToggleButtonProps) => {
35
43
  const {
36
44
  theme: { colors, screenShareToggleButton, variants },
@@ -42,6 +50,8 @@ export const ScreenShareToggleButton = ({
42
50
  screenCapturePickerViewiOSRef,
43
51
  onScreenShareStartedHandler,
44
52
  onScreenShareStoppedHandler,
53
+ undefined,
54
+ screenShareOptions,
45
55
  );
46
56
 
47
57
  if (!onPress) return null;
@@ -6,6 +6,7 @@ export * from './useIsIosScreenshareBroadcastStarted';
6
6
  export * from './useIsInPiPMode';
7
7
  export * from './useAutoEnterPiPEffect';
8
8
  export * from './useScreenShareButton';
9
+ export * from './useScreenShareAudioMixing';
9
10
  export * from './useTrackDimensions';
10
11
  export * from './useScreenshot';
11
12
  export * from './useSpeechDetection';
@@ -0,0 +1,130 @@
1
+ import { useCallback, useEffect, useRef, useState } from 'react';
2
+ import { hasScreenShare, videoLoggerSystem } from '@stream-io/video-client';
3
+ import { useCall, useCallStateHooks } from '@stream-io/video-react-bindings';
4
+ import { screenShareAudioMixingManager } from '../modules/ScreenShareAudioManager';
5
+ import { NoiseCancellationWrapper } from '../providers/NoiseCancellation/lib';
6
+
7
+ const logger = videoLoggerSystem.getLogger('useScreenShareAudioMixing');
8
+
9
+ /**
10
+ * Tries to disable noise cancellation so screen audio passes through
11
+ * unfiltered. Returns true if NC was disabled (and should be re-enabled later).
12
+ */
13
+ async function disableNoiseCancellation(): Promise<boolean> {
14
+ try {
15
+ const nc = NoiseCancellationWrapper.getInstance();
16
+ const wasEnabled = await nc.isEnabled();
17
+ if (wasEnabled) {
18
+ await nc.disable();
19
+ logger.info('Noise cancellation disabled for screen share audio');
20
+ }
21
+ return wasEnabled;
22
+ } catch {
23
+ // NC module not installed or not configured — nothing to do
24
+ return false;
25
+ }
26
+ }
27
+
28
+ /**
29
+ * Re-enables noise cancellation if it was previously disabled.
30
+ */
31
+ async function restoreNoiseCancellation() {
32
+ try {
33
+ const nc = NoiseCancellationWrapper.getInstance();
34
+ await nc.enable();
35
+ logger.info('Noise cancellation re-enabled after screen share audio');
36
+ } catch {
37
+ // NC module not installed — nothing to do
38
+ }
39
+ }
40
+
41
+ /**
42
+ * Hook that manages the lifecycle of screen share audio mixing.
43
+ *
44
+ * When screen share is active and audio mixing is enabled
45
+ * (via `call.screenShare.enableScreenShareAudio()`), this hook
46
+ * calls the native module to mix captured screen/app audio
47
+ * into the microphone audio track.
48
+ *
49
+ * Noise cancellation is temporarily disabled while screen audio mixing
50
+ * is active so that all captured sounds (music, game audio, etc.)
51
+ * pass through without being filtered.
52
+ */
53
+ export const useScreenShareAudioMixing = () => {
54
+ const call = useCall();
55
+ const { useLocalParticipant } = useCallStateHooks();
56
+ const localParticipant = useLocalParticipant();
57
+ const isScreenSharing =
58
+ localParticipant != null && hasScreenShare(localParticipant);
59
+
60
+ const [audioEnabled, setAudioEnabled] = useState(
61
+ () => call?.screenShare.state.audioEnabled ?? false,
62
+ );
63
+
64
+ const isMixingActiveRef = useRef(false);
65
+ const ncWasEnabledRef = useRef(false);
66
+
67
+ useEffect(() => {
68
+ if (!call) return;
69
+ const sub = call.screenShare.state.audioEnabled$.subscribe(setAudioEnabled);
70
+ return () => sub.unsubscribe();
71
+ }, [call]);
72
+
73
+ const startMixing = useCallback(async () => {
74
+ if (isMixingActiveRef.current) return;
75
+ try {
76
+ // Disable NC before starting mixing so screen audio is not filtered
77
+ ncWasEnabledRef.current = await disableNoiseCancellation();
78
+
79
+ logger.info('Starting screen share audio mixing');
80
+ await screenShareAudioMixingManager.startScreenShareAudioMixing();
81
+ isMixingActiveRef.current = true;
82
+ } catch (error) {
83
+ logger.warn('Failed to start screen share audio mixing', error);
84
+ if (ncWasEnabledRef.current) {
85
+ restoreNoiseCancellation().catch(() => {});
86
+ ncWasEnabledRef.current = false;
87
+ }
88
+ }
89
+ }, []);
90
+
91
+ const stopMixing = useCallback(async () => {
92
+ if (!isMixingActiveRef.current) return;
93
+ try {
94
+ logger.info('Stopping screen share audio mixing');
95
+ await screenShareAudioMixingManager.stopScreenShareAudioMixing();
96
+ isMixingActiveRef.current = false;
97
+
98
+ if (ncWasEnabledRef.current) {
99
+ await restoreNoiseCancellation();
100
+ ncWasEnabledRef.current = false;
101
+ }
102
+ } catch (error) {
103
+ logger.warn('Failed to stop screen share audio mixing', error);
104
+ }
105
+ }, []);
106
+
107
+ // Start/stop audio mixing based on screen share status and audio preference
108
+ useEffect(() => {
109
+ if (isScreenSharing && audioEnabled) {
110
+ startMixing();
111
+ } else {
112
+ stopMixing();
113
+ }
114
+ }, [isScreenSharing, audioEnabled, startMixing, stopMixing]);
115
+
116
+ useEffect(() => {
117
+ return () => {
118
+ if (isMixingActiveRef.current) {
119
+ screenShareAudioMixingManager
120
+ .stopScreenShareAudioMixing()
121
+ .catch(() => {});
122
+ isMixingActiveRef.current = false;
123
+ if (ncWasEnabledRef.current) {
124
+ restoreNoiseCancellation().catch(() => {});
125
+ ncWasEnabledRef.current = false;
126
+ }
127
+ }
128
+ };
129
+ }, []);
130
+ };
@@ -8,6 +8,42 @@ import React, { useEffect, useRef } from 'react';
8
8
  import { findNodeHandle, NativeModules, Platform } from 'react-native';
9
9
  import { usePrevious } from '../utils/hooks';
10
10
  import { useIsIosScreenshareBroadcastStarted } from './useIsIosScreenshareBroadcastStarted';
11
+ import { screenShareAudioMixingManager } from '../modules/ScreenShareAudioManager';
12
+
13
+ /**
14
+ * The type of screen sharing to use on iOS.
15
+ *
16
+ * - `'broadcast'` — Uses a Broadcast Upload Extension (RPSystemBroadcastPickerView).
17
+ * Captures the entire device screen, works across all apps. Requires an extension target.
18
+ * - `'inApp'` — Uses RPScreenRecorder.startCapture to capture the current app's screen.
19
+ * Only captures the current app. Supports `.audioApp` sample buffers for audio mixing.
20
+ *
21
+ * On Android, this option is ignored — the system screen capture dialog is always used.
22
+ */
23
+ export type ScreenShareType = 'broadcast' | 'inApp';
24
+
25
+ /**
26
+ * Options for screen share behavior.
27
+ */
28
+ export type ScreenShareOptions = {
29
+ /**
30
+ * The type of screen sharing on iOS. Default: `'broadcast'`.
31
+ * On Android this is ignored.
32
+ */
33
+ type?: ScreenShareType;
34
+ /**
35
+ * Whether to capture and mix system/app audio into the microphone audio track.
36
+ * When `true`, remote participants will hear media audio from the shared screen
37
+ * (e.g., YouTube video audio) mixed with the user's microphone.
38
+ *
39
+ * - iOS in-app: Audio captured from RPScreenRecorder `.audioApp` buffers.
40
+ * - iOS broadcast: Audio mixing is **not** currently supported.
41
+ * - Android: Audio captured via AudioPlaybackCaptureConfiguration (API 29+).
42
+ *
43
+ * Default: `false`.
44
+ */
45
+ includeAudio?: boolean;
46
+ };
11
47
 
12
48
  // ios >= 14.0 or android - platform restrictions
13
49
  const CanDeviceScreenShare =
@@ -18,7 +54,7 @@ const CanDeviceScreenShare =
18
54
  export const useScreenShareButton = (
19
55
  /**
20
56
  * Ref of the ScreenCapturePickerView component.
21
- *
57
+ * Required for iOS broadcast screen sharing. Can be `null` for in-app mode.
22
58
  */
23
59
  screenCapturePickerViewiOSRef: React.MutableRefObject<any>,
24
60
  /**
@@ -36,6 +72,10 @@ export const useScreenShareButton = (
36
72
  *
37
73
  */
38
74
  onMissingScreenShareStreamPermission?: () => void,
75
+ /**
76
+ * Options for screen share behavior (type, includeAudio).
77
+ */
78
+ screenShareOptions?: ScreenShareOptions,
39
79
  ) => {
40
80
  const call = useCall();
41
81
  const { useLocalParticipant, useCallSettings, useOwnCapabilities } =
@@ -47,6 +87,9 @@ export const useScreenShareButton = (
47
87
  );
48
88
  const isScreenSharingEnabledInCall = callSettings?.screensharing.enabled;
49
89
 
90
+ const screenShareType = screenShareOptions?.type ?? 'broadcast';
91
+ const includeAudio = screenShareOptions?.includeAudio ?? false;
92
+
50
93
  const onScreenShareStartedHandlerRef = useRef(onScreenShareStartedHandler);
51
94
  onScreenShareStartedHandlerRef.current = onScreenShareStartedHandler;
52
95
  const onScreenShareStoppedHandlerRef = useRef(onScreenShareStoppedHandler);
@@ -62,15 +105,22 @@ export const useScreenShareButton = (
62
105
  localParticipant && hasScreenShare(localParticipant);
63
106
 
64
107
  // listens to iOS screen share broadcast started event from the system
108
+ // (only relevant for broadcast mode)
65
109
  useEffect(() => {
66
110
  if (Platform.OS !== 'ios') {
67
111
  return;
68
112
  }
113
+ if (screenShareType !== 'broadcast') {
114
+ return;
115
+ }
69
116
  if (
70
117
  iosScreenShareStartedFromSystem &&
71
118
  !prevIosScreenShareStartedFromSystem
72
119
  ) {
73
120
  onScreenShareStartedHandlerRef.current?.();
121
+ if (includeAudio) {
122
+ call?.screenShare.enableScreenShareAudio();
123
+ }
74
124
  call?.screenShare.enable();
75
125
  } else if (
76
126
  !iosScreenShareStartedFromSystem &&
@@ -81,6 +131,8 @@ export const useScreenShareButton = (
81
131
  }
82
132
  }, [
83
133
  call,
134
+ includeAudio,
135
+ screenShareType,
84
136
  iosScreenShareStartedFromSystem,
85
137
  prevIosScreenShareStartedFromSystem,
86
138
  ]);
@@ -92,14 +144,43 @@ export const useScreenShareButton = (
92
144
  'User does not have permissions to stream the screen share media, calling onMissingScreenShareStreamPermission handler if present',
93
145
  );
94
146
  onMissingScreenShareStreamPermission?.();
147
+ return;
95
148
  }
149
+
96
150
  if (!hasPublishedScreenShare) {
97
- if (Platform.OS === 'ios') {
151
+ // Set audio mixing preference before starting screen share
152
+ if (includeAudio) {
153
+ call?.screenShare.enableScreenShareAudio();
154
+ } else {
155
+ try {
156
+ await call?.screenShare.disableScreenShareAudio();
157
+ } catch (error) {
158
+ const logger = videoLoggerSystem.getLogger('useScreenShareButton');
159
+ logger.warn('Failed to disable screen share audio', error);
160
+ }
161
+ }
162
+
163
+ if (Platform.OS === 'ios' && screenShareType === 'inApp') {
164
+ // In-app screen sharing on iOS — uses RPScreenRecorder directly
165
+ try {
166
+ await screenShareAudioMixingManager.startInAppScreenCapture(
167
+ includeAudio,
168
+ );
169
+ await call?.screenShare.enable();
170
+ onScreenShareStartedHandler?.();
171
+ } catch (error) {
172
+ await screenShareAudioMixingManager.stopInAppScreenCapture();
173
+ const logger = videoLoggerSystem.getLogger('useScreenShareButton');
174
+ logger.warn('Failed to start in-app screen capture', error);
175
+ }
176
+ } else if (Platform.OS === 'ios') {
177
+ // Broadcast screen sharing on iOS — shows the system picker
98
178
  const reactTag = findNodeHandle(screenCapturePickerViewiOSRef.current);
99
179
  await NativeModules.ScreenCapturePickerViewManager.show(reactTag);
100
180
  // After this the iOS screen share broadcast started/stopped event will be triggered
101
181
  // and the useEffect listener will handle the rest
102
182
  } else {
183
+ // Android screen sharing
103
184
  try {
104
185
  await call?.screenShare.enable();
105
186
  onScreenShareStartedHandler?.();
@@ -114,6 +195,10 @@ export const useScreenShareButton = (
114
195
  }
115
196
  } else if (hasPublishedScreenShare) {
116
197
  onScreenShareStoppedHandler?.();
198
+ // Stop in-app screen capture if it was active (iOS only)
199
+ if (Platform.OS === 'ios' && screenShareType === 'inApp') {
200
+ await screenShareAudioMixingManager.stopInAppScreenCapture();
201
+ }
117
202
  await call?.screenShare.disable(true);
118
203
  }
119
204
  };
@@ -0,0 +1,49 @@
1
+ import { NativeModules, Platform } from 'react-native';
2
+
3
+ const StreamVideoReactNative = NativeModules.StreamVideoReactNative;
4
+
5
+ export class ScreenShareAudioManager {
6
+ /**
7
+ * Starts mixing screen share audio into the microphone audio track.
8
+ * On iOS, this enables audio buffer processing on the prepared mixer.
9
+ * On Android, this registers an audio processor that captures system media
10
+ * audio via AudioPlaybackCaptureConfiguration and mixes it into the mic buffer.
11
+ */
12
+ async startScreenShareAudioMixing(): Promise<void> {
13
+ return StreamVideoReactNative?.startScreenShareAudioMixing();
14
+ }
15
+
16
+ /**
17
+ * Stops mixing screen share audio into the microphone audio track
18
+ * and restores the original audio pipeline.
19
+ */
20
+ async stopScreenShareAudioMixing(): Promise<void> {
21
+ return StreamVideoReactNative?.stopScreenShareAudioMixing();
22
+ }
23
+
24
+ /**
25
+ * Starts in-app screen capture using RPScreenRecorder (iOS only).
26
+ * Unlike broadcast screen sharing, in-app capture runs in the main app process
27
+ * and can directly provide `.audioApp` sample buffers for mixing.
28
+ *
29
+ * @param includeAudio Whether to capture and mix app audio.
30
+ */
31
+ async startInAppScreenCapture(includeAudio: boolean): Promise<void> {
32
+ if (Platform.OS !== 'ios') {
33
+ return;
34
+ }
35
+ return StreamVideoReactNative?.startInAppScreenCapture(includeAudio);
36
+ }
37
+
38
+ /**
39
+ * Stops in-app screen capture (iOS only).
40
+ */
41
+ async stopInAppScreenCapture(): Promise<void> {
42
+ if (Platform.OS !== 'ios') {
43
+ return;
44
+ }
45
+ return StreamVideoReactNative?.stopInAppScreenCapture();
46
+ }
47
+ }
48
+
49
+ export const screenShareAudioMixingManager = new ScreenShareAudioManager();