@stream-io/video-react-native-sdk 1.22.1-alpha.0 → 1.22.1-alpha.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/android/src/main/java/com/streamvideo/reactnative/StreamVideoReactNativeModule.kt +4 -4
- package/dist/commonjs/version.js +1 -1
- package/dist/module/version.js +1 -1
- package/dist/typescript/version.d.ts +1 -1
- package/ios/StreamInCallManager.swift +61 -48
- package/package.json +3 -3
- package/src/version.ts +1 -1
package/android/src/main/java/com/streamvideo/reactnative/StreamVideoReactNativeModule.kt
CHANGED

@@ -429,15 +429,15 @@ class StreamVideoReactNativeModule(reactContext: ReactApplicationContext) :
         busyToneJob?.cancel()
         busyToneJob = null
 
-        busyToneAudioTrack?.
+        busyToneAudioTrack?.apply {
             try {
-                if (
-
+                if (playState == AudioTrack.PLAYSTATE_PLAYING) {
+                    stop()
                 }
             } catch (e: Exception) {
                 Log.e(NAME, "Error stopping AudioTrack: ${e.message}")
             } finally {
-
+                release()
             }
         }
         busyToneAudioTrack = null
package/dist/commonjs/version.js
CHANGED

package/dist/module/version.js
CHANGED

@@ -1,2 +1,2 @@
-export const version = '1.22.1-alpha.0';
+export const version = '1.22.1-alpha.2';
 //# sourceMappingURL=version.js.map

package/dist/typescript/version.d.ts
CHANGED

@@ -1,2 +1,2 @@
-export declare const version = "1.22.1-alpha.0";
+export declare const version = "1.22.1-alpha.2";
 //# sourceMappingURL=version.d.ts.map
package/ios/StreamInCallManager.swift
CHANGED

@@ -19,6 +19,8 @@ enum DefaultAudioDevice {
 @objc(StreamInCallManager)
 class StreamInCallManager: RCTEventEmitter {
 
+    private let audioSessionQueue = DispatchQueue(label: "io.getstream.rn.audioSessionQueue", qos: .userInitiated)
+
     private var audioManagerActivated = false
     private var callAudioRole: CallAudioRole = .communicator
     private var defaultAudioDevice: DefaultAudioDevice = .speaker
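The alpha.2 build adds this private serial `audioSessionQueue` and dispatches every audio-session mutation onto it (see the hunks below), replacing the `methodQueue()` override that is removed near the end of this file's diff. The following is a minimal, self-contained Swift sketch of that serialization pattern, not the SDK's actual class; the category and mode values are illustrative assumptions.

import AVFoundation

// Minimal sketch (not the SDK's implementation): funnel all AVAudioSession
// mutations through one serial queue so calls arriving from different threads
// cannot interleave category changes with activation/deactivation.
final class AudioSessionCoordinator {
    private let queue = DispatchQueue(label: "example.audioSessionQueue", qos: .userInitiated)
    private var isActive = false

    func activate() {
        queue.async { [self] in
            guard !isActive else { return }
            let session = AVAudioSession.sharedInstance()
            do {
                // .playAndRecord / .voiceChat are assumptions for a call-style app.
                try session.setCategory(.playAndRecord, mode: .voiceChat, options: [])
                try session.setActive(true)
                isActive = true
            } catch {
                NSLog("activate failed: %@", error.localizedDescription)
            }
        }
    }

    func deactivate() {
        queue.async { [self] in
            guard isActive else { return }
            try? AVAudioSession.sharedInstance().setActive(false)
            isActive = false
        }
    }
}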
@@ -44,55 +46,60 @@ class StreamInCallManager: RCTEventEmitter {
 
     @objc(setAudioRole:)
     func setAudioRole(audioRole: String) {
-
-
-
+        audioSessionQueue.async { [self] in
+            if audioManagerActivated {
+                log("AudioManager is already activated, audio role cannot be changed.")
+                return
+            }
+            self.callAudioRole = audioRole.lowercased() == "listener" ? .listener : .communicator
         }
-        self.callAudioRole = audioRole.lowercased() == "listener" ? .listener : .communicator
-
     }
 
     @objc(setDefaultAudioDeviceEndpointType:)
     func setDefaultAudioDeviceEndpointType(endpointType: String) {
-
-
-
+        audioSessionQueue.async { [self] in
+            if audioManagerActivated {
+                log("AudioManager is already activated, default audio device cannot be changed.")
+                return
+            }
+            self.defaultAudioDevice = endpointType.lowercased() == "earpiece" ? .earpiece : .speaker
         }
-        self.defaultAudioDevice = endpointType.lowercased() == "earpiece" ? .earpiece : .speaker
-
     }
 
     @objc
     func start() {
-
-
+        audioSessionQueue.async { [self] in
+            if audioManagerActivated {
+                return
+            }
+            let session = AVAudioSession.sharedInstance()
+            previousAudioSessionState = AudioSessionState(
+                category: session.category,
+                mode: session.mode,
+                options: session.categoryOptions
+            )
+            configureAudioSession()
+            audioManagerActivated = true
         }
-        let session = AVAudioSession.sharedInstance()
-        previousAudioSessionState = AudioSessionState(
-            category: session.category,
-            mode: session.mode,
-            options: session.categoryOptions
-        )
-        configureAudioSession()
-        audioManagerActivated = true
     }
 
     @objc
     func stop() {
-
-
-
-
-        let 
-
-
-
-
+        audioSessionQueue.async { [self] in
+            if !audioManagerActivated {
+                return
+            }
+            if let prev = previousAudioSessionState {
+                let session = AVAudioSession.sharedInstance()
+                do {
+                    try session.setCategory(prev.category, mode: prev.mode, options: prev.options)
+                } catch {
+                    log("Error restoring previous audio session: \(error.localizedDescription)")
+                }
+                previousAudioSessionState = nil
             }
-
+            audioManagerActivated = false
         }
-        audioManagerActivated = false
-
     }
 
     private func configureAudioSession() {
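The reworked `start()`/`stop()` pair above snapshots whatever audio-session configuration the app already had before the call and restores it afterwards, all on the same serial queue. Below is a hedged sketch of that snapshot/restore idea; `SessionSnapshot` is a stand-in type defined here, not the SDK's `AudioSessionState`.

import AVFoundation

// Sketch of the capture/restore idea. SessionSnapshot mirrors what the diff
// stores (category, mode, options) but is a hypothetical type.
struct SessionSnapshot {
    let category: AVAudioSession.Category
    let mode: AVAudioSession.Mode
    let options: AVAudioSession.CategoryOptions
}

func takeSnapshot(of session: AVAudioSession = .sharedInstance()) -> SessionSnapshot {
    SessionSnapshot(category: session.category,
                    mode: session.mode,
                    options: session.categoryOptions)
}

func restore(_ snapshot: SessionSnapshot, to session: AVAudioSession = .sharedInstance()) {
    do {
        try session.setCategory(snapshot.category, mode: snapshot.mode, options: snapshot.options)
    } catch {
        NSLog("restore failed: %@", error.localizedDescription)
    }
}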
@@ -111,13 +118,23 @@ class StreamInCallManager: RCTEventEmitter {
 
             if (defaultAudioDevice == .speaker) {
                 // defaultToSpeaker will route to speaker if nothing else is connected
-                intendedOptions = [.
+                intendedOptions = [.allowBluetoothHFP, .defaultToSpeaker]
             } else {
                 // having no defaultToSpeaker makes sure audio goes to earpiece if nothing is connected
-                intendedOptions = [.
+                intendedOptions = [.allowBluetoothHFP]
             }
         }
 
+        // STEP 1: Configure iOS native audio session FIRST (this does the pre-warming so that webrtc worker thread isnt stalled on audio unit initialisation)
+        let nativeSession = AVAudioSession.sharedInstance()
+        do {
+            try nativeSession.setCategory(intendedCategory, mode: intendedMode, options: intendedOptions)
+            try nativeSession.setActive(true)
+            log("configureAudioSession: Native AVAudioSession configured successfully")
+        } catch {
+            log("configureAudioSession: Failed to configure native session: \(error.localizedDescription)")
+        }
+
         // START: set the config that webrtc must use when it takes control
         let rtcConfig = RTCAudioSessionConfiguration.webRTC()
         rtcConfig.category = intendedCategory.rawValue
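The new "STEP 1" block activates the native AVAudioSession with the intended category, mode, and options before that same configuration is handed to WebRTC via `RTCAudioSessionConfiguration.setWebRTC(...)`, so the audio unit is already warm when WebRTC's worker thread takes over. A rough sketch of just the pre-warm half follows, under the assumption that the call category is `.playAndRecord` with `.voiceChat` mode (the diff does not show `intendedCategory`/`intendedMode`); the WebRTC hand-off is omitted to keep the sketch self-contained.

import AVFoundation

// Hedged sketch of the pre-warm step only (not the SDK's configureAudioSession).
func prewarmAudioSession(defaultToSpeaker: Bool) {
    let session = AVAudioSession.sharedInstance()
    // .allowBluetoothHFP is the spelling used in this diff; older SDKs expose
    // the same option as .allowBluetooth.
    var options: AVAudioSession.CategoryOptions = [.allowBluetoothHFP]
    if defaultToSpeaker {
        options.insert(.defaultToSpeaker)
    }
    do {
        try session.setCategory(.playAndRecord, mode: .voiceChat, options: options)
        try session.setActive(true)
    } catch {
        NSLog("prewarm failed: %@", error.localizedDescription)
    }
}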
@@ -126,6 +143,7 @@ class StreamInCallManager: RCTEventEmitter {
         RTCAudioSessionConfiguration.setWebRTC(rtcConfig)
         // END
 
+
         // START: compare current audio session with intended, and update if different
         let session = RTCAudioSession.sharedInstance()
         let currentCategory = session.category
@@ -135,9 +153,7 @@ class StreamInCallManager: RCTEventEmitter {
 
         if currentCategory != intendedCategory.rawValue || currentMode != intendedMode.rawValue || currentOptions != intendedOptions || !currentIsActive {
             session.lockForConfiguration()
-            defer {
-                session.unlockForConfiguration()
-            }
+            defer { session.unlockForConfiguration() }
             do {
                 try session.setCategory(intendedCategory, mode: intendedMode, options: intendedOptions)
                 try session.setActive(true)
@@ -175,12 +191,14 @@ class StreamInCallManager: RCTEventEmitter {
 
     @objc(setForceSpeakerphoneOn:)
     func setForceSpeakerphoneOn(enable: Bool) {
-
-
-
-
-
-
+        audioSessionQueue.async {
+            let session = AVAudioSession.sharedInstance()
+            do {
+                try session.overrideOutputAudioPort(enable ? .speaker : .none)
+                try session.setActive(true)
+            } catch {
+                self.log("Error setting speakerphone: \(error)")
+            }
         }
     }
 
@@ -290,11 +308,6 @@ class StreamInCallManager: RCTEventEmitter {
         }
     }
 
-    @objc
-    func methodQueue() -> DispatchQueue {
-        return DispatchQueue(label: "io.getstream.rn.audioSessionQueue", qos: .userInitiated)
-    }
-
     // MARK: - Logging Helper
     private func log(_ message: String) {
         NSLog("InCallManager: %@", message)
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@stream-io/video-react-native-sdk",
-  "version": "1.22.1-alpha.0",
+  "version": "1.22.1-alpha.2",
   "description": "Stream Video SDK for React Native",
   "author": "https://getstream.io",
   "homepage": "https://getstream.io/video/docs/react-native/",

@@ -45,8 +45,8 @@
     "!**/.*"
   ],
   "dependencies": {
-    "@stream-io/video-client": "1.34.
-    "@stream-io/video-react-bindings": "1.10.
+    "@stream-io/video-client": "1.34.1",
+    "@stream-io/video-react-bindings": "1.10.1",
     "intl-pluralrules": "2.0.1",
     "lodash.merge": "^4.6.2",
     "react-native-url-polyfill": "^3.0.0",
package/src/version.ts
CHANGED

@@ -1 +1 @@
-export const version = '1.22.1-alpha.0';
+export const version = '1.22.1-alpha.2';