agora-electron-sdk 4.2.2-dev.4 → 4.2.2-dev.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67) hide show
  1. package/CHANGELOG.md +22 -0
  2. package/js/Private/AgoraBase.js +139 -53
  3. package/js/Private/AgoraMediaBase.js +13 -7
  4. package/js/Private/IAgoraRtcEngine.js +11 -11
  5. package/js/Private/IAgoraRtcEngineEx.js +1 -1
  6. package/js/Private/IAgoraSpatialAudio.js +2 -2
  7. package/js/Private/impl/IAgoraMediaEngineImpl.js +12 -6
  8. package/js/Private/impl/IAgoraRtcEngineImpl.js +7 -7
  9. package/js/Private/internal/IrisApiEngine.js +13 -0
  10. package/js/Private/internal/MusicContentCenterInternal.js +3 -0
  11. package/js/Private/internal/RtcEngineExInternal.js +24 -7
  12. package/js/Private/ti/IAgoraRtcEngine-ti.js +2 -2
  13. package/js/Renderer/WebGLRenderer/index.js +19 -19
  14. package/js/Renderer/YUVCanvasRenderer/index.js +1 -1
  15. package/package.json +3 -3
  16. package/scripts/zipBuild.js +3 -1
  17. package/ts/Private/AgoraBase.ts +296 -133
  18. package/ts/Private/AgoraMediaBase.ts +68 -34
  19. package/ts/Private/AgoraMediaPlayerTypes.ts +8 -5
  20. package/ts/Private/IAgoraLog.ts +7 -3
  21. package/ts/Private/IAgoraMediaEngine.ts +76 -35
  22. package/ts/Private/IAgoraMediaPlayer.ts +126 -64
  23. package/ts/Private/IAgoraMediaPlayerSource.ts +3 -1
  24. package/ts/Private/IAgoraRtcEngine.ts +1325 -530
  25. package/ts/Private/IAgoraRtcEngineEx.ts +227 -94
  26. package/ts/Private/IAgoraSpatialAudio.ts +71 -36
  27. package/ts/Private/IAudioDeviceManager.ts +61 -31
  28. package/ts/Private/impl/IAgoraMediaEngineImpl.ts +12 -6
  29. package/ts/Private/impl/IAgoraRtcEngineImpl.ts +9 -11
  30. package/ts/Private/internal/IrisApiEngine.ts +14 -0
  31. package/ts/Private/internal/MusicContentCenterInternal.ts +4 -0
  32. package/ts/Private/internal/RtcEngineExInternal.ts +36 -14
  33. package/ts/Private/ti/IAgoraRtcEngine-ti.ts +2 -2
  34. package/ts/Renderer/WebGLRenderer/index.ts +26 -21
  35. package/ts/Renderer/YUVCanvasRenderer/index.ts +1 -1
  36. package/types/Private/AgoraBase.d.ts +298 -135
  37. package/types/Private/AgoraBase.d.ts.map +1 -1
  38. package/types/Private/AgoraMediaBase.d.ts +69 -35
  39. package/types/Private/AgoraMediaBase.d.ts.map +1 -1
  40. package/types/Private/AgoraMediaPlayerTypes.d.ts +8 -5
  41. package/types/Private/AgoraMediaPlayerTypes.d.ts.map +1 -1
  42. package/types/Private/IAgoraLog.d.ts +7 -3
  43. package/types/Private/IAgoraLog.d.ts.map +1 -1
  44. package/types/Private/IAgoraMediaEngine.d.ts +76 -35
  45. package/types/Private/IAgoraMediaEngine.d.ts.map +1 -1
  46. package/types/Private/IAgoraMediaPlayer.d.ts +126 -64
  47. package/types/Private/IAgoraMediaPlayer.d.ts.map +1 -1
  48. package/types/Private/IAgoraMediaPlayerSource.d.ts +3 -1
  49. package/types/Private/IAgoraMediaPlayerSource.d.ts.map +1 -1
  50. package/types/Private/IAgoraRtcEngine.d.ts +1326 -530
  51. package/types/Private/IAgoraRtcEngine.d.ts.map +1 -1
  52. package/types/Private/IAgoraRtcEngineEx.d.ts +227 -94
  53. package/types/Private/IAgoraRtcEngineEx.d.ts.map +1 -1
  54. package/types/Private/IAgoraSpatialAudio.d.ts +71 -36
  55. package/types/Private/IAgoraSpatialAudio.d.ts.map +1 -1
  56. package/types/Private/IAudioDeviceManager.d.ts +61 -31
  57. package/types/Private/IAudioDeviceManager.d.ts.map +1 -1
  58. package/types/Private/impl/IAgoraMediaEngineImpl.d.ts +2 -2
  59. package/types/Private/impl/IAgoraMediaEngineImpl.d.ts.map +1 -1
  60. package/types/Private/impl/IAgoraRtcEngineImpl.d.ts +5 -5
  61. package/types/Private/impl/IAgoraRtcEngineImpl.d.ts.map +1 -1
  62. package/types/Private/internal/IrisApiEngine.d.ts.map +1 -1
  63. package/types/Private/internal/MusicContentCenterInternal.d.ts +1 -0
  64. package/types/Private/internal/MusicContentCenterInternal.d.ts.map +1 -1
  65. package/types/Private/internal/RtcEngineExInternal.d.ts.map +1 -1
  66. package/types/Renderer/WebGLRenderer/index.d.ts +1 -1
  67. package/types/Renderer/WebGLRenderer/index.d.ts.map +1 -1
@@ -52,7 +52,11 @@ export class SpatialAudioZone {
52
52
  */
53
53
  upLength?: number;
54
54
  /**
55
- * The sound attenuation coefficient when users within the sound insulation area communicate with external users. The value range is [0,1]. The values are as follows:0: Broadcast mode, where the volume and timbre are not attenuated with distance, and the volume and timbre heard by local users do not change regardless of distance.(0,0.5): Weak attenuation mode, that is, the volume and timbre are only weakly attenuated during the propagation process, and the sound can travel farther than the real environment.0.5: (Default) simulates the attenuation of the volume in the real environment; the effect is equivalent to not setting the audioAttenuation parameter.(0.5,1]: Strong attenuation mode (default value is 1), that is, the volume and timbre attenuate rapidly during propagation.
55
+ * The sound attenuation coefficient when users within the sound insulation area communicate with external users. The value range is [0,1]. The values are as follows:
56
+ * 0: Broadcast mode, where the volume and timbre are not attenuated with distance, and the volume and timbre heard by local users do not change regardless of distance.
57
+ * (0,0.5): Weak attenuation mode, that is, the volume and timbre are only weakly attenuated during the propagation process, and the sound can travel farther than the real environment.
58
+ * 0.5: (Default) simulates the attenuation of the volume in the real environment; the effect is equivalent to not setting the audioAttenuation parameter.
59
+ * (0.5,1]: Strong attenuation mode (default value is 1), that is, the volume and timbre attenuate rapidly during propagation.
56
60
  */
57
61
  audioAttenuation?: number;
58
62
  }
@@ -60,13 +64,13 @@ export class SpatialAudioZone {
60
64
  /**
61
65
  * This class contains some of the APIs in the ILocalSpatialAudioEngine class.
62
66
  *
63
- * The ILocalSpatialAudioEngine class inherits from IBaseSpatialAudioEngine .
67
+ * The ILocalSpatialAudioEngine class inherits from IBaseSpatialAudioEngine.
64
68
  */
65
69
  export abstract class IBaseSpatialAudioEngine {
66
70
  /**
67
71
  * Destroys IBaseSpatialAudioEngine.
68
72
  *
69
- * This method releases all resources under IBaseSpatialAudioEngine . When the user does not need to use the spatial audio effect, you can call this method to release resources for other operations. After calling this method, you can no longer use any of the APIs under IBaseSpatialAudioEngine . Call this method before the release method under IRtcEngine .
73
+ * This method releases all resources under IBaseSpatialAudioEngine. When the user does not need to use the spatial audio effect, you can call this method to release resources for other operations. After calling this method, you can no longer use any of the APIs under IBaseSpatialAudioEngine. Call this method before the release method under IRtcEngine.
70
74
  */
71
75
  abstract release(): void;
72
76
 
@@ -78,7 +82,8 @@ export abstract class IBaseSpatialAudioEngine {
78
82
  * @param maxCount The maximum number of streams that a user can receive within a specified audio reception range. The value of this parameter should be ≤ 16, and the default value is 10.
79
83
  *
80
84
  * @returns
81
- * 0: Success. < 0: Failure.
85
+ * 0: Success.
86
+ * < 0: Failure.
82
87
  */
83
88
  abstract setMaxAudioRecvCount(maxCount: number): number;
84
89
 
@@ -90,7 +95,8 @@ export abstract class IBaseSpatialAudioEngine {
90
95
  * @param range The maximum audio reception range. The unit is meters. The value of this parameter must be greater than 0, and the default value is 20.
91
96
  *
92
97
  * @returns
93
- * 0: Success. < 0: Failure.
98
+ * 0: Success.
99
+ * < 0: Failure.
94
100
  */
95
101
  abstract setAudioRecvRange(range: number): number;
96
102
 
@@ -99,17 +105,18 @@ export abstract class IBaseSpatialAudioEngine {
99
105
  *
100
106
  * In a game engine, the unit of distance is customized, while in the Agora spatial audio algorithm, distance is measured in meters. By default, the SDK converts the game engine distance per unit to one meter. You can call this method to convert the game engine distance per unit to a specified number of meters.
101
107
  *
102
- * @param unit The number of meters that the game engine distance per unit is equal to. The value of this parameter must be greater than 0.00, and the default value is 1.00. For example, setting unit as 2.00 means the game engine distance per unit equals 2 meters.The larger the value is, the faster the sound heard by the local user attenuates when the remote user moves far away from the local user.
108
+ * @param unit The number of meters that the game engine distance per unit is equal to. The value of this parameter must be greater than 0.00, and the default value is 1.00. For example, setting unit as 2.00 means the game engine distance per unit equals 2 meters. The larger the value is, the faster the sound heard by the local user attenuates when the remote user moves far away from the local user.
103
109
  *
104
110
  * @returns
105
- * 0: Success. < 0: Failure.
111
+ * 0: Success.
112
+ * < 0: Failure.
106
113
  */
107
114
  abstract setDistanceUnit(unit: number): number;
108
115
 
109
116
  /**
110
117
  * Updates the spatial position of the local user.
111
118
  *
112
- * Under the ILocalSpatialAudioEngine class, this method needs to be used with updateRemotePosition . The SDK calculates the relative position between the local and remote users according to this method and the parameter settings in updateRemotePosition , and then calculates the user's spatial audio effect parameters.
119
+ * Under the ILocalSpatialAudioEngine class, this method needs to be used with updateRemotePosition. The SDK calculates the relative position between the local and remote users according to this method and the parameter settings in updateRemotePosition, and then calculates the user's spatial audio effect parameters.
113
120
  *
114
121
  * @param position The coordinates in the world coordinate system. This parameter is an array of length 3, and the three values represent the front, right, and top coordinates in turn.
115
122
  * @param axisForward The unit vector of the x axis in the coordinate system. This parameter is an array of length 3, and the three values represent the front, right, and top coordinates in turn.
@@ -117,7 +124,8 @@ export abstract class IBaseSpatialAudioEngine {
117
124
  * @param axisUp The unit vector of the z axis in the coordinate system. This parameter is an array of length 3, and the three values represent the front, right, and top coordinates in turn.
118
125
  *
119
126
  * @returns
120
- * 0: Success. < 0: Failure.
127
+ * 0: Success.
128
+ * < 0: Failure.
121
129
  */
122
130
  abstract updateSelfPosition(
123
131
  position: number[],
@@ -146,7 +154,8 @@ export abstract class IBaseSpatialAudioEngine {
146
154
  * @param positionInfo The spatial position of the media player. See RemoteVoicePositionInfo.
147
155
  *
148
156
  * @returns
149
- * 0: Success. < 0: Failure.
157
+ * 0: Success.
158
+ * < 0: Failure.
150
159
  */
151
160
  abstract updatePlayerPositionInfo(
152
161
  playerId: number,
@@ -161,37 +170,49 @@ export abstract class IBaseSpatialAudioEngine {
161
170
  /**
162
171
  * Stops or resumes publishing the local audio stream.
163
172
  *
164
- * This method does not affect any ongoing audio recording, because it does not disable the audio capture device. Call this method after joinChannel . When using the spatial audio effect, if you need to set whether to stop subscribing to the audio stream of a specified user, Agora recommends calling this method instead of the muteLocalAudioStream method in IRtcEngine . A successful call of this method triggers the onUserMuteAudio and onRemoteAudioStateChanged callbacks on the remote client.
173
+ * This method does not affect any ongoing audio recording, because it does not disable the audio capture device.
174
+ * Call this method after joinChannel.
175
+ * When using the spatial audio effect, if you need to set whether to stop publishing the local audio stream, Agora recommends calling this method instead of the muteLocalAudioStream method in IRtcEngine.
176
+ * A successful call of this method triggers the onUserMuteAudio and onRemoteAudioStateChanged callbacks on the remote client.
165
177
  *
166
- * @param mute Whether to stop publishing the local audio stream:true: Stop publishing the local audio stream.false: Publish the local audio stream.
178
+ * @param mute Whether to stop publishing the local audio stream: true : Stop publishing the local audio stream. false : Publish the local audio stream.
167
179
  *
168
180
  * @returns
169
- * 0: Success. < 0: Failure.
181
+ * 0: Success.
182
+ * < 0: Failure.
170
183
  */
171
184
  abstract muteLocalAudioStream(mute: boolean): number;
172
185
 
173
186
  /**
174
187
  * Stops or resumes subscribing to the audio streams of all remote users.
175
188
  *
176
- * After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users. Call this method after joinChannel . When using the spatial audio effect, if you need to set whether to stop subscribing to the audio streams of all remote users, Agora recommends calling this method instead of the muteAllRemoteAudioStreams method in IRtcEngine . After calling this method, you need to call updateSelfPosition and updateRemotePosition to update the spatial location of the local user and the remote user; otherwise, the settings in this method do not take effect.
189
+ * After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users.
190
+ * Call this method after joinChannel.
191
+ * When using the spatial audio effect, if you need to set whether to stop subscribing to the audio streams of all remote users, Agora recommends calling this method instead of the muteAllRemoteAudioStreams method in IRtcEngine.
192
+ * After calling this method, you need to call updateSelfPosition and updateRemotePosition to update the spatial location of the local user and the remote user; otherwise, the settings in this method do not take effect.
177
193
  *
178
- * @param mute Whether to stop subscribing to the audio streams of all remote users:true: Stop subscribing to the audio streams of all remote users.false: Subscribe to the audio streams of all remote users.
194
+ * @param mute Whether to stop subscribing to the audio streams of all remote users: true : Stop subscribing to the audio streams of all remote users. false : Subscribe to the audio streams of all remote users.
179
195
  *
180
196
  * @returns
181
- * 0: Success. < 0: Failure.
197
+ * 0: Success.
198
+ * < 0: Failure.
182
199
  */
183
200
  abstract muteAllRemoteAudioStreams(mute: boolean): number;
184
201
 
185
202
  /**
186
203
  * Sets the sound insulation area.
187
204
  *
188
- * In virtual interactive scenarios, you can use this method to set the sound insulation area and sound attenuation coefficient. When the sound source (which can be the user or the media player) and the listener belong to the inside and outside of the sound insulation area, they can experience the attenuation effect of sound similar to the real environment when it encounters a building partition. When the sound source and the listener belong to the inside and outside of the sound insulation area, the sound attenuation effect is determined by the sound attenuation coefficient in SpatialAudioZone . If the user or media player is in the same sound insulation area, it is not affected by SpatialAudioZone , and the sound attenuation effect is determined by the attenuation parameter in setPlayerAttenuation or setRemoteAudioAttenuation . If you do not call setPlayerAttenuation or setRemoteAudioAttenuation , the default sound attenuation coefficient of the SDK is 0.5, which simulates the attenuation of the sound in the real environment. If the sound source and the receiver belong to two sound insulation areas, the receiver cannot hear the sound source. If this method is called multiple times, the last sound insulation area set takes effect.
205
+ * In virtual interactive scenarios, you can use this method to set the sound insulation area and sound attenuation coefficient. When the sound source (which can be the user or the media player) and the listener belong to the inside and outside of the sound insulation area, they can experience the attenuation effect of sound similar to the real environment when it encounters a building partition.
206
+ * When the sound source and the listener belong to the inside and outside of the sound insulation area, the sound attenuation effect is determined by the sound attenuation coefficient in SpatialAudioZone.
207
+ * If the user or media player is in the same sound insulation area, it is not affected by SpatialAudioZone, and the sound attenuation effect is determined by the attenuation parameter in setPlayerAttenuation or setRemoteAudioAttenuation. If you do not call setPlayerAttenuation or setRemoteAudioAttenuation, the default sound attenuation coefficient of the SDK is 0.5, which simulates the attenuation of the sound in the real environment.
208
+ * If the sound source and the receiver belong to two sound insulation areas, the receiver cannot hear the sound source. If this method is called multiple times, the last sound insulation area set takes effect.
189
209
  *
190
- * @param zones Sound insulation area settings. See SpatialAudioZone.
210
+ * @param zones Sound insulation area settings. See SpatialAudioZone. On the Windows platform, it is necessary to ensure that the number of members in the zones array is equal to the value of zoneCount; otherwise, it may cause a crash.
191
211
  * @param zoneCount The number of sound insulation areas.
192
212
  *
193
213
  * @returns
194
- * 0: Success. < 0: Failure.
214
+ * 0: Success.
215
+ * < 0: Failure.
195
216
  */
196
217
  abstract setZones(zones: SpatialAudioZone[], zoneCount: number): number;
197
218
 
@@ -199,11 +220,18 @@ export abstract class IBaseSpatialAudioEngine {
199
220
  * Sets the sound attenuation properties of the media player.
200
221
  *
201
222
  * @param playerId The ID of the media player.
202
- * @param attenuation The sound attenuation coefficient of the remote user or media player. The value range is [0,1]. The values are as follows:0: Broadcast mode, where the volume and timbre are not attenuated with distance, and the volume and timbre heard by local users do not change regardless of distance.(0,0.5): Weak attenuation mode, that is, the volume and timbre are only weakly attenuated during the propagation process, and the sound can travel farther than the real environment.0.5: (Default) simulates the attenuation of the volume in the real environment; the effect is equivalent to not setting the speaker_attenuation parameter.(0.5,1]: Strong attenuation mode, that is, the volume and timbre attenuate rapidly during the propagation process.
203
- * @param forceSet Whether to force the sound attenuation effect of the media player:true: Force attenuation to set the attenuation of the media player. At this time, the attenuation coefficient of the sound insulation are set in the audioAttenuation in the SpatialAudioZone does not take effect for the media player.false: Do not force attenuation to set the sound attenuation effect of the media player, as shown in the following two cases.If the sound source and listener are inside and outside the sound isolation area, the sound attenuation effect is determined by the audioAttenuation in SpatialAudioZone.If the sound source and the listener are in the same sound insulation area or outside the same sound insulation area, the sound attenuation effect is determined by attenuation in this method.
223
+ * @param attenuation The sound attenuation coefficient of the remote user or media player. The value range is [0,1]. The values are as follows:
224
+ * 0: Broadcast mode, where the volume and timbre are not attenuated with distance, and the volume and timbre heard by local users do not change regardless of distance.
225
+ * (0,0.5): Weak attenuation mode, that is, the volume and timbre are only weakly attenuated during the propagation process, and the sound can travel farther than the real environment.
226
+ * 0.5: (Default) simulates the attenuation of the volume in the real environment; the effect is equivalent to not setting the speaker_attenuation parameter.
227
+ * (0.5,1]: Strong attenuation mode, that is, the volume and timbre attenuate rapidly during the propagation process.
228
+ * @param forceSet Whether to force the sound attenuation effect of the media player: true : Force attenuation to set the attenuation of the media player. At this time, the attenuation coefficient of the sound insulation area set in the audioAttenuation in the SpatialAudioZone does not take effect for the media player. false : Do not force attenuation to set the sound attenuation effect of the media player, as shown in the following two cases.
229
+ * If the sound source and listener are inside and outside the sound isolation area, the sound attenuation effect is determined by the audioAttenuation in SpatialAudioZone.
230
+ * If the sound source and the listener are in the same sound insulation area or outside the same sound insulation area, the sound attenuation effect is determined by attenuation in this method.
204
231
  *
205
232
  * @returns
206
- * 0: Success. < 0: Failure.
233
+ * 0: Success.
234
+ * < 0: Failure.
207
235
  */
208
236
  abstract setPlayerAttenuation(
209
237
  playerId: number,
@@ -214,13 +242,15 @@ export abstract class IBaseSpatialAudioEngine {
214
242
  /**
215
243
  * Stops or resumes subscribing to the audio stream of a specified user.
216
244
  *
217
- * Call this method after joinChannel . When using the spatial audio effect, if you need to set whether to stop subscribing to the audio stream of a specified user, Agora recommends calling this method instead of the muteRemoteAudioStream method in IRtcEngine .
245
+ * Call this method after joinChannel.
246
+ * When using the spatial audio effect, if you need to set whether to stop subscribing to the audio stream of a specified user, Agora recommends calling this method instead of the muteRemoteAudioStream method in IRtcEngine.
218
247
  *
219
248
  * @param uid The user ID. This parameter must be the same as the user ID passed in when the user joined the channel.
220
- * @param mute Whether to subscribe to the specified remote user's audio stream.true: Stop subscribing to the audio stream of the specified user.false: (Default) Subscribe to the audio stream of the specified user. The SDK decides whether to subscribe according to the distance between the local user and the remote user.
249
+ * @param mute Whether to subscribe to the specified remote user's audio stream. true : Stop subscribing to the audio stream of the specified user. false : (Default) Subscribe to the audio stream of the specified user. The SDK decides whether to subscribe according to the distance between the local user and the remote user.
221
250
  *
222
251
  * @returns
223
- * 0: Success. < 0: Failure.
252
+ * 0: Success.
253
+ * < 0: Failure.
224
254
  */
225
255
  abstract muteRemoteAudioStream(uid: number, mute: boolean): number;
226
256
  }
@@ -228,29 +258,32 @@ export abstract class IBaseSpatialAudioEngine {
228
258
  /**
229
259
  * This class calculates user positions through the SDK to implement the spatial audio effect.
230
260
  *
231
- * This class inherits from IBaseSpatialAudioEngine . Before calling other APIs in this class, you need to call the initialize method to initialize this class.
261
+ * This class inherits from IBaseSpatialAudioEngine. Before calling other APIs in this class, you need to call the initialize method to initialize this class.
232
262
  */
233
263
  export abstract class ILocalSpatialAudioEngine extends IBaseSpatialAudioEngine {
234
264
  /**
235
265
  * Initializes ILocalSpatialAudioEngine.
236
266
  *
237
- * Before calling other methods of the ILocalSpatialAudioEngine class, you need to call this method to initialize ILocalSpatialAudioEngine . The SDK supports creating only one ILocalSpatialAudioEngine instance for an app.
267
+ * Before calling other methods of the ILocalSpatialAudioEngine class, you need to call this method to initialize ILocalSpatialAudioEngine.
268
+ * The SDK supports creating only one ILocalSpatialAudioEngine instance for an app.
238
269
  *
239
270
  * @returns
240
- * 0: Success. < 0: Failure.
271
+ * 0: Success.
272
+ * < 0: Failure.
241
273
  */
242
274
  abstract initialize(): number;
243
275
 
244
276
  /**
245
277
  * Updates the spatial position of the specified remote user.
246
278
  *
247
- * After successfully calling this method, the SDK calculates the spatial audio parameters based on the relative position of the local and remote user. Call this method after joinChannel .
279
+ * After successfully calling this method, the SDK calculates the spatial audio parameters based on the relative position of the local and remote user. Call this method after joinChannel.
248
280
  *
249
281
  * @param uid The user ID. This parameter must be the same as the user ID passed in when the user joined the channel.
250
282
  * @param posInfo The spatial position of the remote user. See RemoteVoicePositionInfo.
251
283
  *
252
284
  * @returns
253
- * 0: Success. < 0: Failure.
285
+ * 0: Success.
286
+ * < 0: Failure.
254
287
  */
255
288
  abstract updateRemotePosition(
256
289
  uid: number,
@@ -274,7 +307,8 @@ export abstract class ILocalSpatialAudioEngine extends IBaseSpatialAudioEngine {
274
307
  * @param uid The user ID. This parameter must be the same as the user ID passed in when the user joined the channel.
275
308
  *
276
309
  * @returns
277
- * 0: Success. < 0: Failure.
310
+ * 0: Success.
311
+ * < 0: Failure.
278
312
  */
279
313
  abstract removeRemotePosition(uid: number): number;
280
314
 
@@ -292,7 +326,8 @@ export abstract class ILocalSpatialAudioEngine extends IBaseSpatialAudioEngine {
292
326
  * After successfully calling this method, the local user no longer hears any remote users. After leaving the channel, to avoid wasting resources, you can also call this method to delete the spatial positions of all remote users.
293
327
  *
294
328
  * @returns
295
- * 0: Success. < 0: Failure.
329
+ * 0: Success.
330
+ * < 0: Failure.
296
331
  */
297
332
  abstract clearRemotePositions(): number;
298
333
 
@@ -310,13 +345,13 @@ export abstract class ILocalSpatialAudioEngine extends IBaseSpatialAudioEngine {
310
345
  * (0,0.5): Weak attenuation mode, that is, the volume and timbre are only weakly attenuated during the propagation process, and the sound can travel farther than the real environment.
311
346
  * 0.5: (Default) simulates the attenuation of the volume in the real environment; the effect is equivalent to not setting the speaker_attenuation parameter.
312
347
  * (0.5,1]: Strong attenuation mode, that is, the volume and timbre attenuate rapidly during the propagation process.
313
- * @param forceSet Whether to force the user's sound attenuation effect:true: Force attenuation to set the sound attenuation of the user. At this time, the attenuation coefficient of the sound insulation area set in the audioAttenuation of the SpatialAudioZone does not take effect for the user.
348
+ * @param forceSet Whether to force the user's sound attenuation effect: true : Force attenuation to set the sound attenuation of the user. At this time, the attenuation coefficient of the sound insulation area set in the audioAttenuation of the SpatialAudioZone does not take effect for the user.
314
349
  * If the sound source and listener are inside and outside the sound isolation area, the sound attenuation effect is determined by the audioAttenuation in SpatialAudioZone.
315
- * If the sound source and the listener are in the same sound insulation area or outside the same sound insulation area, the sound attenuation effect is determined by attenuation in this method.
316
- * false: Do not force attenuation to set the user's sound attenuation effect, as shown in the following two cases.
350
+ * If the sound source and the listener are in the same sound insulation area or outside the same sound insulation area, the sound attenuation effect is determined by attenuation in this method. false : Do not force attenuation to set the user's sound attenuation effect, as shown in the following two cases.
317
351
  *
318
352
  * @returns
319
- * 0: Success. < 0: Failure.
353
+ * 0: Success.
354
+ * < 0: Failure.
320
355
  */
321
356
  abstract setRemoteAudioAttenuation(
322
357
  uid: number,
@@ -19,7 +19,8 @@ export abstract class IAudioDeviceManager {
19
19
  * Enumerates the audio playback devices.
20
20
  *
21
21
  * @returns
22
- * Success: Returns an AudioDeviceInfo array, which includes all the audio playback devices. Failure: An empty array.
22
+ * Success: Returns an AudioDeviceInfo array, which includes all the audio playback devices.
23
+ * Failure: An empty array.
23
24
  */
24
25
  abstract enumeratePlaybackDevices(): AudioDeviceInfo[];
25
26
 
@@ -27,7 +28,8 @@ export abstract class IAudioDeviceManager {
27
28
  * Enumerates the audio capture devices.
28
29
  *
29
30
  * @returns
30
- * Success: An AudioDeviceInfo array, which includes all the audio capture devices. Failure: An empty array.
31
+ * Success: An AudioDeviceInfo array, which includes all the audio capture devices.
32
+ * Failure: An empty array.
31
33
  */
32
34
  abstract enumerateRecordingDevices(): AudioDeviceInfo[];
33
35
 
@@ -36,10 +38,11 @@ export abstract class IAudioDeviceManager {
36
38
  *
37
39
  * You can call this method to change the audio route currently being used, but this does not change the default audio route. For example, if the default audio route is speaker 1, you call this method to set the audio route as speaker 2 before joining a channel and then start a device test, the SDK conducts device test on speaker 2. After the device test is completed and you join a channel, the SDK still uses speaker 1, the default audio route.
38
40
  *
39
- * @param deviceId The ID of the specified audio playback device. You can get the device ID by calling enumeratePlaybackDevices. Connecting or disconnecting the audio device does not change the value of deviceId.The maximum length is MaxDeviceIdLengthType.
41
+ * @param deviceId The ID of the specified audio playback device. You can get the device ID by calling enumeratePlaybackDevices. Connecting or disconnecting the audio device does not change the value of deviceId. The maximum length is MaxDeviceIdLengthType.
40
42
  *
41
43
  * @returns
42
- * 0: Success. < 0: Failure.
44
+ * 0: Success.
45
+ * < 0: Failure.
43
46
  */
44
47
  abstract setPlaybackDevice(deviceId: string): number;
45
48
 
@@ -74,10 +77,11 @@ export abstract class IAudioDeviceManager {
74
77
  *
75
78
  * You can call this method to change the audio route currently being used, but this does not change the default audio route. For example, if the default audio route is microphone, you call this method to set the audio route as bluetooth earphones before joinging a channel and then start a device test, the SDK conducts device test on the bluetooth earphones. After the device test is completed and you join a channel, the SDK still uses the microphone for audio capturing.
76
79
  *
77
- * @param deviceId The ID of the audio capture device. You can get the Device ID by calling enumerateRecordingDevices. Connecting or disconnecting the audio device does not change the value of deviceId.The maximum length is MaxDeviceIdLengthType.
80
+ * @param deviceId The ID of the audio capture device. You can get the Device ID by calling enumerateRecordingDevices. Connecting or disconnecting the audio device does not change the value of deviceId. The maximum length is MaxDeviceIdLengthType.
78
81
  *
79
82
  * @returns
80
- * 0: Success. < 0: Failure.
83
+ * 0: Success.
84
+ * < 0: Failure.
81
85
  */
82
86
  abstract setRecordingDevice(deviceId: string): number;
83
87
 
@@ -105,7 +109,8 @@ export abstract class IAudioDeviceManager {
105
109
  * @param volume The volume of the audio recording device. The value range is [0,255]. 0 means no sound, 255 means maximum volume.
106
110
  *
107
111
  * @returns
108
- * 0: Success. < 0: Failure.
112
+ * 0: Success.
113
+ * < 0: Failure.
109
114
  */
110
115
  abstract setRecordingDeviceVolume(volume: number): number;
111
116
 
@@ -117,12 +122,16 @@ export abstract class IAudioDeviceManager {
117
122
  /**
118
123
  * Sets the loopback device.
119
124
  *
120
- * The SDK uses the current playback device as the loopback device by default. If you want to specify another audio device as the loopback device, call this method, and set deviceId to the loopback device you want to specify. You can call this method to change the audio route currently being used, but this does not change the default audio route. For example, if the default audio route is microphone, you call this method to set the audio route as a sound card before joinging a channel and then start a device test, the SDK conducts device test on the sound card. After the device test is completed and you join a channel, the SDK still uses the microphone for audio capturing. This method is for Windows and macOS only. The scenarios where this method is applicable are as follows: Use app A to play music through a Bluetooth headset; when using app B for a video conference, play through the speakers. If the loopback device is set as the Bluetooth headset, the SDK publishes the music in app A to the remote end. If the loopback device is set as the speaker, the SDK does not publish the music in app A to the remote end. If you set the loopback device as the Bluetooth headset, and then use a wired headset to play the music in app A, you need to call this method again, set the loopback device as the wired headset, and the SDK continues to publish the music in app A to remote end.
125
+ * The SDK uses the current playback device as the loopback device by default. If you want to specify another audio device as the loopback device, call this method, and set deviceId to the loopback device you want to specify. You can call this method to change the audio route currently being used, but this does not change the default audio route. For example, if the default audio route is microphone, you call this method to set the audio route as a sound card before joinging a channel and then start a device test, the SDK conducts device test on the sound card. After the device test is completed and you join a channel, the SDK still uses the microphone for audio capturing. This method is for Windows and macOS only. The scenarios where this method is applicable are as follows: Use app A to play music through a Bluetooth headset; when using app B for a video conference, play through the speakers.
126
+ * If the loopback device is set as the Bluetooth headset, the SDK publishes the music in app A to the remote end.
127
+ * If the loopback device is set as the speaker, the SDK does not publish the music in app A to the remote end.
128
+ * If you set the loopback device as the Bluetooth headset, and then use a wired headset to play the music in app A, you need to call this method again, set the loopback device as the wired headset, and the SDK continues to publish the music in app A to remote end.
121
129
  *
122
- * @param deviceId Specifies the loopback device of the SDK. You can get the device ID by calling enumeratePlaybackDevices. Connecting or disconnecting the audio device does not change the value of deviceId.The maximum length is MaxDeviceIdLengthType.
130
+ * @param deviceId Specifies the loopback device of the SDK. You can get the device ID by calling enumeratePlaybackDevices. Connecting or disconnecting the audio device does not change the value of deviceId. The maximum length is MaxDeviceIdLengthType.
123
131
  *
124
132
  * @returns
125
- * 0: Success. < 0: Failure.
133
+ * 0: Success.
134
+ * < 0: Failure.
126
135
  */
127
136
  abstract setLoopbackDevice(deviceId: string): number;
128
137
 
@@ -159,86 +168,106 @@ export abstract class IAudioDeviceManager {
159
168
  /**
160
169
  * Starts the audio playback device test.
161
170
  *
162
- * This method tests whether the audio playback device works properly. Once a user starts the test, the SDK plays an audio file specified by the user. If the user can hear the audio, the playback device works properly. After calling this method, the SDK triggers the onAudioVolumeIndication callback every 100 ms, reporting uid = 1 and the volume information of the playback device. Ensure that you call this method before joining a channel.
171
+ * This method tests whether the audio playback device works properly. Once a user starts the test, the SDK plays an audio file specified by the user. If the user can hear the audio, the playback device works properly. After calling this method, the SDK triggers the onAudioVolumeIndication callback every 100 ms, reporting uid = 1 and the volume information of the playback device.
172
+ * Ensure that you call this method before joining a channel.
163
173
  *
164
- * @param testAudioFilePath The path of the audio file. The data format is string in UTF-8.Supported file formats: wav, mp3, m4a, and aac.Supported file sample rates: 8000, 16000, 32000, 44100, and 48000 Hz.
174
+ * @param testAudioFilePath The path of the audio file. The data format is string in UTF-8.
175
+ * Supported file formats: wav, mp3, m4a, and aac.
176
+ * Supported file sample rates: 8000, 16000, 32000, 44100, and 48000 Hz.
165
177
  *
166
178
  * @returns
167
- * 0: Success. < 0: Failure.
179
+ * 0: Success.
180
+ * < 0: Failure.
168
181
  */
169
182
  abstract startPlaybackDeviceTest(testAudioFilePath: string): number;
170
183
 
171
184
  /**
172
185
  * Stops the audio playback device test.
173
186
  *
174
- * This method stops the audio playback device test. You must call this method to stop the test after calling the startPlaybackDeviceTest method. Ensure that you call this method before joining a channel.
187
+ * This method stops the audio playback device test. You must call this method to stop the test after calling the startPlaybackDeviceTest method.
188
+ * Ensure that you call this method before joining a channel.
175
189
  *
176
190
  * @returns
177
- * 0: Success. < 0: Failure.
191
+ * 0: Success.
192
+ * < 0: Failure.
178
193
  */
179
194
  abstract stopPlaybackDeviceTest(): number;
180
195
 
181
196
  /**
182
197
  * Starts the audio capture device test.
183
198
  *
184
- * This method tests whether the audio capture device works properly. After calling this method, the SDK triggers the onAudioVolumeIndication callback at the time interval set in this method, which reports uid = 0 and the volume information of the capturing device. Ensure that you call this method before joining a channel.
199
+ * This method tests whether the audio capture device works properly. After calling this method, the SDK triggers the onAudioVolumeIndication callback at the time interval set in this method, which reports uid = 0 and the volume information of the capturing device.
200
+ * Ensure that you call this method before joining a channel.
185
201
  *
186
202
  * @param indicationInterval The time interval (ms) at which the SDK triggers the onAudioVolumeIndication callback. Agora recommends setting a value greater than 200 ms. This value must not be less than 10 ms; otherwise, you can not receive the onAudioVolumeIndication callback.
187
203
  *
188
204
  * @returns
189
- * 0: Success. < 0: Failure.
205
+ * 0: Success.
206
+ * < 0: Failure.
190
207
  */
191
208
  abstract startRecordingDeviceTest(indicationInterval: number): number;
192
209
 
193
210
  /**
194
211
  * Stops the audio capture device test.
195
212
  *
196
- * This method stops the audio capture device test. You must call this method to stop the test after calling the startRecordingDeviceTest method. Ensure that you call this method before joining a channel.
213
+ * This method stops the audio capture device test. You must call this method to stop the test after calling the startRecordingDeviceTest method.
214
+ * Ensure that you call this method before joining a channel.
197
215
  *
198
216
  * @returns
199
- * 0: Success. < 0: Failure.
217
+ * 0: Success.
218
+ * < 0: Failure.
200
219
  */
201
220
  abstract stopRecordingDeviceTest(): number;
202
221
 
203
222
  /**
204
223
  * Starts an audio device loopback test.
205
224
  *
206
- * This method tests whether the local audio capture device and playback device are working properly. After starting the test, the audio capture device records the local audio, and the audio playback device plays the captured audio. The SDK triggers two independent onAudioVolumeIndication callbacks at the time interval set in this method, which reports the volume information of the capture device ( uid = 0) and the volume information of the playback device ( uid = 1) respectively. You can call this method either before or after joining a channel. This method only takes effect when called by the host. This method tests local audio devices and does not report the network conditions. When you finished testing, call stopAudioDeviceLoopbackTest to stop the audio device loopback test.
225
+ * This method tests whether the local audio capture device and playback device are working properly. After starting the test, the audio capture device records the local audio, and the audio playback device plays the captured audio. The SDK triggers two independent onAudioVolumeIndication callbacks at the time interval set in this method, which reports the volume information of the capture device (uid = 0) and the volume information of the playback device (uid = 1) respectively.
226
+ * You can call this method either before or after joining a channel.
227
+ * This method only takes effect when called by the host.
228
+ * This method tests local audio devices and does not report the network conditions.
229
+ * When you finished testing, call stopAudioDeviceLoopbackTest to stop the audio device loopback test.
207
230
  *
208
231
  * @param indicationInterval The time interval (ms) at which the SDK triggers the onAudioVolumeIndication callback. Agora recommends setting a value greater than 200 ms. This value must not be less than 10 ms; otherwise, you can not receive the onAudioVolumeIndication callback.
209
232
  *
210
233
  * @returns
211
- * 0: Success. < 0: Failure.
234
+ * 0: Success.
235
+ * < 0: Failure.
212
236
  */
213
237
  abstract startAudioDeviceLoopbackTest(indicationInterval: number): number;
214
238
 
215
239
  /**
216
240
  * Stops the audio device loopback test.
217
241
  *
218
- * You can call this method either before or after joining a channel. This method only takes effect when called by the host. Ensure that you call this method to stop the loopback test after calling the startAudioDeviceLoopbackTest method.
242
+ * You can call this method either before or after joining a channel.
243
+ * This method only takes effect when called by the host.
244
+ * Ensure that you call this method to stop the loopback test after calling the startAudioDeviceLoopbackTest method.
219
245
  *
220
246
  * @returns
221
- * 0: Success. < 0: Failure.
247
+ * 0: Success.
248
+ * < 0: Failure.
222
249
  */
223
250
  abstract stopAudioDeviceLoopbackTest(): number;
224
251
 
225
252
  /**
226
253
  * Sets the audio playback device used by the SDK to follow the system default audio playback device.
227
254
  *
228
- * @param enable Whether to follow the system default audio playback device:true: Follow the system default audio playback device. The SDK immediately switches the audio playback device when the system default audio playback device changes.false: Do not follow the system default audio playback device. The SDK switches the audio playback device to the system default audio playback device only when the currently used audio playback device is disconnected.
255
+ * @param enable Whether to follow the system default audio playback device: true : Follow the system default audio playback device. The SDK immediately switches the audio playback device when the system default audio playback device changes. false : Do not follow the system default audio playback device. The SDK switches the audio playback device to the system default audio playback device only when the currently used audio playback device is disconnected.
229
256
  *
230
257
  * @returns
231
- * 0: Success. < 0: Failure.
258
+ * 0: Success.
259
+ * < 0: Failure.
232
260
  */
233
261
  abstract followSystemPlaybackDevice(enable: boolean): number;
234
262
 
235
263
  /**
236
264
  * Sets the audio recording device used by the SDK to follow the system default audio recording device.
237
265
  *
238
- * @param enable Whether to follow the system default audio recording device:true: Follow the system default audio playback device. The SDK immediately switches the audio recording device when the system default audio recording device changes.false: Do not follow the system default audio playback device. The SDK switches the audio recording device to the system default audio recording device only when the currently used audio recording device is disconnected.
266
+ * @param enable Whether to follow the system default audio recording device: true : Follow the system default audio playback device. The SDK immediately switches the audio recording device when the system default audio recording device changes. false : Do not follow the system default audio playback device. The SDK switches the audio recording device to the system default audio recording device only when the currently used audio recording device is disconnected.
239
267
  *
240
268
  * @returns
241
- * 0: Success. < 0: Failure.
269
+ * 0: Success.
270
+ * < 0: Failure.
242
271
  */
243
272
  abstract followSystemRecordingDevice(enable: boolean): number;
244
273
 
@@ -247,10 +276,11 @@ export abstract class IAudioDeviceManager {
247
276
  *
248
277
  * This method is for Windows and macOS only.
249
278
  *
250
- * @param enable Whether to follow the system default audio playback device:true: Follow the system default audio playback device. When the default playback device of the system is changed, the SDK immediately switches to the loopback device.false: Do not follow the system default audio playback device. The SDK switches the audio loopback device to the system default audio playback device only when the current audio playback device is disconnected.
279
+ * @param enable Whether to follow the system default audio playback device: true : Follow the system default audio playback device. When the default playback device of the system is changed, the SDK immediately switches to the loopback device. false : Do not follow the system default audio playback device. The SDK switches the audio loopback device to the system default audio playback device only when the current audio playback device is disconnected.
251
280
  *
252
281
  * @returns
253
- * 0: Success. < 0: Failure.
282
+ * 0: Success.
283
+ * < 0: Failure.
254
284
  */
255
285
  abstract followSystemLoopbackDevice(enable: boolean): number;
256
286
 
@@ -265,7 +295,7 @@ export abstract class IAudioDeviceManager {
265
295
  * This method is for Windows and macOS only.
266
296
  *
267
297
  * @returns
268
- * The details about the default audio playback device. See AudioDeviceInfo .
298
+ * The details about the default audio playback device. See AudioDeviceInfo.
269
299
  */
270
300
  abstract getPlaybackDefaultDevice(): AudioDeviceInfo;
271
301
 
@@ -275,7 +305,7 @@ export abstract class IAudioDeviceManager {
275
305
  * This method is for Windows and macOS only.
276
306
  *
277
307
  * @returns
278
- * The details about the default audio capture device. See AudioDeviceInfo .
308
+ * The details about the default audio capture device. See AudioDeviceInfo.
279
309
  */
280
310
  abstract getRecordingDefaultDevice(): AudioDeviceInfo;
281
311
  }
@@ -96,15 +96,21 @@ export class IMediaEngineImpl implements IMediaEngine {
96
96
  return 'MediaEngine_pushAudioFrame';
97
97
  }
98
98
 
99
- pullAudioFrame(): AudioFrame {
100
- const apiType = this.getApiTypeFromPullAudioFrame();
101
- const jsonParams = {};
99
+ pullAudioFrame(frame: AudioFrame): number {
100
+ const apiType = this.getApiTypeFromPullAudioFrame(frame);
101
+ const jsonParams = {
102
+ frame: frame,
103
+ toJSON: () => {
104
+ return {
105
+ frame: frame,
106
+ };
107
+ },
108
+ };
102
109
  const jsonResults = callIrisApi.call(this, apiType, jsonParams);
103
- const frame = jsonResults.frame;
104
- return frame;
110
+ return jsonResults.result;
105
111
  }
106
112
 
107
- protected getApiTypeFromPullAudioFrame(): string {
113
+ protected getApiTypeFromPullAudioFrame(frame: AudioFrame): string {
108
114
  return 'MediaEngine_pullAudioFrame';
109
115
  }
110
116
 
@@ -26,10 +26,8 @@ import {
26
26
  LocalAccessPointConfiguration,
27
27
  LocalTranscoderConfiguration,
28
28
  LowlightEnhanceOptions,
29
- NetworkType,
30
29
  RecorderStreamInfo,
31
30
  Rectangle,
32
- ScreenCaptureFramerateCapability,
33
31
  ScreenCaptureParameters,
34
32
  ScreenCaptureParameters2,
35
33
  ScreenScenarioType,
@@ -264,7 +262,7 @@ export function processIRtcEngineEventHandler(
264
262
  case 'onFirstLocalVideoFramePublished':
265
263
  if (handler.onFirstLocalVideoFramePublished !== undefined) {
266
264
  handler.onFirstLocalVideoFramePublished(
267
- jsonParams.connection,
265
+ jsonParams.source,
268
266
  jsonParams.elapsed
269
267
  );
270
268
  }
@@ -413,7 +411,7 @@ export function processIRtcEngineEventHandler(
413
411
 
414
412
  case 'onLocalVideoStats':
415
413
  if (handler.onLocalVideoStats !== undefined) {
416
- handler.onLocalVideoStats(jsonParams.connection, jsonParams.stats);
414
+ handler.onLocalVideoStats(jsonParams.source, jsonParams.stats);
417
415
  }
418
416
  break;
419
417
 
@@ -4732,13 +4730,13 @@ export class IRtcEngineImpl implements IRtcEngine {
4732
4730
  return 'RtcEngine_isCameraExposureSupported';
4733
4731
  }
4734
4732
 
4735
- setCameraExposureFactor(value: number): number {
4736
- const apiType = this.getApiTypeFromSetCameraExposureFactor(value);
4733
+ setCameraExposureFactor(factor: number): number {
4734
+ const apiType = this.getApiTypeFromSetCameraExposureFactor(factor);
4737
4735
  const jsonParams = {
4738
- value: value,
4736
+ factor: factor,
4739
4737
  toJSON: () => {
4740
4738
  return {
4741
- value: value,
4739
+ factor: factor,
4742
4740
  };
4743
4741
  },
4744
4742
  };
@@ -4746,7 +4744,7 @@ export class IRtcEngineImpl implements IRtcEngine {
4746
4744
  return jsonResults.result;
4747
4745
  }
4748
4746
 
4749
- protected getApiTypeFromSetCameraExposureFactor(value: number): string {
4747
+ protected getApiTypeFromSetCameraExposureFactor(factor: number): string {
4750
4748
  return 'RtcEngine_setCameraExposureFactor';
4751
4749
  }
4752
4750
 
@@ -5153,7 +5151,7 @@ export class IRtcEngineImpl implements IRtcEngine {
5153
5151
  return 'RtcEngine_updateScreenCapture';
5154
5152
  }
5155
5153
 
5156
- queryScreenCaptureCapability(): ScreenCaptureFramerateCapability {
5154
+ queryScreenCaptureCapability(): number {
5157
5155
  const apiType = this.getApiTypeFromQueryScreenCaptureCapability();
5158
5156
  const jsonParams = {};
5159
5157
  const jsonResults = callIrisApi.call(this, apiType, jsonParams);
@@ -6647,7 +6645,7 @@ export class IRtcEngineImpl implements IRtcEngine {
6647
6645
  return 'RtcEngine_enableWirelessAccelerate';
6648
6646
  }
6649
6647
 
6650
- getNetworkType(): NetworkType {
6648
+ getNetworkType(): number {
6651
6649
  const apiType = this.getApiTypeFromGetNetworkType();
6652
6650
  const jsonParams = {};
6653
6651
  const jsonResults = callIrisApi.call(this, apiType, jsonParams);