agora-electron-sdk 4.4.0-dev.1 → 4.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60) hide show
  1. package/CHANGELOG.md +7 -0
  2. package/js/AgoraSdk.js +1 -1
  3. package/js/Private/AgoraBase.js +28 -28
  4. package/js/Private/AgoraMediaBase.js +30 -4
  5. package/js/Private/IAgoraLog.js +4 -0
  6. package/js/Private/IAgoraRhythmPlayer.js +12 -12
  7. package/js/Private/IAgoraRtcEngine.js +64 -64
  8. package/js/Private/impl/IAgoraRtcEngineImpl.js +0 -32
  9. package/js/Private/internal/RtcEngineExInternal.js +9 -3
  10. package/js/Renderer/AgoraView.js +21 -2
  11. package/js/Renderer/IRendererManager.js +15 -2
  12. package/js/Renderer/RendererCache.js +2 -2
  13. package/package.json +9 -5
  14. package/scripts/downloadPrebuild.js +9 -3
  15. package/ts/AgoraSdk.ts +1 -1
  16. package/ts/Private/AgoraBase.ts +35 -31
  17. package/ts/Private/AgoraMediaBase.ts +60 -40
  18. package/ts/Private/IAgoraLog.ts +4 -0
  19. package/ts/Private/IAgoraMediaEngine.ts +8 -19
  20. package/ts/Private/IAgoraMediaPlayer.ts +13 -12
  21. package/ts/Private/IAgoraMusicContentCenter.ts +13 -1
  22. package/ts/Private/IAgoraRhythmPlayer.ts +12 -12
  23. package/ts/Private/IAgoraRtcEngine.ts +296 -262
  24. package/ts/Private/IAgoraRtcEngineEx.ts +35 -34
  25. package/ts/Private/IAudioDeviceManager.ts +4 -4
  26. package/ts/Private/impl/IAgoraRtcEngineImpl.ts +0 -42
  27. package/ts/Private/internal/RtcEngineExInternal.ts +7 -2
  28. package/ts/Renderer/AgoraView.ts +33 -4
  29. package/ts/Renderer/IRendererManager.ts +20 -2
  30. package/ts/Renderer/RendererCache.ts +2 -2
  31. package/ts/Types.ts +1 -1
  32. package/types/AgoraSdk.d.ts +1 -1
  33. package/types/Private/AgoraBase.d.ts +35 -31
  34. package/types/Private/AgoraBase.d.ts.map +1 -1
  35. package/types/Private/AgoraMediaBase.d.ts +59 -40
  36. package/types/Private/AgoraMediaBase.d.ts.map +1 -1
  37. package/types/Private/IAgoraLog.d.ts +5 -1
  38. package/types/Private/IAgoraLog.d.ts.map +1 -1
  39. package/types/Private/IAgoraMediaEngine.d.ts +8 -19
  40. package/types/Private/IAgoraMediaEngine.d.ts.map +1 -1
  41. package/types/Private/IAgoraMediaPlayer.d.ts +13 -12
  42. package/types/Private/IAgoraMediaPlayer.d.ts.map +1 -1
  43. package/types/Private/IAgoraMusicContentCenter.d.ts +13 -1
  44. package/types/Private/IAgoraMusicContentCenter.d.ts.map +1 -1
  45. package/types/Private/IAgoraRhythmPlayer.d.ts +12 -12
  46. package/types/Private/IAgoraRtcEngine.d.ts +296 -260
  47. package/types/Private/IAgoraRtcEngine.d.ts.map +1 -1
  48. package/types/Private/IAgoraRtcEngineEx.d.ts +35 -34
  49. package/types/Private/IAgoraRtcEngineEx.d.ts.map +1 -1
  50. package/types/Private/IAudioDeviceManager.d.ts +4 -4
  51. package/types/Private/impl/IAgoraRtcEngineImpl.d.ts +0 -4
  52. package/types/Private/impl/IAgoraRtcEngineImpl.d.ts.map +1 -1
  53. package/types/Private/internal/RtcEngineExInternal.d.ts.map +1 -1
  54. package/types/Renderer/AgoraView.d.ts +8 -2
  55. package/types/Renderer/AgoraView.d.ts.map +1 -1
  56. package/types/Renderer/IRendererManager.d.ts.map +1 -1
  57. package/types/Renderer/RendererCache.d.ts +1 -1
  58. package/types/Renderer/RendererCache.d.ts.map +1 -1
  59. package/types/Types.d.ts +1 -1
  60. package/types/Types.d.ts.map +1 -1
@@ -238,7 +238,7 @@ export enum ErrorCodeType {
238
238
  ErrNetDown = 14,
239
239
  /**
240
240
  * 17: The request to join the channel is rejected. Possible reasons include the following:
241
- * The user is already in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the ConnectionStateDisconnected (1) state.
241
+ * The user is already in the channel. Agora recommends that you use the onConnectionStateChanged callback to see whether the user is in the channel. Do not call this method to join the channel unless you receive the ConnectionStateDisconnected (1) state.
242
242
  * After calling startEchoTest for the call test, the user tries to join the channel without calling stopEchoTest to end the current test. To join a channel, the call test must be ended by calling stopEchoTest.
243
243
  */
244
244
  ErrJoinChannelRejected = 17,
@@ -1300,6 +1300,10 @@ export class AdvanceOptions {
1300
1300
  * Compression preference for video encoding. See CompressionPreference.
1301
1301
  */
1302
1302
  compressionPreference?: CompressionPreference;
1303
+ /**
1304
+ * Whether to encode and send the Alpha data present in the video frame to the remote end: true : Encode and send Alpha data. false : (Default) Do not encode and send Alpha data.
1305
+ */
1306
+ encodeAlpha?: boolean;
1303
1307
  }
1304
1308
 
1305
1309
  /**
@@ -1437,7 +1441,7 @@ export class VideoEncoderConfiguration {
1437
1441
  */
1438
1442
  orientationMode?: OrientationMode;
1439
1443
  /**
1440
- * Video degradation preference under limited bandwidth. See DegradationPreference.
1444
+ * Video degradation preference under limited bandwidth. See DegradationPreference. When this parameter is set to MaintainFramerate (1) or MaintainBalanced (2), orientationMode needs to be set to OrientationModeAdaptive (0) at the same time, otherwise the setting will not take effect.
1441
1445
  */
1442
1446
  degradationPreference?: DegradationPreference;
1443
1447
  /**
@@ -1946,7 +1950,7 @@ export enum AudioScenarioType {
1946
1950
  */
1947
1951
  AudioScenarioGameStreaming = 3,
1948
1952
  /**
1949
- * 5: Chatroom scenario, where users need to frequently switch the user role or mute and unmute the microphone. For example, education scenarios. In this scenario, audience members receive a pop-up window to request permission of using microphones.
1953
+ * 5: Chatroom scenario, where users need to frequently switch the user role or mute and unmute the microphone. For example, education scenarios.
1950
1954
  */
1951
1955
  AudioScenarioChatroom = 5,
1952
1956
  /**
@@ -2030,7 +2034,7 @@ export enum VideoApplicationScenarioType {
2030
2034
  */
2031
2035
  ApplicationScenarioGeneral = 0,
2032
2036
  /**
2033
- * If set to ApplicationScenarioMeeting (1), the SDK automatically enables the following strategies:
2037
+ * ApplicationScenarioMeeting (1) is suitable for meeting scenarios. The SDK automatically enables the following strategies:
2034
2038
  * In meeting scenarios where low-quality video streams are required to have a high bitrate, the SDK automatically enables multiple technologies used to deal with network congestions, to enhance the performance of the low-quality streams and to ensure the smooth reception by subscribers.
2035
2039
  * The SDK monitors the number of subscribers to the high-quality video stream in real time and dynamically adjusts its configuration based on the number of subscribers.
2036
2040
  * If nobody subscribes to the high-quality stream, the SDK automatically reduces its bitrate and frame rate to save upstream bandwidth.
@@ -2047,7 +2051,7 @@ export enum VideoApplicationScenarioType {
2047
2051
  */
2048
2052
  ApplicationScenarioMeeting = 1,
2049
2053
  /**
2050
- * @ignore
2054
+ * ApplicationScenario1v1 (2) is suitable for 1v1 video call scenarios. To meet the requirements for low latency and high-quality video in this scenario, the SDK optimizes its strategies, improving performance in terms of video quality, first frame rendering, latency on mid-to-low-end devices, and smoothness under weak network conditions. 2: 1v1 video call scenario.
2051
2055
  */
2052
2056
  ApplicationScenario1v1 = 2,
2053
2057
  }
@@ -2300,23 +2304,23 @@ export enum LocalVideoStreamReason {
2300
2304
  */
2301
2305
  LocalVideoStreamReasonScreenCaptureWindowNotSupported = 20,
2302
2306
  /**
2303
- * @ignore
2307
+ * 21: (Windows only) The screen has not captured any data available for window sharing.
2304
2308
  */
2305
2309
  LocalVideoStreamReasonScreenCaptureFailure = 21,
2306
2310
  /**
2307
- * @ignore
2311
+ * 22: No permission for screen capture.
2308
2312
  */
2309
2313
  LocalVideoStreamReasonScreenCaptureNoPermission = 22,
2310
2314
  /**
2311
- * @ignore
2315
+ * 24: (Windows only) An unexpected error occurred during screen sharing (possibly due to window blocking failure), resulting in decreased performance, but the screen sharing process itself was not affected.
2312
2316
  */
2313
2317
  LocalVideoStreamReasonScreenCaptureAutoFallback = 24,
2314
2318
  /**
2315
- * @ignore
2319
+ * 25: (Windows only) The window for the current screen capture is hidden and not visible on the current screen.
2316
2320
  */
2317
2321
  LocalVideoStreamReasonScreenCaptureWindowHidden = 25,
2318
2322
  /**
2319
- * @ignore
2323
+ * 26: (Windows only) The window for screen capture has been restored from hidden state.
2320
2324
  */
2321
2325
  LocalVideoStreamReasonScreenCaptureWindowRecoverFromHidden = 26,
2322
2326
  /**
@@ -2324,15 +2328,15 @@ export enum LocalVideoStreamReason {
2324
2328
  */
2325
2329
  LocalVideoStreamReasonScreenCaptureWindowRecoverFromMinimized = 27,
2326
2330
  /**
2327
- * @ignore
2331
+ * 28: (Windows only) Screen capture has been paused. Common scenarios reporting this error code: The current screen may have been switched to a secure desktop, such as a UAC dialog box or Winlogon desktop.
2328
2332
  */
2329
2333
  LocalVideoStreamReasonScreenCapturePaused = 28,
2330
2334
  /**
2331
- * @ignore
2335
+ * 29: (Windows only) Screen capture has resumed from paused state.
2332
2336
  */
2333
2337
  LocalVideoStreamReasonScreenCaptureResumed = 29,
2334
2338
  /**
2335
- * @ignore
2339
+ * 30: The displayer used for screen capture is disconnected.
2336
2340
  */
2337
2341
  LocalVideoStreamReasonScreenCaptureDisplayDisconnected = 30,
2338
2342
  }
@@ -3419,7 +3423,7 @@ export enum ConnectionChangedReasonType {
3419
3423
  */
3420
3424
  export enum ClientRoleChangeFailedReason {
3421
3425
  /**
3422
- * 1: The number of hosts in the channel is already at the upper limit. This enumerator is reported only when the support for 128 users is enabled. The maximum number of hosts is based on the actual number of hosts configured when you enable the 128-user feature.
3426
+ * 1: The number of hosts in the channel exceeds the limit. This enumerator is reported only when the support for 128 users is enabled. The maximum number of hosts is based on the actual number of hosts configured when you enable the 128-user feature.
3423
3427
  */
3424
3428
  ClientRoleChangeFailedTooManyBroadcasters = 1,
3425
3429
  /**
@@ -3427,11 +3431,11 @@ export enum ClientRoleChangeFailedReason {
3427
3431
  */
3428
3432
  ClientRoleChangeFailedNotAuthorized = 2,
3429
3433
  /**
3430
- * 3: The request is timed out. Agora recommends you prompt the user to check the network connection and try to switch their user role again.
3434
+ * 3: The request is timed out. Agora recommends you prompt the user to check the network connection and try to switch their user role again. Deprecated: This enumerator is deprecated since v4.4.0 and is not recommended for use.
3431
3435
  */
3432
3436
  ClientRoleChangeFailedRequestTimeOut = 3,
3433
3437
  /**
3434
- * 4: The SDK connection fails. You can use reason reported in the onConnectionStateChanged callback to troubleshoot the failure.
3438
+ * 4: The SDK is disconnected from the Agora edge server. You can troubleshoot the failure through the reason reported by onConnectionStateChanged. Deprecated: This enumerator is deprecated since v4.4.0 and is not recommended for use.
3435
3439
  */
3436
3440
  ClientRoleChangeFailedConnectionFailed = 4,
3437
3441
  }
@@ -3533,7 +3537,7 @@ export enum NetworkType {
3533
3537
  */
3534
3538
  export enum VideoViewSetupMode {
3535
3539
  /**
3536
- * 0: (Default) Replaces a view.
3540
+ * 0: (Default) Clear all added views and replace with a new view.
3537
3541
  */
3538
3542
  VideoViewSetupReplace = 0,
3539
3543
  /**
@@ -3593,7 +3597,7 @@ export class VideoCanvas {
3593
3597
  */
3594
3598
  cropArea?: Rectangle;
3595
3599
  /**
3596
- * (Optional) Whether the receiver enables alpha mask rendering: true : The receiver enables alpha mask rendering. false : (Default) The receiver disables alpha mask rendering. Alpha mask rendering can create images with transparent effects and extract portraits from videos. When used in combination with other methods, you can implement effects such as portrait-in-picture and watermarking.
3600
+ * (Optional) Whether to enable alpha mask rendering: true : Enable alpha mask rendering. false : (Default) Disable alpha mask rendering. Alpha mask rendering can create images with transparent effects and extract portraits from videos. When used in combination with other methods, you can implement effects such as portrait-in-picture and watermarking.
3597
3601
  * The receiver can render alpha channel information only when the sender enables alpha transmission.
3598
3602
  * To enable alpha transmission, contact Agora technical support.
3599
3603
  */
@@ -3857,7 +3861,7 @@ export class ColorEnhanceOptions {
3857
3861
  */
3858
3862
  export enum BackgroundSourceType {
3859
3863
  /**
3860
- * 0: Process the background as alpha information without replacement, only separating the portrait and the background. After setting this value, you can call startLocalVideoTranscoder to implement the picture-in-picture effect.
3864
+ * 0: Process the background as alpha data without replacement, only separating the portrait and the background. After setting this value, you can call startLocalVideoTranscoder to implement the picture-in-picture effect.
3861
3865
  */
3862
3866
  BackgroundNone = 0,
3863
3867
  /**
@@ -4221,47 +4225,47 @@ export enum HeadphoneEqualizerPreset {
4221
4225
  }
4222
4226
 
4223
4227
  /**
4224
- * @ignore
4228
+ * Voice AI tuner sound types.
4225
4229
  */
4226
4230
  export enum VoiceAiTunerType {
4227
4231
  /**
4228
- * @ignore
4232
+ * 0: Mature male voice. A deep and magnetic male voice.
4229
4233
  */
4230
4234
  VoiceAiTunerMatureMale = 0,
4231
4235
  /**
4232
- * @ignore
4236
+ * 1: Fresh male voice. A fresh and slightly sweet male voice.
4233
4237
  */
4234
4238
  VoiceAiTunerFreshMale = 1,
4235
4239
  /**
4236
- * @ignore
4240
+ * 2: Elegant female voice. A deep and charming female voice.
4237
4241
  */
4238
4242
  VoiceAiTunerElegantFemale = 2,
4239
4243
  /**
4240
- * @ignore
4244
+ * 3: Sweet female voice. A high-pitched and cute female voice.
4241
4245
  */
4242
4246
  VoiceAiTunerSweetFemale = 3,
4243
4247
  /**
4244
- * @ignore
4248
+ * 4: Warm male singing. A warm and melodious male voice.
4245
4249
  */
4246
4250
  VoiceAiTunerWarmMaleSinging = 4,
4247
4251
  /**
4248
- * @ignore
4252
+ * 5: Gentle female singing. A soft and delicate female voice.
4249
4253
  */
4250
4254
  VoiceAiTunerGentleFemaleSinging = 5,
4251
4255
  /**
4252
- * @ignore
4256
+ * 6: Husky male singing. A unique husky male voice.
4253
4257
  */
4254
4258
  VoiceAiTunerHuskyMaleSinging = 6,
4255
4259
  /**
4256
- * @ignore
4260
+ * 7: Warm elegant female singing. A warm and mature female voice.
4257
4261
  */
4258
4262
  VoiceAiTunerWarmElegantFemaleSinging = 7,
4259
4263
  /**
4260
- * @ignore
4264
+ * 8: Powerful male singing. A strong and powerful male voice.
4261
4265
  */
4262
4266
  VoiceAiTunerPowerfulMaleSinging = 8,
4263
4267
  /**
4264
- * @ignore
4268
+ * 9: Dreamy female singing. A dreamy and soft female voice.
4265
4269
  */
4266
4270
  VoiceAiTunerDreamyFemaleSinging = 9,
4267
4271
  }
@@ -4402,7 +4406,7 @@ export class AudioRecordingConfiguration {
4402
4406
  */
4403
4407
  fileRecordingType?: AudioFileRecordingType;
4404
4408
  /**
4405
- * Recording quality. See AudioRecordingQualityType. Note: This parameter applies to AAC files only.
4409
+ * Recording quality. See AudioRecordingQualityType. This parameter applies to AAC files only.
4406
4410
  */
4407
4411
  quality?: AudioRecordingQualityType;
4408
4412
  /**
@@ -2,23 +2,23 @@ import './extension/AgoraMediaBaseExtension';
2
2
  import { EncodedVideoFrameInfo } from './AgoraBase';
3
3
 
4
4
  /**
5
- * @ignore
5
+ * The context information of the extension.
6
6
  */
7
7
  export class ExtensionContext {
8
8
  /**
9
- * @ignore
9
+ * Whether the uid in ExtensionContext is valid: true : The uid is valid. false : The uid is invalid.
10
10
  */
11
11
  isValid?: boolean;
12
12
  /**
13
- * @ignore
13
+ * The user ID. 0 represents a local user, while greater than 0 represents a remote user.
14
14
  */
15
15
  uid?: number;
16
16
  /**
17
- * @ignore
17
+ * The name of the extension provider.
18
18
  */
19
19
  providerName?: string;
20
20
  /**
21
- * @ignore
21
+ * The name of the extension.
22
22
  */
23
23
  extensionName?: string;
24
24
  }
@@ -308,7 +308,7 @@ export enum ContentInspectType {
308
308
  }
309
309
 
310
310
  /**
311
- * A ContentInspectModule structure used to configure the frequency of video screenshot and upload.
311
+ * The ContentInspectModule structure, used to configure the frequency of video screenshot and upload.
312
312
  */
313
313
  export class ContentInspectModule {
314
314
  /**
@@ -322,7 +322,7 @@ export class ContentInspectModule {
322
322
  }
323
323
 
324
324
  /**
325
- * Configuration of video screenshot and upload.
325
+ * Screenshot and upload configuration.
326
326
  */
327
327
  export class ContentInspectConfig {
328
328
  /**
@@ -399,6 +399,10 @@ export class AudioPcmFrame {
399
399
  * The audio frame.
400
400
  */
401
401
  data_?: number[];
402
+ /**
403
+ * @ignore
404
+ */
405
+ is_stereo_?: boolean;
402
406
  }
403
407
 
404
408
  /**
@@ -833,6 +837,32 @@ export class Hdr10MetadataInfo {
833
837
  maxFrameAverageLightLevel?: number;
834
838
  }
835
839
 
840
+ /**
841
+ * @ignore
842
+ */
843
+ export enum AlphaStitchMode {
844
+ /**
845
+ * @ignore
846
+ */
847
+ NoAlphaStitch = 0,
848
+ /**
849
+ * @ignore
850
+ */
851
+ AlphaStitchUp = 1,
852
+ /**
853
+ * @ignore
854
+ */
855
+ AlphaStitchBelow = 2,
856
+ /**
857
+ * @ignore
858
+ */
859
+ AlphaStitchLeft = 3,
860
+ /**
861
+ * @ignore
862
+ */
863
+ AlphaStitchRight = 4,
864
+ }
865
+
836
866
  /**
837
867
  * @ignore
838
868
  */
@@ -924,7 +954,7 @@ export class ExternalVideoFrame {
924
954
  /**
925
955
  * @ignore
926
956
  */
927
- fence_object?: number;
957
+ fenceObject?: number;
928
958
  /**
929
959
  * This parameter only applies to video data in Texture format. Incoming 4 × 4 transformational matrix. The typical value is a unit matrix.
930
960
  */
@@ -932,27 +962,33 @@ export class ExternalVideoFrame {
932
962
  /**
933
963
  * This parameter only applies to video data in Texture format. The MetaData buffer. The default value is NULL.
934
964
  */
935
- metadata_buffer?: Uint8Array;
965
+ metadataBuffer?: Uint8Array;
936
966
  /**
937
967
  * This parameter only applies to video data in Texture format. The MetaData size. The default value is 0.
938
968
  */
939
- metadata_size?: number;
969
+ metadataSize?: number;
940
970
  /**
941
- * @ignore
971
+ * The alpha channel data output by using portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0,255], where 0 represents the background and 255 represents the foreground (portrait). By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, video, etc. In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering.
942
972
  */
943
973
  alphaBuffer?: Uint8Array;
944
974
  /**
945
- * @ignore
975
+ * This parameter only applies to video data in BGRA or RGBA format. Whether to extract the alpha channel data from the video frame and automatically fill it into alphaBuffer : true :Extract and fill the alpha channel data. false : (Default) Do not extract and fill the Alpha channel data. For video data in BGRA or RGBA format, you can set the Alpha channel data in either of the following ways:
976
+ * Automatically by setting this parameter to true.
977
+ * Manually through the alphaBuffer parameter.
946
978
  */
947
979
  fillAlphaBuffer?: boolean;
948
980
  /**
949
- * @ignore
981
+ * When the video frame contains alpha channel data, it represents the relative position of alphaBuffer and the video frame. See AlphaStitchMode.
950
982
  */
951
- alphaStitchMode?: number;
983
+ alphaStitchMode?: AlphaStitchMode;
952
984
  /**
953
- * This parameter only applies to video data in Windows Texture format. It represents an index of an ID3D11Texture2D texture object used by the video frame in the ID3D11Texture2D array.
985
+ * This parameter only applies to video data in Windows Texture format. It represents a pointer to an object of type ID3D11Texture2D, which is used by a video frame.
954
986
  */
955
- texture_slice_index?: number;
987
+ d3d11Texture2d?: any;
988
+ /**
989
+ * @ignore
990
+ */
991
+ textureSliceIndex?: number;
956
992
  /**
957
993
  * @ignore
958
994
  */
@@ -1034,19 +1070,19 @@ export class VideoFrame {
1034
1070
  */
1035
1071
  matrix?: number[];
1036
1072
  /**
1037
- * @ignore
1073
+ * The alpha channel data output by using portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0,255], where 0 represents the background and 255 represents the foreground (portrait). By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, video, etc. In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering.
1038
1074
  */
1039
1075
  alphaBuffer?: Uint8Array;
1040
1076
  /**
1041
- * @ignore
1077
+ * When the video frame contains alpha channel data, it represents the relative position of alphaBuffer and the video frame. See AlphaStitchMode.
1042
1078
  */
1043
- alphaStitchMode?: number;
1079
+ alphaStitchMode?: AlphaStitchMode;
1044
1080
  /**
1045
1081
  * @ignore
1046
1082
  */
1047
1083
  pixelBuffer?: Uint8Array;
1048
1084
  /**
1049
- * The meta information in the video frame. To use this parameter, please.
1085
+ * The meta information in the video frame. To use this parameter, please contact Agora technical support.
1050
1086
  */
1051
1087
  metaInfo?: IVideoFrameMetaInfo;
1052
1088
  /**
@@ -1420,6 +1456,7 @@ export interface IVideoFrameObserver {
1420
1456
  * Occurs each time the SDK receives a video frame before encoding.
1421
1457
  *
1422
1458
  * After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data before encoding and then process the data according to your particular scenarios. After processing, you can send the processed video data back to the SDK in this callback.
1459
+ * It is recommended that you ensure the modified parameters in videoFrame are consistent with the actual situation of the video frames in the video frame buffer. Otherwise, it may cause unexpected rotation, distortion, and other issues in the local preview and remote video display.
1423
1460
  * It's recommended that you implement this callback through the C++ API.
1424
1461
  * Due to framework limitations, this callback does not support sending processed video data back to the SDK.
1425
1462
  * The video data that this callback gets has been preprocessed, with its content cropped and rotated, and the image enhanced.
@@ -1443,6 +1480,7 @@ export interface IVideoFrameObserver {
1443
1480
  * Occurs each time the SDK receives a video frame sent by the remote user.
1444
1481
  *
1445
1482
  * After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data sent from the remote end before rendering, and then process it according to the particular scenarios.
1483
+ * It is recommended that you ensure the modified parameters in videoFrame are consistent with the actual situation of the video frames in the video frame buffer. Otherwise, it may cause unexpected rotation, distortion, and other issues in the local preview and remote video display.
1446
1484
  * If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel.
1447
1485
  * It's recommended that you implement this callback through the C++ API.
1448
1486
  * Due to framework limitations, this callback does not support sending processed video data back to the SDK.
@@ -1594,26 +1632,8 @@ export interface IFaceInfoObserver {
1594
1632
  * yaw: Head yaw angle. A positive value means turning left, while a negative value means turning right.
1595
1633
  * roll: Head roll angle. A positive value means tilting to the right, while a negative value means tilting to the left.
1596
1634
  * timestamp: String. The timestamp of the output result, in milliseconds. Here is an example of JSON:
1597
- * {
1598
- * "faces":[{
1599
- * "blendshapes":{
1600
- * "eyeBlinkLeft":0.9, "eyeLookDownLeft":0.0, "eyeLookInLeft":0.0, "eyeLookOutLeft":0.0, "eyeLookUpLeft":0.0,
1601
- * "eyeSquintLeft":0.0, "eyeWideLeft":0.0, "eyeBlinkRight":0.0, "eyeLookDownRight":0.0, "eyeLookInRight":0.0,
1602
- * "eyeLookOutRight":0.0, "eyeLookUpRight":0.0, "eyeSquintRight":0.0, "eyeWideRight":0.0, "jawForward":0.0,
1603
- * "jawLeft":0.0, "jawRight":0.0, "jawOpen":0.0, "mouthClose":0.0, "mouthFunnel":0.0, "mouthPucker":0.0,
1604
- * "mouthLeft":0.0, "mouthRight":0.0, "mouthSmileLeft":0.0, "mouthSmileRight":0.0, "mouthFrownLeft":0.0,
1605
- * "mouthFrownRight":0.0, "mouthDimpleLeft":0.0, "mouthDimpleRight":0.0, "mouthStretchLeft":0.0, "mouthStretchRight":0.0,
1606
- * "mouthRollLower":0.0, "mouthRollUpper":0.0, "mouthShrugLower":0.0, "mouthShrugUpper":0.0, "mouthPressLeft":0.0,
1607
- * "mouthPressRight":0.0, "mouthLowerDownLeft":0.0, "mouthLowerDownRight":0.0, "mouthUpperUpLeft":0.0, "mouthUpperUpRight":0.0,
1608
- * "browDownLeft":0.0, "browDownRight":0.0, "browInnerUp":0.0, "browOuterUpLeft":0.0, "browOuterUpRight":0.0,
1609
- * "cheekPuff":0.0, "cheekSquintLeft":0.0, "cheekSquintRight":0.0, "noseSneerLeft":0.0, "noseSneerRight":0.0,
1610
- * "tongueOut":0.0
1611
- * },
1612
- * "rotation":{"pitch":30.0, "yaw":25.5, "roll":-15.5},
1613
- *
1614
- * }],
1615
- * "timestamp":"654879876546"
1616
- * }
1635
+ * { "faces":[{ "blendshapes":{ "eyeBlinkLeft":0.9, "eyeLookDownLeft":0.0, "eyeLookInLeft":0.0, "eyeLookOutLeft":0.0, "eyeLookUpLeft":0.0, "eyeSquintLeft":0.0, "eyeWideLeft":0.0, "eyeBlinkRight":0.0, "eyeLookDownRight":0.0, "eyeLookInRight":0.0, "eyeLookOutRight":0.0, "eyeLookUpRight":0.0, "eyeSquintRight":0.0, "eyeWideRight":0.0, "jawForward":0.0, "jawLeft":0.0, "jawRight":0.0, "jawOpen":0.0, "mouthClose":0.0, "mouthFunnel":0.0, "mouthPucker":0.0, "mouthLeft":0.0, "mouthRight":0.0, "mouthSmileLeft":0.0, "mouthSmileRight":0.0, "mouthFrownLeft":0.0, "mouthFrownRight":0.0, "mouthDimpleLeft":0.0, "mouthDimpleRight":0.0, "mouthStretchLeft":0.0, "mouthStretchRight":0.0, "mouthRollLower":0.0, "mouthRollUpper":0.0, "mouthShrugLower":0.0, "mouthShrugUpper":0.0, "mouthPressLeft":0.0, "mouthPressRight":0.0, "mouthLowerDownLeft":0.0, "mouthLowerDownRight":0.0, "mouthUpperUpLeft":0.0, "mouthUpperUpRight":0.0, "browDownLeft":0.0, "browDownRight":0.0, "browInnerUp":0.0, "browOuterUpLeft":0.0, "browOuterUpRight":0.0, "cheekPuff":0.0, "cheekSquintLeft":0.0, "cheekSquintRight":0.0, "noseSneerLeft":0.0, "noseSneerRight":0.0, "tongueOut":0.0 }, "rotation":{"pitch":30.0, "yaw":25.5, "roll":-15.5},
1636
+ * }], "timestamp":"654879876546" }
1617
1637
  *
1618
1638
  * @returns
1619
1639
  * true : Facial information JSON parsing successful. false : Facial information JSON parsing failed.
@@ -28,6 +28,10 @@ export enum LogLevel {
28
28
  * @ignore
29
29
  */
30
30
  LogLevelApiCall = 0x0010,
31
+ /**
32
+ * @ignore
33
+ */
34
+ LogLevelDebug = 0x0020,
31
35
  }
32
36
 
33
37
  /**
@@ -44,7 +44,7 @@ export abstract class IMediaEngine {
44
44
  /**
45
45
  * Registers an audio frame observer object.
46
46
  *
47
- * Call this method to register an audio frame observer object (register a callback). When you need the SDK to trigger onMixedAudioFrame, onRecordAudioFrame, onPlaybackAudioFrame or onEarMonitoringAudioFrame callback, you need to use this method to register the callbacks. Ensure that you call this method before joining a channel.
47
+ * Call this method to register an audio frame observer object (register a callback). When you need the SDK to trigger the onMixedAudioFrame, onRecordAudioFrame, onPlaybackAudioFrame, onPlaybackAudioFrameBeforeMixing or onEarMonitoringAudioFrame callback, you need to use this method to register the callbacks.
48
48
  *
49
49
  * @param observer The observer instance. See IAudioFrameObserver. Agora recommends calling this method after receiving onLeaveChannel to release the audio observer object.
50
50
  *
@@ -64,10 +64,6 @@ export abstract class IMediaEngine {
64
64
  * Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users. Then:
65
65
  * The raw video data of group A users can be obtained through the callback in IVideoFrameObserver, and the SDK renders the data by default.
66
66
  * The encoded video data of group B users can be obtained through the callback in IVideoEncodedFrameObserver. If you want to observe raw video frames (such as YUV or RGBA format), Agora recommends that you implement one IVideoFrameObserver class with this method. When calling this method to register a video observer, you can register callbacks in the IVideoFrameObserver class as needed. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received.
67
- * Ensure that you call this method before joining a channel.
68
- * When handling the video data returned in the callbacks, pay attention to the changes in the width and height parameters, which may be adapted under the following circumstances:
69
- * When network conditions deteriorate, the video resolution decreases incrementally.
70
- * If the user adjusts the video profile, the resolution of the video returned in the callbacks also changes.
71
67
  *
72
68
  * @param observer The observer instance. See IVideoFrameObserver.
73
69
  *
@@ -103,7 +99,7 @@ export abstract class IMediaEngine {
103
99
  * Registers a facial information observer.
104
100
  *
105
101
  * You can call this method to register the onFaceInfo callback to receive the facial information processed by Agora speech driven extension. When calling this method to register a facial information observer, you can register callbacks in the IFaceInfoObserver class as needed. After successfully registering the facial information observer, the SDK triggers the callback you have registered when it captures the facial information converted by the speech driven extension.
106
- * Ensure that you call this method before joining a channel.
102
+ * Call this method before joining a channel.
107
103
  * Before calling this method, you need to make sure that the speech driven extension has been enabled by calling enableExtension.
108
104
  *
109
105
  * @param observer Facial information observer, see IFaceInfoObserver.
@@ -117,9 +113,7 @@ export abstract class IMediaEngine {
117
113
  /**
118
114
  * Pushes the external audio frame.
119
115
  *
120
- * Before calling this method to push external audio data, perform the following steps:
121
- * Call createCustomAudioTrack to create a custom audio track and get the audio track ID.
122
- * Call joinChannel to join the channel. In ChannelMediaOptions, set publishCustomAudioTrackId to the audio track ID that you want to publish, and set publishCustomAudioTrack to true.
116
+ * Call this method to push external audio frames through the audio track.
123
117
  *
124
118
  * @param frame The external audio frame. See AudioFrame.
125
119
  * @param trackId The audio track ID. If you want to publish a custom external audio source, set this parameter to the ID of the corresponding custom audio track you want to publish.
@@ -133,12 +127,7 @@ export abstract class IMediaEngine {
133
127
  /**
134
128
  * Pulls the remote audio data.
135
129
  *
136
- * Before calling this method, call setExternalAudioSink (enabled : true) to notify the app to enable and set the external audio rendering. After a successful call of this method, the app pulls the decoded and mixed audio data for playback.
137
- * Call this method after joining a channel.
138
- * Both this method and onPlaybackAudioFrame callback can be used to get audio data after remote mixing. Note that after calling setExternalAudioSink to enable external audio rendering, the app no longer receives data from the onPlaybackAudioFrame callback. Therefore, you should choose between this method and the onPlaybackAudioFrame callback based on your actual business requirements. The specific distinctions between them are as follows:
139
- * After calling this method, the app automatically pulls the audio data from the SDK. By setting the audio data parameters, the SDK adjusts the frame buffer to help the app handle latency, effectively avoiding audio playback jitter.
140
- * The SDK sends the audio data to the app through the onPlaybackAudioFrame callback. Any delay in processing the audio frames may result in audio jitter.
141
- * This method is only used for retrieving audio data after remote mixing. If you need to get audio data from different audio processing stages such as capture and playback, you can register the corresponding callbacks by calling registerAudioFrameObserver.
130
+ * After a successful call of this method, the app pulls the decoded and mixed audio data for playback.
142
131
  *
143
132
  * @returns
144
133
  * The AudioFrame instance, if the method call succeeds.
@@ -149,7 +138,7 @@ export abstract class IMediaEngine {
149
138
  /**
150
139
  * Configures the external video source.
151
140
  *
152
- * Call this method before joining a channel.
141
+ * After calling this method to enable an external video source, you can call pushVideoFrame to push external video data to the SDK.
153
142
  *
154
143
  * @param enabled Whether to use the external video source: true : Use the external video source. The SDK prepares to accept the external video frame. false : (Default) Do not use the external video source.
155
144
  * @param useTexture Whether to use the external video frame in the Texture format. true : Use the external video frame in the Texture format. false : (Default) Do not use the external video frame in the Texture format.
@@ -170,7 +159,7 @@ export abstract class IMediaEngine {
170
159
  /**
171
160
  * Sets the external audio source parameters.
172
161
  *
173
- * Deprecated: This method is deprecated, use createCustomAudioTrack instead. Call this method before joining a channel.
162
+ * Deprecated: This method is deprecated, use createCustomAudioTrack instead.
174
163
  *
175
164
  * @param enabled Whether to enable the external audio source: true : Enable the external audio source. false : (Default) Disable the external audio source.
176
165
  * @param sampleRate The sample rate (Hz) of the external audio source which can be set as 8000, 16000, 32000, 44100, or 48000.
@@ -193,7 +182,7 @@ export abstract class IMediaEngine {
193
182
  /**
194
183
  * Creates a custom audio track.
195
184
  *
196
- * Ensure that you call this method before joining a channel. To publish a custom audio source, see the following steps:
185
+ * Call this method before joining a channel. To publish a custom audio source, see the following steps:
197
186
  * Call this method to create a custom audio track and get the audio track ID.
198
187
  * Call joinChannel to join the channel. In ChannelMediaOptions, set publishCustomAudioTrackId to the audio track ID that you want to publish, and set publishCustomAudioTrack to true.
199
188
  * Call pushAudioFrame and specify trackId as the audio track ID set in step 2. You can then publish the corresponding custom audio source in the channel.
@@ -224,7 +213,7 @@ export abstract class IMediaEngine {
224
213
  /**
225
214
  * Sets the external audio sink.
226
215
  *
227
- * This method applies to scenarios where you want to use external audio data for playback. After you set the external audio sink, you can call pullAudioFrame to pull remote audio frames. The app can process the remote audio and play it with the audio effects that you want.
216
+ * After enabling the external audio sink, you can call pullAudioFrame to pull remote audio frames. The app can process the remote audio and play it with the audio effects that you want.
228
217
  *
229
218
  * @param enabled Whether to enable or disable the external audio sink: true : Enables the external audio sink. false : (Default) Disables the external audio sink.
230
219
  * @param sampleRate The sample rate (Hz) of the external audio sink, which can be set as 16000, 32000, 44100, or 48000.
@@ -31,7 +31,7 @@ export abstract class IMediaPlayer {
31
31
  /**
32
32
  * Opens the media resource.
33
33
  *
34
- * This method is called asynchronously. If you need to play a media file, make sure you receive the onPlayerSourceStateChanged callback reporting PlayerStateOpenCompleted before calling the play method to play the file.
34
+ * This method is called asynchronously.
35
35
  *
36
36
  * @param url The path of the media file. Both local path and online path are supported.
37
37
  * @param startPos The starting position (ms) for playback. Default value is 0.
@@ -58,8 +58,6 @@ export abstract class IMediaPlayer {
58
58
  /**
59
59
  * Plays the media file.
60
60
  *
61
- * After calling open or seek, you can call this method to play the media file.
62
- *
63
61
  * @returns
64
62
  * 0: Success.
65
63
  * < 0: Failure.
@@ -78,6 +76,8 @@ export abstract class IMediaPlayer {
78
76
  /**
79
77
  * Stops playing the media track.
80
78
  *
79
+ * After calling this method to stop playback, if you want to play again, you need to call open or openWithMediaSource to open the media resource.
80
+ *
81
81
  * @returns
82
82
  * 0: Success.
83
83
  * < 0: Failure.
@@ -96,9 +96,8 @@ export abstract class IMediaPlayer {
96
96
  /**
97
97
  * Seeks to a new playback position.
98
98
  *
99
- * After successfully calling this method, you will receive the onPlayerEvent callback, reporting the result of the seek operation to the new playback position. To play the media file from a specific position, do the following:
100
- * Call this method to seek to the position you want to begin playback.
101
- * Call the play method to play the media file.
99
+ * If you call seek after the playback has completed (that is, upon receiving the onPlayerSourceStateChanged callback reporting the playback state as PlayerStatePlaybackCompleted or PlayerStatePlaybackAllLoopsCompleted), the SDK plays the media file from the specified position. At this point, you receive the onPlayerSourceStateChanged callback reporting the playback state as PlayerStatePlaying.
100
+ * If you call seek while the playback is paused, upon successful call of this method, the SDK seeks to the specified position. To resume playback, call resume or play.
102
101
  *
103
102
  * @param newPos The new playback position (ms).
104
103
  *
@@ -152,8 +151,6 @@ export abstract class IMediaPlayer {
152
151
  /**
153
152
  * Gets the detailed information of the media stream.
154
153
  *
155
- * Call this method after calling getStreamCount.
156
- *
157
154
  * @param index The index of the media stream. This parameter must be less than the return value of getStreamCount.
158
155
  *
159
156
  * @returns
@@ -168,6 +165,8 @@ export abstract class IMediaPlayer {
168
165
  * If you want to loop, call this method and set the number of the loops. When the loop finishes, the SDK triggers onPlayerSourceStateChanged and reports the playback state as PlayerStatePlaybackAllLoopsCompleted.
169
166
  *
170
167
  * @param loopCount The number of times the audio effect loops:
168
+ * ≥0: The number of times the playback loops. For example, setting it to 0 means no looping (the file plays once); setting it to 1 means the playback loops once (the file plays twice in total).
169
+ * -1: Play the audio file in an infinite loop.
171
170
  *
172
171
  * @returns
173
172
  * 0: Success.
@@ -314,6 +313,8 @@ export abstract class IMediaPlayer {
314
313
  /**
315
314
  * Sets the view.
316
315
  *
316
+ * @param view The render view. On Windows, this parameter sets the window handle (HWND).
317
+ *
317
318
  * @returns
318
319
  * 0: Success.
319
320
  * < 0: Failure.
@@ -567,9 +568,9 @@ export abstract class IMediaPlayer {
567
568
  abstract setSoundPositionParams(pan: number, gain: number): number;
568
569
 
569
570
  /**
570
- * Set media player options for providing technical previews or special customization features.
571
+ * Sets media player options.
571
572
  *
572
- * The media player supports setting options through key and value. In general, you don't need to know about the option settings. You can use the default option settings of the media player. The difference between this method and setPlayerOptionInString is that the value parameter of this method is of type Int, while the value of setPlayerOptionInString is of type String. These two methods cannot be used together. Ensure that you call this method before open or openWithMediaSource.
573
+ * The media player supports setting options through key and value. The difference between this method and setPlayerOptionInString is that the value parameter of this method is of type Int, while the value of setPlayerOptionInString is of type String. These two methods cannot be used together.
573
574
  *
574
575
  * @param key The key of the option.
575
576
  * @param value The value of the key.
@@ -581,9 +582,9 @@ export abstract class IMediaPlayer {
581
582
  abstract setPlayerOptionInInt(key: string, value: number): number;
582
583
 
583
584
  /**
584
- * Set media player options for providing technical previews or special customization features.
585
+ * Sets media player options.
585
586
  *
586
- * Ensure that you call this method before open or openWithMediaSource. The media player supports setting options through key and value. In general, you don't need to know about the option settings. You can use the default option settings of the media player. The difference between this method and setPlayerOptionInInt is that the value parameter of this method is of type String, while the value of setPlayerOptionInInt is of type String. These two methods cannot be used together.
587
+ * The media player supports setting options through key and value. The difference between this method and setPlayerOptionInInt is that the value parameter of this method is of type String, while the value of setPlayerOptionInInt is of type Int. These two methods cannot be used together.
587
588
  *
588
589
  * @param key The key of the option.
589
590
  * @param value The value of the key.
@@ -289,7 +289,19 @@ export interface IMusicContentCenterEventHandler {
289
289
  ): void;
290
290
 
291
291
  /**
292
- * @ignore
292
+ * Reports the detailed information of the music resource.
293
+ *
294
+ * The SDK triggers this callback after you call getSongSimpleInfo to get the detailed information of a music resource.
295
+ *
296
+ * @param requestId The request ID, which is the unique identifier of this request.
297
+ * @param songCode The code of the music, which is a unique identifier of the music.
298
+ * @param simpleInfo The information of the music resource, including the following:
299
+ * The start and end times (ms) of the chorus segment.
300
+ * The URL for downloading the lyrics of the chorus segment.
301
+ * The duration (ms) of the chorus segment.
302
+ * The song name.
303
+ * The name of the singer.
304
+ * @param reason The status code of the request from the music content center. See MusicContentCenterStateReason.
293
305
  */
294
306
  onSongSimpleInfoResult?(
295
307
  requestId: string,