agora-electron-sdk 4.2.2-dev.4 → 4.2.2-dev.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67):
  1. package/CHANGELOG.md +22 -0
  2. package/js/Private/AgoraBase.js +139 -53
  3. package/js/Private/AgoraMediaBase.js +13 -7
  4. package/js/Private/IAgoraRtcEngine.js +11 -11
  5. package/js/Private/IAgoraRtcEngineEx.js +1 -1
  6. package/js/Private/IAgoraSpatialAudio.js +2 -2
  7. package/js/Private/impl/IAgoraMediaEngineImpl.js +12 -6
  8. package/js/Private/impl/IAgoraRtcEngineImpl.js +7 -7
  9. package/js/Private/internal/IrisApiEngine.js +13 -0
  10. package/js/Private/internal/MusicContentCenterInternal.js +3 -0
  11. package/js/Private/internal/RtcEngineExInternal.js +24 -7
  12. package/js/Private/ti/IAgoraRtcEngine-ti.js +2 -2
  13. package/js/Renderer/WebGLRenderer/index.js +19 -19
  14. package/js/Renderer/YUVCanvasRenderer/index.js +1 -1
  15. package/package.json +3 -3
  16. package/scripts/zipBuild.js +3 -1
  17. package/ts/Private/AgoraBase.ts +296 -133
  18. package/ts/Private/AgoraMediaBase.ts +68 -34
  19. package/ts/Private/AgoraMediaPlayerTypes.ts +8 -5
  20. package/ts/Private/IAgoraLog.ts +7 -3
  21. package/ts/Private/IAgoraMediaEngine.ts +76 -35
  22. package/ts/Private/IAgoraMediaPlayer.ts +126 -64
  23. package/ts/Private/IAgoraMediaPlayerSource.ts +3 -1
  24. package/ts/Private/IAgoraRtcEngine.ts +1325 -530
  25. package/ts/Private/IAgoraRtcEngineEx.ts +227 -94
  26. package/ts/Private/IAgoraSpatialAudio.ts +71 -36
  27. package/ts/Private/IAudioDeviceManager.ts +61 -31
  28. package/ts/Private/impl/IAgoraMediaEngineImpl.ts +12 -6
  29. package/ts/Private/impl/IAgoraRtcEngineImpl.ts +9 -11
  30. package/ts/Private/internal/IrisApiEngine.ts +14 -0
  31. package/ts/Private/internal/MusicContentCenterInternal.ts +4 -0
  32. package/ts/Private/internal/RtcEngineExInternal.ts +36 -14
  33. package/ts/Private/ti/IAgoraRtcEngine-ti.ts +2 -2
  34. package/ts/Renderer/WebGLRenderer/index.ts +26 -21
  35. package/ts/Renderer/YUVCanvasRenderer/index.ts +1 -1
  36. package/types/Private/AgoraBase.d.ts +298 -135
  37. package/types/Private/AgoraBase.d.ts.map +1 -1
  38. package/types/Private/AgoraMediaBase.d.ts +69 -35
  39. package/types/Private/AgoraMediaBase.d.ts.map +1 -1
  40. package/types/Private/AgoraMediaPlayerTypes.d.ts +8 -5
  41. package/types/Private/AgoraMediaPlayerTypes.d.ts.map +1 -1
  42. package/types/Private/IAgoraLog.d.ts +7 -3
  43. package/types/Private/IAgoraLog.d.ts.map +1 -1
  44. package/types/Private/IAgoraMediaEngine.d.ts +76 -35
  45. package/types/Private/IAgoraMediaEngine.d.ts.map +1 -1
  46. package/types/Private/IAgoraMediaPlayer.d.ts +126 -64
  47. package/types/Private/IAgoraMediaPlayer.d.ts.map +1 -1
  48. package/types/Private/IAgoraMediaPlayerSource.d.ts +3 -1
  49. package/types/Private/IAgoraMediaPlayerSource.d.ts.map +1 -1
  50. package/types/Private/IAgoraRtcEngine.d.ts +1326 -530
  51. package/types/Private/IAgoraRtcEngine.d.ts.map +1 -1
  52. package/types/Private/IAgoraRtcEngineEx.d.ts +227 -94
  53. package/types/Private/IAgoraRtcEngineEx.d.ts.map +1 -1
  54. package/types/Private/IAgoraSpatialAudio.d.ts +71 -36
  55. package/types/Private/IAgoraSpatialAudio.d.ts.map +1 -1
  56. package/types/Private/IAudioDeviceManager.d.ts +61 -31
  57. package/types/Private/IAudioDeviceManager.d.ts.map +1 -1
  58. package/types/Private/impl/IAgoraMediaEngineImpl.d.ts +2 -2
  59. package/types/Private/impl/IAgoraMediaEngineImpl.d.ts.map +1 -1
  60. package/types/Private/impl/IAgoraRtcEngineImpl.d.ts +5 -5
  61. package/types/Private/impl/IAgoraRtcEngineImpl.d.ts.map +1 -1
  62. package/types/Private/internal/IrisApiEngine.d.ts.map +1 -1
  63. package/types/Private/internal/MusicContentCenterInternal.d.ts +1 -0
  64. package/types/Private/internal/MusicContentCenterInternal.d.ts.map +1 -1
  65. package/types/Private/internal/RtcEngineExInternal.d.ts.map +1 -1
  66. package/types/Renderer/WebGLRenderer/index.d.ts +1 -1
  67. package/types/Renderer/WebGLRenderer/index.d.ts.map +1 -1
@@ -62,15 +62,15 @@ export enum VideoSourceType {
62
62
  */
63
63
  VideoSourceCameraThird = 11,
64
64
  /**
65
- * 12:The fourth camera.
65
+ * 12: The fourth camera.
66
66
  */
67
67
  VideoSourceCameraFourth = 12,
68
68
  /**
69
- * 13:The third screen.
69
+ * 13: The third screen.
70
70
  */
71
71
  VideoSourceScreenThird = 13,
72
72
  /**
73
- * 14:The fourth screen.
73
+ * 14: The fourth screen.
74
74
  */
75
75
  VideoSourceScreenFourth = 14,
76
76
  /**
@@ -270,7 +270,7 @@ export enum ContentInspectType {
270
270
  }
271
271
 
272
272
  /**
273
- * A structure used to configure the frequency of video screenshot and upload.ContentInspectModule
273
+ * A ContentInspectModule structure used to configure the frequency of video screenshot and upload.
274
274
  */
275
275
  export class ContentInspectModule {
276
276
  /**
@@ -288,11 +288,11 @@ export class ContentInspectModule {
288
288
  */
289
289
  export class ContentInspectConfig {
290
290
  /**
291
- * Additional information on the video content (maximum length: 1024 Bytes).The SDK sends the screenshots and additional information on the video content to the Agora server. Once the video screenshot and upload process is completed, the Agora server sends the additional information and the callback notification to your server.
291
+ * Additional information on the video content (maximum length: 1024 Bytes). The SDK sends the screenshots and additional information on the video content to the Agora server. Once the video screenshot and upload process is completed, the Agora server sends the additional information and the callback notification to your server.
292
292
  */
293
293
  extraInfo?: string;
294
294
  /**
295
- * Functional module. See ContentInspectModule.A maximum of 32 ContentInspectModule instances can be configured, and the value range of MAX_CONTENT_INSPECT_MODULE_COUNT is an integer in [1,32].A function module can only be configured with one instance at most. Currently only the video screenshot and upload function is supported.
295
+ * Functional module. See ContentInspectModule. A maximum of 32 ContentInspectModule instances can be configured, and the value range of MAX_CONTENT_INSPECT_MODULE_COUNT is an integer in [1,32]. A function module can only be configured with one instance at most. Currently only the video screenshot and upload function is supported.
296
296
  */
297
297
  modules?: ContentInspectModule[];
298
298
  /**
@@ -433,6 +433,10 @@ export enum VideoPixelFormat {
433
433
  * 16: The format is I422.
434
434
  */
435
435
  VideoPixelI422 = 16,
436
+ /**
437
+ * @ignore
438
+ */
439
+ VideoTextureId3d11texture2d = 17,
436
440
  }
437
441
 
438
442
  /**
@@ -575,6 +579,14 @@ export class ExternalVideoFrame {
575
579
  * @ignore
576
580
  */
577
581
  alphaBuffer?: Uint8Array;
582
+ /**
583
+ * @ignore
584
+ */
585
+ d3d11_texture_2d?: any;
586
+ /**
587
+ * @ignore
588
+ */
589
+ texture_slice_index?: number;
578
590
  }
579
591
 
580
592
  /**
@@ -643,6 +655,10 @@ export class VideoFrame {
643
655
  * This parameter only applies to video data in Texture format. Texture ID.
644
656
  */
645
657
  textureId?: number;
658
+ /**
659
+ * @ignore
660
+ */
661
+ d3d11Texture2d?: any;
646
662
  /**
647
663
  * This parameter only applies to video data in Texture format. Incoming 4 × 4 transformational matrix. The typical value is a unit matrix.
648
664
  */
@@ -736,7 +752,9 @@ export class AudioFrame {
736
752
  */
737
753
  bytesPerSample?: BytesPerSample;
738
754
  /**
739
- * The number of audio channels (the data are interleaved if it is stereo).1: Mono.2: Stereo.
755
+ * The number of audio channels (the data are interleaved if it is stereo).
756
+ * 1: Mono.
757
+ * 2: Stereo.
740
758
  */
741
759
  channels?: number;
742
760
  /**
@@ -744,11 +762,11 @@ export class AudioFrame {
744
762
  */
745
763
  samplesPerSec?: number;
746
764
  /**
747
- * The data buffer of the audio frame. When the audio frame uses a stereo channel, the data buffer is interleaved.The size of the data buffer is as follows: buffer = samples × channels × bytesPerSample.
765
+ * The data buffer of the audio frame. When the audio frame uses a stereo channel, the data buffer is interleaved. The size of the data buffer is as follows: buffer = samples × channels × bytesPerSample.
748
766
  */
749
767
  buffer?: Uint8Array;
750
768
  /**
751
- * The timestamp (ms) of the external audio frame.You can use this timestamp to restore the order of the captured audio frame, and synchronize audio and video frames in video scenarios, including scenarios where external video sources are used.
769
+ * The timestamp (ms) of the external audio frame. You can use this timestamp to restore the order of the captured audio frame, and synchronize audio and video frames in video scenarios, including scenarios where external video sources are used.
752
770
  */
753
771
  renderTimeMs?: number;
754
772
  /**
@@ -794,15 +812,24 @@ export enum AudioFramePosition {
794
812
  /**
795
813
  * Audio data format.
796
814
  *
797
- * The SDK sets the audio data format in the following callbacks according to AudioParams . onRecordAudioFrame onPlaybackAudioFrame onMixedAudioFrame The SDK calculates the sampling interval through the samplesPerCall , sampleRate , and channel parameters in AudioParams , and triggers the onRecordAudioFrame , onPlaybackAudioFrame , onMixedAudioFrame , and onEarMonitoringAudioFrame callbacks according to the sampling interval. Sample interval (sec) = samplePerCall /( sampleRate × channel ) . Ensure that the sample interval ≥ 0.01 (s).
815
+ * The SDK sets the audio data format in the following callbacks according to AudioParams: onRecordAudioFrame, onPlaybackAudioFrame, onMixedAudioFrame.
816
+ * The SDK calculates the sampling interval through the samplesPerCall, sampleRate, and channel parameters in AudioParams, and triggers the onRecordAudioFrame, onPlaybackAudioFrame, onMixedAudioFrame, and onEarMonitoringAudioFrame callbacks according to the sampling interval. Sample interval (sec) = samplePerCall /(sampleRate × channel).
817
+ * Ensure that the sample interval ≥ 0.01 (s).
798
818
  */
799
819
  export class AudioParams {
800
820
  /**
801
- * The audio sample rate (Hz), which can be set as one of the following values:8000.(Default) 16000.32000.4410048000
821
+ * The audio sample rate (Hz), which can be set as one of the following values:
822
+ * 8000.
823
+ * (Default) 16000.
824
+ * 32000.
825
+ * 44100
826
+ * 48000
802
827
  */
803
828
  sample_rate?: number;
804
829
  /**
805
- * The number of audio channels, which can be set as either of the following values:1: (Default) Mono.2: Stereo.
830
+ * The number of audio channels, which can be set as either of the following values:
831
+ * 1: (Default) Mono.
832
+ * 2: Stereo.
806
833
  */
807
834
  channels?: number;
808
835
  /**
@@ -828,7 +855,7 @@ export interface IAudioFrameObserverBase {
828
855
  * @param audioFrame The raw audio data. See AudioFrame.
829
856
  *
830
857
  * @returns
831
- * Reserved for future use.
858
+ * Without practical meaning.
832
859
  */
833
860
  onRecordAudioFrame?(channelId: string, audioFrame: AudioFrame): void;
834
861
 
@@ -841,7 +868,7 @@ export interface IAudioFrameObserverBase {
841
868
  * @param audioFrame The raw audio data. See AudioFrame.
842
869
  *
843
870
  * @returns
844
- * Reserved for future use.
871
+ * Without practical meaning.
845
872
  */
846
873
  onPlaybackAudioFrame?(channelId: string, audioFrame: AudioFrame): void;
847
874
 
@@ -854,7 +881,7 @@ export interface IAudioFrameObserverBase {
854
881
  * @param audioFrame The raw audio data. See AudioFrame.
855
882
  *
856
883
  * @returns
857
- * Reserved for future use.
884
+ * Without practical meaning.
858
885
  */
859
886
  onMixedAudioFrame?(channelId: string, audioFrame: AudioFrame): void;
860
887
 
@@ -866,7 +893,7 @@ export interface IAudioFrameObserverBase {
866
893
  * @param audioFrame The raw audio data. See AudioFrame.
867
894
  *
868
895
  * @returns
869
- * Reserved for future use.
896
+ * Without practical meaning.
870
897
  */
871
898
  onEarMonitoringAudioFrame?(audioFrame: AudioFrame): void;
872
899
  }
@@ -883,7 +910,7 @@ export interface IAudioFrameObserver extends IAudioFrameObserverBase {
883
910
  * @param audioFrame The raw audio data. See AudioFrame.
884
911
  *
885
912
  * @returns
886
- * Reserved for future use.
913
+ * Without practical meaning.
887
914
  */
888
915
  onPlaybackAudioFrameBeforeMixing?(
889
916
  channelId: string,
@@ -960,7 +987,7 @@ export interface IVideoEncodedFrameObserver {
960
987
  /**
961
988
  * Reports that the receiver has received the to-be-decoded video frame sent by the remote end.
962
989
  *
963
- * If you call the setRemoteVideoSubscriptionOptions method and set encodedFrameOnly to true , the SDK triggers this callback locally to report the received encoded video frame information.
990
+ * If you call the setRemoteVideoSubscriptionOptions method and set encodedFrameOnly to true, the SDK triggers this callback locally to report the received encoded video frame information.
964
991
  *
965
992
  * @param uid The user ID of the remote user.
966
993
  * @param imageBuffer The encoded video image buffer.
@@ -968,7 +995,7 @@ export interface IVideoEncodedFrameObserver {
968
995
  * @param videoEncodedFrameInfo For the information of the encoded video frame, see EncodedVideoFrameInfo.
969
996
  *
970
997
  * @returns
971
- * Reserved for future use.
998
+ * Without practical meaning.
972
999
  */
973
1000
  onEncodedVideoFrameReceived?(
974
1001
  uid: number,
@@ -983,11 +1010,11 @@ export interface IVideoEncodedFrameObserver {
983
1010
  */
984
1011
  export enum VideoFrameProcessMode {
985
1012
  /**
986
- * Read-only mode.In this mode, you do not modify the video frame. The video frame observer is a renderer.
1013
+ * Read-only mode. In this mode, you do not modify the video frame. The video frame observer is a renderer.
987
1014
  */
988
1015
  ProcessModeReadOnly = 0,
989
1016
  /**
990
- * Read and write mode.In this mode, you modify the video frame. The video frame observer is a video filter.
1017
+ * Read and write mode. In this mode, you modify the video frame. The video frame observer is a video filter.
991
1018
  */
992
1019
  ProcessModeReadWrite = 1,
993
1020
  }
@@ -999,13 +1026,18 @@ export interface IVideoFrameObserver {
999
1026
  /**
1000
1027
  * Occurs each time the SDK receives a video frame captured by local devices.
1001
1028
  *
1002
- * After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data captured by local devices. You can then pre-process the data according to your scenarios. Once the pre-processing is complete, you can directly modify videoFrame in this callback, and set the return value to true to send the modified video data to the SDK. The video data that this callback gets has not been pre-processed, and is not watermarked, cropped, rotated or beautified. If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel.
1029
+ * After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data captured by local devices. You can then pre-process the data according to your scenarios. Once the pre-processing is complete, you can directly modify videoFrame in this callback, and set the return value to true to send the modified video data to the SDK.
1030
+ * The video data that this callback gets has not been pre-processed, and is not watermarked, cropped, rotated or beautified.
1031
+ * If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel.
1003
1032
  *
1004
1033
  * @param sourceType Video source types, including cameras, screens, or media player. See VideoSourceType.
1005
- * @param videoFrame The video frame. See VideoFrame.The default value of the video frame data format obtained through this callback is as follows:macOS: YUV 420Windows: YUV 420
1034
+ * @param videoFrame The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows:
1035
+ * macOS: YUV 420
1036
+ * Windows: YUV 420
1006
1037
  *
1007
1038
  * @returns
1008
- * When the video processing mode is ProcessModeReadOnly : true : Reserved for future use. false : Reserved for future use. When the video processing mode is ProcessModeReadWrite : true : Sets the SDK to receive the video frame. false : Sets the SDK to discard the video frame.
1039
+ * When the video processing mode is ProcessModeReadOnly : true : Reserved for future use. false : Reserved for future use.
1040
+ * When the video processing mode is ProcessModeReadWrite : true : Sets the SDK to receive the video frame. false : Sets the SDK to discard the video frame.
1009
1041
  */
1010
1042
  onCaptureVideoFrame?(
1011
1043
  sourceType: VideoSourceType,
@@ -1015,19 +1047,17 @@ export interface IVideoFrameObserver {
1015
1047
  /**
1016
1048
  * Occurs each time the SDK receives a video frame before encoding.
1017
1049
  *
1018
- * After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data before encoding and then process the data according to your particular scenarios. After processing, you can send the processed video data back to the SDK in this callback. The video data that this callback gets has been preprocessed, with its content cropped and rotated, and the image enhanced.
1050
+ * After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data before encoding and then process the data according to your particular scenarios. After processing, you can send the processed video data back to the SDK in this callback.
1051
+ * The video data that this callback gets has been preprocessed, with its content cropped and rotated, and the image enhanced.
1019
1052
  *
1020
1053
  * @param sourceType The type of the video source. See VideoSourceType.
1021
- * @param videoFrame The video frame. See VideoFrame.The default value of the video frame data format obtained through this callback is as follows:
1054
+ * @param videoFrame The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows:
1022
1055
  * macOS: YUV 420
1023
1056
  * Windows: YUV 420
1024
1057
  *
1025
1058
  * @returns
1026
- * When the video processing mode is ProcessModeReadOnly :
1027
- * true : Reserved for future use.
1028
- * false : Reserved for future use. When the video processing mode is ProcessModeReadWrite :
1029
- * true : Sets the SDK to receive the video frame.
1030
- * false : Sets the SDK to discard the video frame.
1059
+ * When the video processing mode is ProcessModeReadOnly : true : Reserved for future use. false : Reserved for future use.
1060
+ * When the video processing mode is ProcessModeReadWrite : true : Sets the SDK to receive the video frame. false : Sets the SDK to discard the video frame.
1031
1061
  */
1032
1062
  onPreEncodeVideoFrame?(
1033
1063
  sourceType: VideoSourceType,
@@ -1042,14 +1072,18 @@ export interface IVideoFrameObserver {
1042
1072
  /**
1043
1073
  * Occurs each time the SDK receives a video frame sent by the remote user.
1044
1074
  *
1045
- * After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data sent from the remote end before rendering, and then process it according to the particular scenarios. If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel.
1075
+ * After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data sent from the remote end before rendering, and then process it according to the particular scenarios.
1076
+ * If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel.
1046
1077
  *
1047
1078
  * @param channelId The channel ID.
1048
1079
  * @param remoteUid The user ID of the remote user who sends the current video frame.
1049
- * @param videoFrame The video frame. See VideoFrame.The default value of the video frame data format obtained through this callback is as follows:macOS: YUV 420Windows: YUV 420
1080
+ * @param videoFrame The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows:
1081
+ * macOS: YUV 420
1082
+ * Windows: YUV 420
1050
1083
  *
1051
1084
  * @returns
1052
- * When the video processing mode is ProcessModeReadOnly : true : Reserved for future use. false : Reserved for future use. When the video processing mode is ProcessModeReadWrite : true : Sets the SDK to receive the video frame. false : Sets the SDK to discard the video frame.
1085
+ * When the video processing mode is ProcessModeReadOnly : true : Reserved for future use. false : Reserved for future use.
1086
+ * When the video processing mode is ProcessModeReadWrite : true : Sets the SDK to receive the video frame. false : Sets the SDK to discard the video frame.
1053
1087
  */
1054
1088
  onRenderVideoFrame?(
1055
1089
  channelId: string,
@@ -378,7 +378,7 @@ export class PlayerUpdatedInfo {
378
378
  */
379
379
  deviceId?: string;
380
380
  /**
381
- * The statistics about the media file being cached.If you call the openWithMediaSource method and set enableCache as true, the statistics about the media file being cached is updated every second after the media file is played. See CacheStatistics.
381
+ * The statistics about the media file being cached. If you call the openWithMediaSource method and set enableCache as true, the statistics about the media file being cached is updated every second after the media file is played. See CacheStatistics.
382
382
  */
383
383
  cacheStatistics?: CacheStatistics;
384
384
  }
@@ -400,19 +400,22 @@ export class MediaSource {
400
400
  */
401
401
  startPos?: number;
402
402
  /**
403
- * Whether to enable autoplay once the media file is opened:true: (Default) Enables autoplay.false: Disables autoplay.If autoplay is disabled, you need to call the play method to play a media file after it is opened.
403
+ * Whether to enable autoplay once the media file is opened: true : (Default) Enables autoplay. false : Disables autoplay. If autoplay is disabled, you need to call the play method to play a media file after it is opened.
404
404
  */
405
405
  autoPlay?: boolean;
406
406
  /**
407
- * Whether to cache the media file when it is being played:true:Enables caching.false: (Default) Disables caching.Agora only supports caching on-demand audio and video streams that are not transmitted in HLS protocol.If you need to enable caching, pass in a value to uri; otherwise, caching is based on the url of the media file.If you enable this function, the Media Player caches part of the media file being played on your local device, and you can play the cached media file without internet connection. The statistics about the media file being cached are updated every second after the media file is played. See CacheStatistics.
407
+ * Whether to cache the media file when it is being played: true : Enables caching. false : (Default) Disables caching.
408
+ * Agora only supports caching on-demand audio and video streams that are not transmitted in HLS protocol.
409
+ * If you need to enable caching, pass in a value to uri; otherwise, caching is based on the url of the media file.
410
+ * If you enable this function, the Media Player caches part of the media file being played on your local device, and you can play the cached media file without internet connection. The statistics about the media file being cached are updated every second after the media file is played. See CacheStatistics.
408
411
  */
409
412
  enableCache?: boolean;
410
413
  /**
411
- * Whether the media resource to be opened is a live stream or on-demand video distributed through Media Broadcast service:true: The media resource to be played is a live or on-demand video distributed through Media Broadcast service.false: (Default) The media resource is not a live stream or on-demand video distributed through Media Broadcast service.If you need to open a live stream or on-demand video distributed through Broadcast Streaming service, pass in the URL of the media resource to url, and set isAgoraSource as true; otherwise, you don't need to set the isAgoraSource parameter.
414
+ * Whether the media resource to be opened is a live stream or on-demand video distributed through Media Broadcast service: true : The media resource to be played is a live or on-demand video distributed through Media Broadcast service. false : (Default) The media resource is not a live stream or on-demand video distributed through Media Broadcast service. If you need to open a live stream or on-demand video distributed through Broadcast Streaming service, pass in the URL of the media resource to url, and set isAgoraSource as true; otherwise, you don't need to set the isAgoraSource parameter.
412
415
  */
413
416
  isAgoraSource?: boolean;
414
417
  /**
415
- * Whether the media resource to be opened is a live stream:true: The media resource is a live stream.false: (Default) The media resource is not a live stream.If the media resource you want to open is a live stream, Agora recommends that you set this parameter as true so that the live stream can be loaded more quickly.If the media resource you open is not a live stream, but you set isLiveSource as true, the media resource is not to be loaded more quickly.
418
+ * Whether the media resource to be opened is a live stream: true : The media resource is a live stream. false : (Default) The media resource is not a live stream. If the media resource you want to open is a live stream, Agora recommends that you set this parameter as true so that the live stream can be loaded more quickly. If the media resource you open is not a live stream, but you set isLiveSource as true, the media resource is not to be loaded more quickly.
416
419
  */
417
420
  isLiveSource?: boolean;
418
421
  }
@@ -68,15 +68,19 @@ export enum LogFilterType {
68
68
  */
69
69
  export class LogConfig {
70
70
  /**
71
- * The complete path of the log files. Ensure that the path for the log file exists and is writable. You can use this parameter to rename the log files.The default path is:macOS:If Sandbox is enabled: App Sandbox/Library/Logs/agorasdk.log. For example, /Users/<username>/Library/Containers/<AppBundleIdentifier>/Data/Library/Logs/agorasdk.log.If Sandbox is disabled: ~/Library/Logs/agorasdk.logWindows: C:\Users\<user_name>\AppData\Local\Agora\<process_name>\agorasdk.log.
71
+ * The complete path of the log files. Ensure that the path for the log file exists and is writable. You can use this parameter to rename the log files. The default path is:
72
+ * macOS:
73
+ * If Sandbox is enabled: App Sandbox/Library/Logs/agorasdk.log. For example, /Users/<username>/Library/Containers/<AppBundleIdentifier>/Data/Library/Logs/agorasdk.log.
74
+ * If Sandbox is disabled: ~/Library/Logs/agorasdk.log
75
+ * Windows: C:\Users\<user_name>\AppData\Local\Agora\<process_name>\agorasdk.log.
72
76
  */
73
77
  filePath?: string;
74
78
  /**
75
- * The size (KB) of an agorasdk.log file. The value range is [128,1024]. The default value is 1,024 KB. If you set fileSizeInKByte smaller than 128 KB, the SDK automatically adjusts it to 128 KB; if you set fileSizeInKByte greater than 1,024 KB, the SDK automatically adjusts it to 1,024 KB.
79
+ * The size (KB) of an agorasdk.log file. The value range is [128,20480]. The default value is 2,048 KB. If you set fileSizeInKByte smaller than 128 KB, the SDK automatically adjusts it to 128 KB; if you set fileSizeInKByte greater than 20,480 KB, the SDK automatically adjusts it to 20,480 KB.
76
80
  */
77
81
  fileSizeInKB?: number;
78
82
  /**
79
- * The output level of the SDK log file. See LogLevel.For example, if you set the log level to WARN, the SDK outputs the logs within levels FATAL, ERROR, and WARN.
83
+ * The output level of the SDK log file. See LogLevel. For example, if you set the log level to WARN, the SDK outputs the logs within levels FATAL, ERROR, and WARN.
80
84
  */
81
85
  level?: LogLevel;
82
86
  }
@@ -43,36 +43,56 @@ export abstract class IMediaEngine {
43
43
  /**
44
44
  * Registers an audio frame observer object.
45
45
  *
46
- * Call this method to register an audio frame observer object (register a callback). When you need the SDK to trigger onMixedAudioFrame , onRecordAudioFrame , onPlaybackAudioFrame or onEarMonitoringAudioFrame callback, you need to use this method to register the callbacks. Ensure that you call this method before joining a channel.
46
+ * Call this method to register an audio frame observer object (register a callback). When you need the SDK to trigger onMixedAudioFrame, onRecordAudioFrame, onPlaybackAudioFrame or onEarMonitoringAudioFrame callback, you need to use this method to register the callbacks. Ensure that you call this method before joining a channel.
47
47
  *
48
- * @param observer The observer object instance. See IAudioFrameObserver. Agora recommends calling this method after receiving onLeaveChannel to release the audio observer object.
48
+ * @param observer The observer instance. See IAudioFrameObserver. Agora recommends calling this method after receiving onLeaveChannel to release the audio observer object.
49
49
  *
50
50
  * @returns
51
- * 0: Success. < 0: Failure.
51
+ * 0: Success.
52
+ * < 0: Failure.
52
53
  */
53
54
  abstract registerAudioFrameObserver(observer: IAudioFrameObserver): number;
54
55
 
55
56
  /**
56
57
  * Registers a raw video frame observer object.
57
58
  *
58
- * If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps: Call registerVideoFrameObserver to register the raw video frame observer before joining the channel. Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel. After joining the channel, get the user IDs of group B users through onUserJoined , and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true . Call muteAllRemoteVideoStreams ( false ) to start receiving the video streams of all remote users. Then: The raw video data of group A users can be obtained through the callback in IVideoFrameObserver , and the SDK renders the data by default. The encoded video data of group B users can be obtained through the callback in IVideoEncodedFrameObserver . If you want to observe raw video frames (such as YUV or RGBA format), Agora recommends that you implement one IVideoFrameObserver class with this method. When calling this method to register a video observer, you can register callbacks in the IVideoFrameObserver class as needed. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received. Ensure that you call this method before joining a channel. When handling the video data returned in the callbacks, pay attention to the changes in the width and height parameters, which may be adapted under the following circumstances: When network conditions deteriorate, the video resolution decreases incrementally. If the user adjusts the video profile, the resolution of the video returned in the callbacks also changes.
59
+ * If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps:
60
+ * Call registerVideoFrameObserver to register the raw video frame observer before joining the channel.
61
+ * Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel.
62
+ * After joining the channel, get the user IDs of group B users through onUserJoined, and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true.
63
+ * Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users. Then:
64
+ * The raw video data of group A users can be obtained through the callback in IVideoFrameObserver, and the SDK renders the data by default.
65
+ * The encoded video data of group B users can be obtained through the callback in IVideoEncodedFrameObserver. If you want to observe raw video frames (such as YUV or RGBA format), Agora recommends that you implement one IVideoFrameObserver class with this method. When calling this method to register a video observer, you can register callbacks in the IVideoFrameObserver class as needed. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received.
66
+ * Ensure that you call this method before joining a channel.
67
+ * When handling the video data returned in the callbacks, pay attention to the changes in the width and height parameters, which may be adapted under the following circumstances:
68
+ * When network conditions deteriorate, the video resolution decreases incrementally.
69
+ * If the user adjusts the video profile, the resolution of the video returned in the callbacks also changes.
59
70
  *
60
- * @param observer The observer object instance. See IVideoFrameObserver.
71
+ * @param observer The observer instance. See IVideoFrameObserver.
61
72
  *
62
73
  * @returns
63
- * 0: Success. < 0: Failure.
74
+ * 0: Success.
75
+ * < 0: Failure.
64
76
  */
65
77
  abstract registerVideoFrameObserver(observer: IVideoFrameObserver): number;
66
78
 
67
79
  /**
68
80
  * Registers a receiver object for the encoded video image.
69
81
  *
70
- * If you only want to observe encoded video frames (such as h.264 format) without decoding and rendering the video, Agora recommends that you implement one IVideoEncodedFrameObserver class through this method. If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps: Call registerVideoFrameObserver to register the raw video frame observer before joining the channel. Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel. After joining the channel, get the user IDs of group B users through onUserJoined , and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true . Call muteAllRemoteVideoStreams ( false ) to start receiving the video streams of all remote users. Then: The raw video data of group A users can be obtained through the callback in IVideoFrameObserver , and the SDK renders the data by default. The encoded video data of group B users can be obtained through the callback in IVideoEncodedFrameObserver . Call this method before joining a channel.
82
+ * If you only want to observe encoded video frames (such as H.264 format) without decoding and rendering the video, Agora recommends that you implement one IVideoEncodedFrameObserver class through this method. If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps:
83
+ * Call registerVideoFrameObserver to register the raw video frame observer before joining the channel.
84
+ * Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel.
85
+ * After joining the channel, get the user IDs of group B users through onUserJoined, and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true.
86
+ * Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users. Then:
87
+ * The raw video data of group A users can be obtained through the callback in IVideoFrameObserver, and the SDK renders the data by default.
88
+ * The encoded video data of group B users can be obtained through the callback in IVideoEncodedFrameObserver.
89
+ * Call this method before joining a channel.
71
90
  *
72
91
  * @param observer The video frame observer object. See IVideoEncodedFrameObserver.
73
92
  *
74
93
  * @returns
75
- * 0: Success. < 0: Failure.
94
+ * 0: Success.
95
+ * < 0: Failure.
76
96
  */
77
97
  abstract registerVideoEncodedFrameObserver(
78
98
  observer: IVideoEncodedFrameObserver
@@ -85,32 +105,41 @@ export abstract class IMediaEngine {
85
105
  * @param trackId The audio track ID. If you want to publish a custom external audio source, set this parameter to the ID of the corresponding custom audio track you want to publish.
86
106
  *
87
107
  * @returns
88
- * 0: Success. < 0: Failure.
108
+ * 0: Success.
109
+ * < 0: Failure.
89
110
  */
90
111
  abstract pushAudioFrame(frame: AudioFrame, trackId?: number): number;
91
112
 
92
113
  /**
93
114
  * Pulls the remote audio data.
94
115
  *
95
- * Before calling this method, you need to call setExternalAudioSink to notify the app to enable and set the external rendering. After a successful method call, the app pulls the decoded and mixed audio data for playback. This method only supports pulling data from custom audio source. If you need to pull the data captured by the SDK, do not call this method. Call this method after joining a channel. Once you enable the external audio sink, the app will not retrieve any audio data from the onPlaybackAudioFrame callback. The difference between this method and the onPlaybackAudioFrame callback is as follows: The SDK sends the audio data to the app through the onPlaybackAudioFrame callback. Any delay in processing the audio frames may result in audio jitter. After a successful method call, the app automatically pulls the audio data from the SDK. After setting the audio data parameters, the SDK adjusts the frame buffer and avoids problems caused by jitter in the external audio playback.
116
+ * Before calling this method, you need to call setExternalAudioSink to notify the app to enable and set the external rendering. After a successful method call, the app pulls the decoded and mixed audio data for playback.
117
+ * This method only supports pulling data from custom audio source. If you need to pull the data captured by the SDK, do not call this method.
118
+ * Call this method after joining a channel.
119
+ * Once you enable the external audio sink, the app will not retrieve any audio data from the onPlaybackAudioFrame callback.
120
+ * The difference between this method and the onPlaybackAudioFrame callback is as follows:
121
+ * The SDK sends the audio data to the app through the onPlaybackAudioFrame callback. Any delay in processing the audio frames may result in audio jitter.
122
+ * After a successful method call, the app automatically pulls the audio data from the SDK. After setting the audio data parameters, the SDK adjusts the frame buffer and avoids problems caused by jitter in the external audio playback.
96
123
  *
97
124
  * @returns
98
- * The AudioFrame instance, if the method call succeeds. An error code, if the call fails,.
125
+ * 0: Success. The AudioFrame out parameter is filled with the pulled audio data.
126
+ * < 0: Failure.
99
127
  */
100
- abstract pullAudioFrame(): AudioFrame;
128
+ abstract pullAudioFrame(frame: AudioFrame): number;
101
129
 
102
130
  /**
103
131
  * Configures the external video source.
104
132
  *
105
133
  * Call this method before joining a channel.
106
134
  *
107
- * @param enabled Whether to use the external video source:true: Use the external video source. The SDK prepares to accept the external video frame.false: (Default) Do not use the external video source.
108
- * @param useTexture Whether to use the external video frame in the Texture format.true: Use the external video frame in the Texture format.false: (Default) Do not use the external video frame in the Texture format.
135
+ * @param enabled Whether to use the external video source: true : Use the external video source. The SDK prepares to accept the external video frame. false : (Default) Do not use the external video source.
136
+ * @param useTexture Whether to use the external video frame in the Texture format. true : Use the external video frame in the Texture format. false : (Default) Do not use the external video frame in the Texture format.
109
137
  * @param sourceType Whether the external video frame is encoded. See ExternalVideoSourceType.
110
- * @param encodedVideoOption Video encoding options. This parameter needs to be set if sourceType is EncodedVideoFrame. To set this parameter, contact .
138
+ * @param encodedVideoOption Video encoding options. This parameter needs to be set if sourceType is EncodedVideoFrame. To set this parameter, contact technical support.
111
139
  *
112
140
  * @returns
113
- * 0: Success. < 0: Failure.
141
+ * 0: Success.
142
+ * < 0: Failure.
114
143
  */
115
144
  abstract setExternalVideoSource(
116
145
  enabled: boolean,
@@ -122,17 +151,17 @@ export abstract class IMediaEngine {
122
151
  /**
123
152
  * Sets the external audio source parameters.
124
153
  *
125
- * Call this method before joining a channel.
154
+ * Deprecated: This method is deprecated, use createCustomAudioTrack instead. Call this method before joining a channel.
126
155
  *
127
- * @param enabled Whether to enable the external audio source:true: Enable the external audio source.false: (Default) Disable the external audio source.
156
+ * @param enabled Whether to enable the external audio source: true : Enable the external audio source. false : (Default) Disable the external audio source.
128
157
  * @param sampleRate The sample rate (Hz) of the external audio source which can be set as 8000, 16000, 32000, 44100, or 48000.
129
158
  * @param channels The number of channels of the external audio source, which can be set as 1 (Mono) or 2 (Stereo).
130
- * @param sourceNumber The number of external audio sources. The value of this parameter should be larger than 0. The SDK creates a corresponding number of custom audio tracks based on this parameter value and names the audio tracks starting from 0. In ChannelMediaOptions, you can set publishCustomAudioSourceId to the audio track ID you want to publish.
131
- * @param localPlayback Whether to play the external audio source:true: Play the external audio source.false: (Default) Do not play the external source.
132
- * @param publish Whether to publish audio to the remote users:true: (Default) Publish audio to the remote users.false: Do not publish audio to the remote users.
159
+ * @param localPlayback Whether to play the external audio source: true : Play the external audio source. false : (Default) Do not play the external source.
160
+ * @param publish Whether to publish audio to the remote users: true : (Default) Publish audio to the remote users. false : Do not publish audio to the remote users.
133
161
  *
134
162
  * @returns
135
- * 0: Success. < 0: Failure.
163
+ * 0: Success.
164
+ * < 0: Failure.
136
165
  */
137
166
  abstract setExternalAudioSource(
138
167
  enabled: boolean,
@@ -143,15 +172,19 @@ export abstract class IMediaEngine {
143
172
  ): number;
144
173
 
145
174
  /**
146
- * Creates a customized audio track.
175
+ * Creates a custom audio track.
147
176
  *
148
- * When you need to publish multiple custom captured audios in the channel, you can refer to the following steps: Call this method to create a custom audio track and get the audio track ID. In ChannelMediaOptions of each channel, set publishCustomAduioTrackId to the audio track ID that you want to publish, and set publishCustomAudioTrack to true . If you call pushAudioFrame trackId as the audio track ID set in step 2, you can publish the corresponding custom audio source in multiple channels.
177
+ * To publish a custom audio source to multiple channels, see the following steps:
178
+ * Call this method to create a custom audio track and get the audio track ID.
179
+ * In ChannelMediaOptions of each channel, set publishCustomAudioTrackId to the audio track ID that you want to publish, and set publishCustomAudioTrack to true.
180
+ * If you call pushAudioFrame, and specify trackId as the audio track ID set in step 2, you can publish the corresponding custom audio source in multiple channels.
149
181
  *
150
- * @param trackType The type of the custom audio track. See AudioTrackType.
182
+ * @param trackType The type of the custom audio track. See AudioTrackType. If AudioTrackDirect is specified for this parameter, you must set publishMicrophoneTrack to false in ChannelMediaOptions when calling joinChannel to join the channel; otherwise, joining the channel fails and returns the error code -2.
151
183
  * @param config The configuration of the custom audio track. See AudioTrackConfig.
152
184
  *
153
185
  * @returns
154
- * If the method call is successful, the audio track ID is returned as the unique identifier of the audio track. If the method call fails, a negative value is returned.
186
+ * If the method call is successful, the audio track ID is returned as the unique identifier of the audio track.
187
+ * If the method call fails, a negative value is returned.
155
188
  */
156
189
  abstract createCustomAudioTrack(
157
190
  trackType: AudioTrackType,
@@ -164,7 +197,8 @@ export abstract class IMediaEngine {
164
197
  * @param trackId The custom audio track ID returned in createCustomAudioTrack.
165
198
  *
166
199
  * @returns
167
- * 0: Success. < 0: Failure.
200
+ * 0: Success.
201
+ * < 0: Failure.
168
202
  */
169
203
  abstract destroyCustomAudioTrack(trackId: number): number;
170
204
 
@@ -173,12 +207,15 @@ export abstract class IMediaEngine {
173
207
  *
174
208
  * This method applies to scenarios where you want to use external audio data for playback. After you set the external audio sink, you can call pullAudioFrame to pull remote audio frames. The app can process the remote audio and play it with the audio effects that you want.
175
209
  *
176
- * @param enabled Whether to enable or disable the external audio sink:true: Enables the external audio sink.false: (Default) Disables the external audio sink.
210
+ * @param enabled Whether to enable or disable the external audio sink: true : Enables the external audio sink. false : (Default) Disables the external audio sink.
177
211
  * @param sampleRate The sample rate (Hz) of the external audio sink, which can be set as 16000, 32000, 44100, or 48000.
178
- * @param channels The number of audio channels of the external audio sink:1: Mono.2: Stereo.
212
+ * @param channels The number of audio channels of the external audio sink:
213
+ * 1: Mono.
214
+ * 2: Stereo.
179
215
  *
180
216
  * @returns
181
- * 0: Success. < 0: Failure.
217
+ * 0: Success.
218
+ * < 0: Failure.
182
219
  */
183
220
  abstract setExternalAudioSink(
184
221
  enabled: boolean,
@@ -197,13 +234,14 @@ export abstract class IMediaEngine {
197
234
  /**
198
235
  * Pushes the external raw video frame to the SDK.
199
236
  *
200
- * If you call createCustomVideoTrack method to get the video track ID, set the customVideoTrackId parameter to the video track ID you want to publish in the ChannelMediaOptions of each channel, and set the publishCustomVideoTrack parameter to true , you can call this method to push the unencoded external video frame to the SDK.
237
+ * If you call createCustomVideoTrack method to get the video track ID, set the customVideoTrackId parameter to the video track ID you want to publish in the ChannelMediaOptions of each channel, and set the publishCustomVideoTrack parameter to true, you can call this method to push the unencoded external video frame to the SDK.
201
238
  *
202
239
  * @param frame The external raw video frame to be pushed. See ExternalVideoFrame.
203
240
  * @param videoTrackId The video track ID returned by calling the createCustomVideoTrack method. The default value is 0.
204
241
  *
205
242
  * @returns
206
- * 0: Success. < 0: Failure.
243
+ * 0: Success.
244
+ * < 0: Failure.
207
245
  */
208
246
  abstract pushVideoFrame(
209
247
  frame: ExternalVideoFrame,
@@ -231,7 +269,8 @@ export abstract class IMediaEngine {
231
269
  * @param observer The audio frame observer, reporting the reception of each audio frame. See IAudioFrameObserver.
232
270
  *
233
271
  * @returns
234
- * 0: Success. < 0: Failure.
272
+ * 0: Success.
273
+ * < 0: Failure.
235
274
  */
236
275
  abstract unregisterAudioFrameObserver(observer: IAudioFrameObserver): number;
237
276
 
@@ -241,7 +280,8 @@ export abstract class IMediaEngine {
241
280
  * @param observer The video observer, reporting the reception of each video frame. See IVideoFrameObserver.
242
281
  *
243
282
  * @returns
244
- * 0: Success. < 0: Failure.
283
+ * 0: Success.
284
+ * < 0: Failure.
245
285
  */
246
286
  abstract unregisterVideoFrameObserver(observer: IVideoFrameObserver): number;
247
287
 
@@ -251,7 +291,8 @@ export abstract class IMediaEngine {
251
291
  * @param observer The video observer, reporting the reception of each video frame. See IVideoEncodedFrameObserver.
252
292
  *
253
293
  * @returns
254
- * 0: Success. < 0: Failure.
294
+ * 0: Success.
295
+ * < 0: Failure.
255
296
  */
256
297
  abstract unregisterVideoEncodedFrameObserver(
257
298
  observer: IVideoEncodedFrameObserver