agora-electron-sdk 4.4.0-dev.2 → 4.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2034,7 +2034,7 @@ export enum VideoApplicationScenarioType {
2034
2034
  */
2035
2035
  ApplicationScenarioGeneral = 0,
2036
2036
  /**
2037
- * ApplicationScenarioMeeting (1) is suitable for meeting scenarios. If set to ApplicationScenarioMeeting (1), the SDK automatically enables the following strategies:
2037
+ * ApplicationScenarioMeeting (1) is suitable for meeting scenarios. The SDK automatically enables the following strategies:
2038
2038
  * In meeting scenarios where low-quality video streams are required to have a high bitrate, the SDK automatically enables multiple technologies used to deal with network congestion, to enhance the performance of the low-quality streams and to ensure the smooth reception by subscribers.
2039
2039
  * The SDK monitors the number of subscribers to the high-quality video stream in real time and dynamically adjusts its configuration based on the number of subscribers.
2040
2040
  * If nobody subscribes to the high-quality stream, the SDK automatically reduces its bitrate and frame rate to save upstream bandwidth.
@@ -2051,7 +2051,7 @@ export enum VideoApplicationScenarioType {
2051
2051
  */
2052
2052
  ApplicationScenarioMeeting = 1,
2053
2053
  /**
2054
- * @ignore
2054
+ * ApplicationScenario1v1 (2) is suitable for 1v1 video call scenarios. To meet the requirements for low latency and high-quality video in this scenario, the SDK optimizes its strategies, improving performance in terms of video quality, first frame rendering, latency on mid-to-low-end devices, and smoothness under weak network conditions. 2: 1v1 video call scenario.
2055
2055
  */
2056
2056
  ApplicationScenario1v1 = 2,
2057
2057
  }
@@ -2304,23 +2304,23 @@ export enum LocalVideoStreamReason {
2304
2304
  */
2305
2305
  LocalVideoStreamReasonScreenCaptureWindowNotSupported = 20,
2306
2306
  /**
2307
- * @ignore
2307
+ * 21: (Windows only) The screen has not captured any data available for window sharing.
2308
2308
  */
2309
2309
  LocalVideoStreamReasonScreenCaptureFailure = 21,
2310
2310
  /**
2311
- * @ignore
2311
+ * 22: No permission for screen capture.
2312
2312
  */
2313
2313
  LocalVideoStreamReasonScreenCaptureNoPermission = 22,
2314
2314
  /**
2315
- * @ignore
2315
+ * 24: (Windows only) An unexpected error occurred during screen sharing (possibly due to window blocking failure), resulting in decreased performance, but the screen sharing process itself was not affected.
2316
2316
  */
2317
2317
  LocalVideoStreamReasonScreenCaptureAutoFallback = 24,
2318
2318
  /**
2319
- * @ignore
2319
+ * 25: (Windows only) The window for the current screen capture is hidden and not visible on the current screen.
2320
2320
  */
2321
2321
  LocalVideoStreamReasonScreenCaptureWindowHidden = 25,
2322
2322
  /**
2323
- * @ignore
2323
+ * 26: (Windows only) The window for screen capture has been restored from hidden state.
2324
2324
  */
2325
2325
  LocalVideoStreamReasonScreenCaptureWindowRecoverFromHidden = 26,
2326
2326
  /**
@@ -2328,15 +2328,15 @@ export enum LocalVideoStreamReason {
2328
2328
  */
2329
2329
  LocalVideoStreamReasonScreenCaptureWindowRecoverFromMinimized = 27,
2330
2330
  /**
2331
- * @ignore
2331
+ * 28: (Windows only) Screen capture has been paused. Common scenarios reporting this error code: The current screen may have been switched to a secure desktop, such as a UAC dialog box or Winlogon desktop.
2332
2332
  */
2333
2333
  LocalVideoStreamReasonScreenCapturePaused = 28,
2334
2334
  /**
2335
- * @ignore
2335
+ * 29: (Windows only) Screen capture has resumed from paused state.
2336
2336
  */
2337
2337
  LocalVideoStreamReasonScreenCaptureResumed = 29,
2338
2338
  /**
2339
- * @ignore
2339
+ * 30: The display used for screen capture is disconnected.
2340
2340
  */
2341
2341
  LocalVideoStreamReasonScreenCaptureDisplayDisconnected = 30,
2342
2342
  }
@@ -3537,7 +3537,7 @@ export enum NetworkType {
3537
3537
  */
3538
3538
  export enum VideoViewSetupMode {
3539
3539
  /**
3540
- * 0: (Default) Replaces a view.
3540
+ * 0: (Default) Clear all added views and replace with a new view.
3541
3541
  */
3542
3542
  VideoViewSetupReplace = 0,
3543
3543
  /**
@@ -4225,47 +4225,47 @@ export enum HeadphoneEqualizerPreset {
4225
4225
  }
4226
4226
 
4227
4227
  /**
4228
- * @ignore
4228
+ * Voice AI tuner sound types.
4229
4229
  */
4230
4230
  export enum VoiceAiTunerType {
4231
4231
  /**
4232
- * @ignore
4232
+ * 0: Mature male voice. A deep and magnetic male voice.
4233
4233
  */
4234
4234
  VoiceAiTunerMatureMale = 0,
4235
4235
  /**
4236
- * @ignore
4236
+ * 1: Fresh male voice. A fresh and slightly sweet male voice.
4237
4237
  */
4238
4238
  VoiceAiTunerFreshMale = 1,
4239
4239
  /**
4240
- * @ignore
4240
+ * 2: Elegant female voice. A deep and charming female voice.
4241
4241
  */
4242
4242
  VoiceAiTunerElegantFemale = 2,
4243
4243
  /**
4244
- * @ignore
4244
+ * 3: Sweet female voice. A high-pitched and cute female voice.
4245
4245
  */
4246
4246
  VoiceAiTunerSweetFemale = 3,
4247
4247
  /**
4248
- * @ignore
4248
+ * 4: Warm male singing. A warm and melodious male voice.
4249
4249
  */
4250
4250
  VoiceAiTunerWarmMaleSinging = 4,
4251
4251
  /**
4252
- * @ignore
4252
+ * 5: Gentle female singing. A soft and delicate female voice.
4253
4253
  */
4254
4254
  VoiceAiTunerGentleFemaleSinging = 5,
4255
4255
  /**
4256
- * @ignore
4256
+ * 6: Husky male singing. A unique husky male voice.
4257
4257
  */
4258
4258
  VoiceAiTunerHuskyMaleSinging = 6,
4259
4259
  /**
4260
- * @ignore
4260
+ * 7: Warm elegant female singing. A warm and mature female voice.
4261
4261
  */
4262
4262
  VoiceAiTunerWarmElegantFemaleSinging = 7,
4263
4263
  /**
4264
- * @ignore
4264
+ * 8: Powerful male singing. A strong and powerful male voice.
4265
4265
  */
4266
4266
  VoiceAiTunerPowerfulMaleSinging = 8,
4267
4267
  /**
4268
- * @ignore
4268
+ * 9: Dreamy female singing. A dreamy and soft female voice.
4269
4269
  */
4270
4270
  VoiceAiTunerDreamyFemaleSinging = 9,
4271
4271
  }
@@ -4406,7 +4406,7 @@ export class AudioRecordingConfiguration {
4406
4406
  */
4407
4407
  fileRecordingType?: AudioFileRecordingType;
4408
4408
  /**
4409
- * Recording quality. See AudioRecordingQualityType. Note: This parameter applies to AAC files only.
4409
+ * Recording quality. See AudioRecordingQualityType. This parameter applies to AAC files only.
4410
4410
  */
4411
4411
  quality?: AudioRecordingQualityType;
4412
4412
  /**
@@ -2,23 +2,23 @@ import './extension/AgoraMediaBaseExtension';
2
2
  import { EncodedVideoFrameInfo } from './AgoraBase';
3
3
 
4
4
  /**
5
- * @ignore
5
+ * The context information of the extension.
6
6
  */
7
7
  export class ExtensionContext {
8
8
  /**
9
- * @ignore
9
+ * Whether the uid in ExtensionContext is valid: true : The uid is valid. false : The uid is invalid.
10
10
  */
11
11
  isValid?: boolean;
12
12
  /**
13
- * @ignore
13
+ * The user ID. 0 represents a local user, while greater than 0 represents a remote user.
14
14
  */
15
15
  uid?: number;
16
16
  /**
17
- * @ignore
17
+ * The name of the extension provider.
18
18
  */
19
19
  providerName?: string;
20
20
  /**
21
- * @ignore
21
+ * The name of the extension.
22
22
  */
23
23
  extensionName?: string;
24
24
  }
@@ -960,27 +960,29 @@ export class ExternalVideoFrame {
960
960
  */
961
961
  matrix?: number[];
962
962
  /**
963
- * @ignore
963
+ * This parameter only applies to video data in Texture format. The MetaData buffer. The default value is NULL.
964
964
  */
965
965
  metadataBuffer?: Uint8Array;
966
966
  /**
967
- * @ignore
967
+ * This parameter only applies to video data in Texture format. The MetaData size. The default value is 0.
968
968
  */
969
969
  metadataSize?: number;
970
970
  /**
971
- * @ignore
971
+ * The alpha channel data output by using portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0,255], where 0 represents the background and 255 represents the foreground (portrait). By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, video, etc. In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering.
972
972
  */
973
973
  alphaBuffer?: Uint8Array;
974
974
  /**
975
- * @ignore
975
+ * This parameter only applies to video data in BGRA or RGBA format. Whether to extract the alpha channel data from the video frame and automatically fill it into alphaBuffer : true : Extract and fill the alpha channel data. false : (Default) Do not extract and fill the alpha channel data. For video data in BGRA or RGBA format, you can set the alpha channel data in either of the following ways:
976
+ * Automatically by setting this parameter to true.
977
+ * Manually through the alphaBuffer parameter.
976
978
  */
977
979
  fillAlphaBuffer?: boolean;
978
980
  /**
979
- * When the video frame contains alpha channel data, it represents the relative position of alphaBuffer and the video frame.
981
+ * When the video frame contains alpha channel data, it represents the relative position of alphaBuffer and the video frame. See AlphaStitchMode.
980
982
  */
981
983
  alphaStitchMode?: AlphaStitchMode;
982
984
  /**
983
- * @ignore
985
+ * This parameter only applies to video data in Windows Texture format. It represents a pointer to an object of type ID3D11Texture2D, which is used by a video frame.
984
986
  */
985
987
  d3d11Texture2d?: any;
986
988
  /**
@@ -1064,15 +1066,15 @@ export class VideoFrame {
1064
1066
  */
1065
1067
  textureId?: number;
1066
1068
  /**
1067
- * @ignore
1069
+ * This parameter only applies to video data in Texture format. Incoming 4 × 4 transformational matrix. The typical value is a unit matrix.
1068
1070
  */
1069
1071
  matrix?: number[];
1070
1072
  /**
1071
- * @ignore
1073
+ * The alpha channel data output by using portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0,255], where 0 represents the background and 255 represents the foreground (portrait). By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, video, etc. In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering.
1072
1074
  */
1073
1075
  alphaBuffer?: Uint8Array;
1074
1076
  /**
1075
- * When the video frame contains alpha channel data, it represents the relative position of alphaBuffer and the video frame.
1077
+ * When the video frame contains alpha channel data, it represents the relative position of alphaBuffer and the video frame. See AlphaStitchMode.
1076
1078
  */
1077
1079
  alphaStitchMode?: AlphaStitchMode;
1078
1080
  /**
@@ -1629,7 +1631,9 @@ export interface IFaceInfoObserver {
1629
1631
  * pitch: Head pitch angle. A positive value means looking down, while a negative value means looking up.
1630
1632
  * yaw: Head yaw angle. A positive value means turning left, while a negative value means turning right.
1631
1633
  * roll: Head roll angle. A positive value means tilting to the right, while a negative value means tilting to the left.
1632
- * timestamp: String. The timestamp of the output result, in milliseconds. Here is an example of JSON: { "faces":[{ "blendshapes":{ "eyeBlinkLeft":0.9, "eyeLookDownLeft":0.0, "eyeLookInLeft":0.0, "eyeLookOutLeft":0.0, "eyeLookUpLeft":0.0, "eyeSquintLeft":0.0, "eyeWideLeft":0.0, "eyeBlinkRight":0.0, "eyeLookDownRight":0.0, "eyeLookInRight":0.0, "eyeLookOutRight":0.0, "eyeLookUpRight":0.0, "eyeSquintRight":0.0, "eyeWideRight":0.0, "jawForward":0.0, "jawLeft":0.0, "jawRight":0.0, "jawOpen":0.0, "mouthClose":0.0, "mouthFunnel":0.0, "mouthPucker":0.0, "mouthLeft":0.0, "mouthRight":0.0, "mouthSmileLeft":0.0, "mouthSmileRight":0.0, "mouthFrownLeft":0.0, "mouthFrownRight":0.0, "mouthDimpleLeft":0.0, "mouthDimpleRight":0.0, "mouthStretchLeft":0.0, "mouthStretchRight":0.0, "mouthRollLower":0.0, "mouthRollUpper":0.0, "mouthShrugLower":0.0, "mouthShrugUpper":0.0, "mouthPressLeft":0.0, "mouthPressRight":0.0, "mouthLowerDownLeft":0.0, "mouthLowerDownRight":0.0, "mouthUpperUpLeft":0.0, "mouthUpperUpRight":0.0, "browDownLeft":0.0, "browDownRight":0.0, "browInnerUp":0.0, "browOuterUpLeft":0.0, "browOuterUpRight":0.0, "cheekPuff":0.0, "cheekSquintLeft":0.0, "cheekSquintRight":0.0, "noseSneerLeft":0.0, "noseSneerRight":0.0, "tongueOut":0.0 }, "rotation":{"pitch":30.0, "yaw":25.5, "roll":-15.5}, }], "timestamp":"654879876546" }
1634
+ * timestamp: String. The timestamp of the output result, in milliseconds. Here is an example of JSON:
1635
+ * { "faces":[{ "blendshapes":{ "eyeBlinkLeft":0.9, "eyeLookDownLeft":0.0, "eyeLookInLeft":0.0, "eyeLookOutLeft":0.0, "eyeLookUpLeft":0.0, "eyeSquintLeft":0.0, "eyeWideLeft":0.0, "eyeBlinkRight":0.0, "eyeLookDownRight":0.0, "eyeLookInRight":0.0, "eyeLookOutRight":0.0, "eyeLookUpRight":0.0, "eyeSquintRight":0.0, "eyeWideRight":0.0, "jawForward":0.0, "jawLeft":0.0, "jawRight":0.0, "jawOpen":0.0, "mouthClose":0.0, "mouthFunnel":0.0, "mouthPucker":0.0, "mouthLeft":0.0, "mouthRight":0.0, "mouthSmileLeft":0.0, "mouthSmileRight":0.0, "mouthFrownLeft":0.0, "mouthFrownRight":0.0, "mouthDimpleLeft":0.0, "mouthDimpleRight":0.0, "mouthStretchLeft":0.0, "mouthStretchRight":0.0, "mouthRollLower":0.0, "mouthRollUpper":0.0, "mouthShrugLower":0.0, "mouthShrugUpper":0.0, "mouthPressLeft":0.0, "mouthPressRight":0.0, "mouthLowerDownLeft":0.0, "mouthLowerDownRight":0.0, "mouthUpperUpLeft":0.0, "mouthUpperUpRight":0.0, "browDownLeft":0.0, "browDownRight":0.0, "browInnerUp":0.0, "browOuterUpLeft":0.0, "browOuterUpRight":0.0, "cheekPuff":0.0, "cheekSquintLeft":0.0, "cheekSquintRight":0.0, "noseSneerLeft":0.0, "noseSneerRight":0.0, "tongueOut":0.0 }, "rotation":{"pitch":30.0, "yaw":25.5, "roll":-15.5},
1636
+ * }], "timestamp":"654879876546" }
1633
1637
  *
1634
1638
  * @returns
1635
1639
  * true : Facial information JSON parsing successful. false : Facial information JSON parsing failed.
@@ -31,7 +31,7 @@ export abstract class IMediaPlayer {
31
31
  /**
32
32
  * Opens the media resource.
33
33
  *
34
- * This method is called asynchronously. If you need to play a media file, make sure you receive the onPlayerSourceStateChanged callback reporting PlayerStateOpenCompleted before calling the play method to play the file.
34
+ * This method is called asynchronously.
35
35
  *
36
36
  * @param url The path of the media file. Both local path and online path are supported.
37
37
  * @param startPos The starting position (ms) for playback. Default value is 0.
@@ -58,8 +58,6 @@ export abstract class IMediaPlayer {
58
58
  /**
59
59
  * Plays the media file.
60
60
  *
61
- * After calling open or seek, you can call this method to play the media file.
62
- *
63
61
  * @returns
64
62
  * 0: Success.
65
63
  * < 0: Failure.
@@ -78,6 +76,8 @@ export abstract class IMediaPlayer {
78
76
  /**
79
77
  * Stops playing the media track.
80
78
  *
79
+ * After calling this method to stop playback, if you want to play again, you need to call open or openWithMediaSource to open the media resource.
80
+ *
81
81
  * @returns
82
82
  * 0: Success.
83
83
  * < 0: Failure.
@@ -96,9 +96,8 @@ export abstract class IMediaPlayer {
96
96
  /**
97
97
  * Seeks to a new playback position.
98
98
  *
99
- * After successfully calling this method, you will receive the onPlayerEvent callback, reporting the result of the seek operation to the new playback position. To play the media file from a specific position, do the following:
100
- * Call this method to seek to the position you want to begin playback.
101
- * Call the play method to play the media file.
99
+ * If you call seek after the playback has completed (upon receiving callback onPlayerSourceStateChanged reporting playback status as PlayerStatePlaybackCompleted or PlayerStatePlaybackAllLoopsCompleted), the SDK will play the media file from the specified position. At this point, you will receive callback onPlayerSourceStateChanged reporting playback status as PlayerStatePlaying.
100
+ * If you call seek while the playback is paused, upon successful call of this method, the SDK will seek to the specified position. To resume playback, call resume or play .
102
101
  *
103
102
  * @param newPos The new playback position (ms).
104
103
  *
@@ -152,8 +151,6 @@ export abstract class IMediaPlayer {
152
151
  /**
153
152
  * Gets the detailed information of the media stream.
154
153
  *
155
- * Call this method after calling getStreamCount.
156
- *
157
154
  * @param index The index of the media stream. This parameter must be less than the return value of getStreamCount.
158
155
  *
159
156
  * @returns
@@ -168,6 +165,8 @@ export abstract class IMediaPlayer {
168
165
  * If you want to loop, call this method and set the number of the loops. When the loop finishes, the SDK triggers onPlayerSourceStateChanged and reports the playback state as PlayerStatePlaybackAllLoopsCompleted.
169
166
  *
170
167
  * @param loopCount The number of times the audio effect loops:
168
+ * ≥0: Number of times for playing. For example, setting it to 0 means no loop playback, playing only once; setting it to 1 means loop playback once, playing a total of twice.
169
+ * -1: Play the audio file in an infinite loop.
171
170
  *
172
171
  * @returns
173
172
  * 0: Success.
@@ -569,9 +568,9 @@ export abstract class IMediaPlayer {
569
568
  abstract setSoundPositionParams(pan: number, gain: number): number;
570
569
 
571
570
  /**
572
- * Set media player options for providing technical previews or special customization features.
571
+ * Sets media player options.
573
572
  *
574
- * The media player supports setting options through key and value. In general, you don't need to know about the option settings. You can use the default option settings of the media player. The difference between this method and setPlayerOptionInString is that the value parameter of this method is of type Int, while the value of setPlayerOptionInString is of type String. These two methods cannot be used together. Ensure that you call this method before open or openWithMediaSource.
573
+ * The media player supports setting options through key and value. The difference between this method and setPlayerOptionInString is that the value parameter of this method is of type Int, while the value of setPlayerOptionInString is of type String. These two methods cannot be used together.
575
574
  *
576
575
  * @param key The key of the option.
577
576
  * @param value The value of the key.
@@ -583,9 +582,9 @@ export abstract class IMediaPlayer {
583
582
  abstract setPlayerOptionInInt(key: string, value: number): number;
584
583
 
585
584
  /**
586
- * Set media player options for providing technical previews or special customization features.
585
+ * Sets media player options.
587
586
  *
588
- * Ensure that you call this method before open or openWithMediaSource. The media player supports setting options through key and value. In general, you don't need to know about the option settings. You can use the default option settings of the media player. The difference between this method and setPlayerOptionInInt is that the value parameter of this method is of type String, while the value of setPlayerOptionInInt is of type String. These two methods cannot be used together.
587
+ * The media player supports setting options through key and value. The difference between this method and setPlayerOptionInInt is that the value parameter of this method is of type String, while the value of setPlayerOptionInInt is of type Int. These two methods cannot be used together.
589
588
  *
590
589
  * @param key The key of the option.
591
590
  * @param value The value of the key.
@@ -289,7 +289,19 @@ export interface IMusicContentCenterEventHandler {
289
289
  ): void;
290
290
 
291
291
  /**
292
- * @ignore
292
+ * Reports the detailed information of the music resource.
293
+ *
294
+ * After you call getSongSimpleInfo to get the detailed information of a music resource, the SDK triggers this callback.
295
+ *
296
+ * @param requestId The request ID, which is the unique identifier of this request.
297
+ * @param songCode The code of the music, which is a unique identifier of the music.
298
+ * @param simpleInfo The information of the music resource, including the following:
299
+ * The start and end times (ms) of the chorus segment
300
+ * The download URL of the lyrics of the chorus segment
301
+ * The duration (ms) of the chorus segment
302
+ * The song name
303
+ * The name of the singer
304
+ * @param reason The status code of the music content center request. See MusicContentCenterStateReason.
293
305
  */
294
306
  onSongSimpleInfoResult?(
295
307
  requestId: string,