aliyun-rtc-sdk 7.1.2 → 7.1.3
This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registry.
- package/dist/aliyun-rtc-sdk.es.js +1131 -955
- package/dist/aliyun-rtc-sdk.umd.js +28 -13
- package/dist/plugins/audioProcessPlugin.d.ts +55 -34
- package/dist/plugins/audioProcessPlugin.js +105 -102
- package/dist/plugins/beautyPlugin.d.ts +15 -26
- package/dist/types/index.d.ts +35 -12
- package/package.json +2 -2
```diff
@@ -12,25 +12,11 @@ interface IAudioProfile {
     maxBitrate: number;
 }
 
-declare enum FacingMode {
-    USER = "user",
-    ENVIRONMENT = "environment"
-}
 interface IError {
     code: number;
     reason: string;
     relatedLink: string;
 }
-interface IAudioConstraints extends MediaTrackConstraints {
-}
-interface IVideoConstraints {
-    deviceId?: string;
-    facingMode?: FacingMode;
-    aspectRatio?: ConstrainDouble;
-    width?: number;
-    height?: number;
-    frameRate?: number;
-}
 interface IScreenConstraints {
     audio?: boolean | MediaTrackConstraints;
     video?: boolean | MediaTrackConstraints;
```
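The custom `FacingMode` enum and the `IAudioConstraints`/`IVideoConstraints` interfaces are dropped in favor of the browser's standard constraint types. A minimal sketch of what capture options now look like, using only DOM `MediaTrackConstraints` (the values are illustrative, not defaults from the SDK):

```ts
// Camera options previously spelled with IVideoConstraints/FacingMode are now
// plain DOM MediaTrackConstraints; "environment" replaces FacingMode.ENVIRONMENT.
const cameraConstraints: MediaTrackConstraints = {
  facingMode: "environment",
  width: { ideal: 1280 },
  height: { ideal: 720 },
  frameRate: { ideal: 30 },
};

// The same shape covers microphone options that used IAudioConstraints.
const micConstraints: MediaTrackConstraints = {
  echoCancellation: true,
  noiseSuppression: true,
};
```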
```diff
@@ -44,11 +30,12 @@ interface IDeviceManager {
     getCameraList(): Promise<Array<MediaDeviceInfo>>;
     getMicList(): Promise<Array<MediaDeviceInfo>>;
     getSpeakerList(): Promise<Array<MediaDeviceInfo>>;
-    getAudioTrack(constraints:
-    getVideoTrack(constraints:
+    getAudioTrack(constraints: MediaTrackConstraints): Promise<MediaStreamTrack>;
+    getVideoTrack(constraints: MediaTrackConstraints): Promise<MediaStreamTrack>;
+    getAudioVideoTracks(constraints: MediaStreamConstraints): Promise<MediaStreamTrack[]>;
     getScreenTrack(constraints: IScreenConstraints): Promise<MediaStreamTrack[]>;
-    getAudioStream(constraints:
-    getVideoStream(constraints:
+    getAudioStream(constraints: MediaTrackConstraints): Promise<MediaStream>;
+    getVideoStream(constraints: MediaTrackConstraints): Promise<MediaStream>;
     getScreenStream(constraints: IScreenConstraints): Promise<MediaStream>;
 }
 
```
```diff
@@ -69,11 +56,12 @@ declare class BrowserDeviceManager implements IDeviceManager {
     getCameraList(): Promise<Array<MediaDeviceInfo>>;
     getMicList(): Promise<Array<MediaDeviceInfo>>;
     getSpeakerList(): Promise<Array<MediaDeviceInfo>>;
-    getAudioTrack(constraints:
-    getVideoTrack(constraints:
+    getAudioTrack(constraints: MediaTrackConstraints): Promise<MediaStreamTrack>;
+    getVideoTrack(constraints: MediaTrackConstraints): Promise<MediaStreamTrack>;
+    getAudioVideoTracks(constraints: MediaStreamConstraints): Promise<MediaStreamTrack[]>;
     getScreenTrack(constraints: IScreenConstraints): Promise<MediaStreamTrack[]>;
-    getAudioStream(constraints:
-    getVideoStream(constraints:
+    getAudioStream(constraints: MediaTrackConstraints): Promise<MediaStream>;
+    getVideoStream(constraints: MediaTrackConstraints): Promise<MediaStream>;
     getScreenStream(constraints: IScreenConstraints): Promise<MediaStream>;
     checkSupportScreenShare(): boolean;
     private createDeviceManager;
```
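Both `IDeviceManager` and `BrowserDeviceManager` gain `getAudioVideoTracks`, which takes a whole `MediaStreamConstraints` object and resolves to both tracks at once. A sketch of a caller, written against a structural stand-in for the interface because this diff does not show how a device manager instance is obtained:

```ts
// Structural stand-in for the IDeviceManager member used below; the real
// interface is declared in the SDK's type definitions shown in this diff.
interface DeviceManagerLike {
  getAudioVideoTracks(constraints: MediaStreamConstraints): Promise<MediaStreamTrack[]>;
}

// Request microphone and camera in a single call instead of two round trips
// through getAudioTrack()/getVideoTrack().
async function captureMicAndCamera(deviceManager: DeviceManagerLike): Promise<MediaStreamTrack[]> {
  return deviceManager.getAudioVideoTracks({
    audio: { echoCancellation: true },
    video: { width: { ideal: 1280 }, height: { ideal: 720 } },
  });
}
```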
```diff
@@ -714,6 +702,15 @@ var grammarExports = grammar$1.exports;
     };
 } (parser));
 
+interface ISubDeleteResult {
+    url: string;
+    stream?: any;
+    aStream?: any;
+    vStream?: any;
+    aMsid?: string;
+    vMsid?: string;
+}
+
 interface IListenerMap {
     [s: string]: any;
 }
```
```diff
@@ -815,6 +812,7 @@ declare class LocalStream extends Stream {
     initWithRetry(config: IStreamConfig, res?: () => void, rej?: (reason?: any) => void): Promise<void>;
     private initMicAudio;
     private initCameraVideo;
+    private initMicCamera;
     private initScreenVideoAudio;
     private tryMixMicAndScreenAudio;
     private initTracks;
```
```diff
@@ -884,8 +882,8 @@ declare class RemoteStream extends Stream {
 }
 
 interface IStreamConfig {
-    audio?:
-    video?:
+    audio?: MediaTrackConstraints | boolean;
+    video?: MediaTrackConstraints | boolean;
     screen?: IScreenConstraints | boolean;
     custom?: boolean;
     mediaStream?: MediaStream;
```
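`IStreamConfig.audio` and `.video` are now typed as `MediaTrackConstraints | boolean`, so existing boolean configs keep working while constraint objects replace the removed `IAudioConstraints`/`IVideoConstraints`. A sketch using a structural copy of the relevant fields (the device ID is a hypothetical placeholder):

```ts
// Mirrors the IStreamConfig fields shown in this hunk.
interface StreamConfigLike {
  audio?: MediaTrackConstraints | boolean;
  video?: MediaTrackConstraints | boolean;
  custom?: boolean;
}

// Booleans still work...
const simpleConfig: StreamConfigLike = { audio: true, video: true };

// ...and plain DOM constraint objects go where the old custom interfaces were used.
const detailedConfig: StreamConfigLike = {
  audio: { noiseSuppression: true },
  video: { deviceId: "your-camera-device-id", frameRate: { max: 30 } },
};
```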
```diff
@@ -2576,12 +2574,12 @@ declare class LocalStreamManager extends EventEmitter$1<LocalStreamManagerListen
      * Set cameraVideoConstraints
      * @param {IVideoConstraints} cameraVideoConstraints
      */
-    setCameraVideoConstraints(cameraVideoConstraints:
+    setCameraVideoConstraints(cameraVideoConstraints: MediaTrackConstraints): void;
     /**
      * Set micAudioConstraints
      * @param {IAudioConstraints} micAudioConstraints
      */
-    setMicAudioConstraints(micAudioConstraints:
+    setMicAudioConstraints(micAudioConstraints: MediaTrackConstraints): void;
 }
 
 interface DeviceStatus {
```
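`setCameraVideoConstraints` and `setMicAudioConstraints` now take standard `MediaTrackConstraints`. A minimal sketch against a structural stand-in for those two methods, since constructing a `LocalStreamManager` is outside this diff:

```ts
// Only the two setters touched by this hunk.
interface LocalStreamManagerLike {
  setCameraVideoConstraints(constraints: MediaTrackConstraints): void;
  setMicAudioConstraints(constraints: MediaTrackConstraints): void;
}

function applyCaptureSettings(manager: LocalStreamManagerLike): void {
  // Plain DOM constraints instead of the removed IVideoConstraints/IAudioConstraints.
  manager.setCameraVideoConstraints({ width: { ideal: 1920 }, frameRate: { max: 30 } });
  manager.setMicAudioConstraints({ echoCancellation: true, autoGainControl: true });
}
```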
```diff
@@ -3011,10 +3009,7 @@ declare class RtsManager extends EventEmitter$1<RtsManagerEventListener> {
     private getSubConfig;
     private httpSubscribe;
     private subscribeAdd;
-    subscribeDelete(subscribeOptions: RemoteSubscribeOptions): Promise<{
-        url: string;
-        stream: any;
-    } | undefined>;
+    subscribeDelete(subscribeOptions: RemoteSubscribeOptions): Promise<ISubDeleteResult | undefined>;
     subscibeStop(streamUrl: string): Promise<void>;
     subscribeAddDataChannel(streamUrl: string): Promise<any>;
     subscribeStopDatachannel(streamUrl: string, datachannel: any): Promise<any>;
```
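`subscribeDelete` now resolves to the named `ISubDeleteResult` (or `undefined`) instead of an anonymous `{ url; stream }` object, exposing the separate audio/video streams and msids declared earlier in this diff. A sketch of a caller, with `ISubDeleteResult` copied structurally and the subscribe options left opaque because their shape is not part of this diff:

```ts
// Copied from the ISubDeleteResult declaration added in this version.
interface SubDeleteResultLike {
  url: string;
  stream?: any;
  aStream?: any;
  vStream?: any;
  aMsid?: string;
  vMsid?: string;
}

interface RtsManagerLike {
  subscribeDelete(subscribeOptions: unknown): Promise<SubDeleteResultLike | undefined>;
}

async function removeSubscription(rts: RtsManagerLike, options: unknown): Promise<void> {
  const result = await rts.subscribeDelete(options);
  if (!result) {
    return; // nothing was subscribed for these options
  }
  // Audio and video legs are now reported separately alongside the combined stream.
  console.log("unsubscribed", result.url, result.aMsid, result.vMsid);
}
```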
```diff
@@ -3542,6 +3537,30 @@ interface AliRtcBeautyConfig {
     smoothnessLevel: number;
 }
 
+interface ImageHolderPluginOptions {
+    imageUrl: string;
+}
+declare class AliRtcImageHolderPlugin extends AliRtcPlugin {
+    private url;
+    private image?;
+    private canvas;
+    private mediaStream?;
+    private updateableManager;
+    private updateable;
+    private hasVideoTrack;
+    constructor(url: string, fps?: number);
+    get initOptions(): {
+        imageUrl: string;
+    };
+    setOptions(options: ImageHolderPluginOptions): void;
+    private drawImageToCanvas;
+    start(): Promise<MediaStreamTrack>;
+    stop(): void;
+    isSupported(): boolean;
+    shouldUpdate(_streamInfo: AliRtcLocalStreamInfo): boolean;
+    process(streamInfo: AliRtcLocalStreamInfo): Promise<void>;
+}
+
 declare class AliRtcEngine extends EventEmitter$1<AliRtcEngineEventListener> {
     /**
      * @ignore
```
```diff
@@ -3597,6 +3616,7 @@ declare class AliRtcEngine extends EventEmitter$1<AliRtcEngineEventListener> {
     static AliRtcConnectionStatusChangeReason: typeof AliRtcConnectionStatusChangeReason;
     static AliRtcPlugin: typeof AliRtcPlugin;
     static AliRtcPluginTrackType: typeof AliRtcPluginTrackType;
+    static AliRtcImageHolderPlugin: typeof AliRtcImageHolderPlugin;
     static AliRtcLiveTranscodingParam: typeof AliRtcLiveTranscodingParam;
     static AliRtcLiveTranscodingEncodeParam: typeof AliRtcLiveTranscodingEncodeParam;
     static AliRtcLiveTranscodingMixParam: typeof LiveTranscodingMixParam;
```
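The new `AliRtcImageHolderPlugin` (also exposed as the static `AliRtcEngine.AliRtcImageHolderPlugin` in the hunk above) draws an image onto a canvas and produces a video `MediaStreamTrack`. The sketch below only uses the constructor and methods declared in this diff; how the plugin is attached to a local stream is not shown here, and the image URLs and frame rate are illustrative:

```ts
// Only the public members declared in this diff.
interface ImageHolderPluginLike {
  isSupported(): boolean;
  setOptions(options: { imageUrl: string }): void;
  start(): Promise<MediaStreamTrack>;
  stop(): void;
}

type ImageHolderPluginCtor = new (url: string, fps?: number) => ImageHolderPluginLike;

// Pass AliRtcEngine.AliRtcImageHolderPlugin (or the class itself) as the constructor.
async function startPlaceholderTrack(
  ImageHolderPlugin: ImageHolderPluginCtor,
): Promise<MediaStreamTrack | undefined> {
  const plugin = new ImageHolderPlugin("https://example.com/placeholder.png", 10);
  if (!plugin.isSupported()) {
    return undefined; // plugin reports the environment as unsupported
  }
  // The image can be swapped later without recreating the plugin.
  plugin.setOptions({ imageUrl: "https://example.com/other-placeholder.png" });
  return plugin.start(); // canvas-backed video track to hand to your publish path
}
```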
```diff
@@ -4065,12 +4085,12 @@ declare class AliRtcEngine extends EventEmitter$1<AliRtcEngineEventListener> {
      * - If publishing the local audio stream is disabled ({@link publishLocalAudioStream}), the SDK does not start audio capture after joining the channel
      * @param {IAudioConstraints} audioConstraints Audio capture constraints
      */
-    startAudioCapture(audioConstraints?:
+    startAudioCapture(audioConstraints?: MediaTrackConstraints): Promise<void>;
     /**
      * @brief Set the audio capture constraints
      * @param constraints Audio capture constraints
      */
-    setAudioConstraints(constraints:
+    setAudioConstraints(constraints: MediaTrackConstraints): Promise<void>;
     /**
      * Stop audio capture
      * @note After calling this method, the capture device stays off across joining, leaving, and rejoining the channel
```
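`startAudioCapture` and `setAudioConstraints` on `AliRtcEngine` now take standard `MediaTrackConstraints`. A minimal sketch against a structural stand-in for those two methods (obtaining the engine instance is SDK-specific and not part of this diff):

```ts
// Only the two engine methods touched by this hunk.
interface AudioCaptureEngineLike {
  startAudioCapture(audioConstraints?: MediaTrackConstraints): Promise<void>;
  setAudioConstraints(constraints: MediaTrackConstraints): Promise<void>;
}

async function enableMicrophone(engine: AudioCaptureEngineLike): Promise<void> {
  // Start capture with initial constraints (the argument is optional).
  await engine.startAudioCapture({ echoCancellation: true });
  // Adjust constraints later without tearing capture down manually.
  await engine.setAudioConstraints({ noiseSuppression: true, autoGainControl: true });
}
```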
```diff
@@ -4474,6 +4494,7 @@ declare class SLSReporter {
     reportPublishMonitor(callId: string, traceId: string, msid: string, track: MediaStreamTrack | undefined, stats: any[]): void;
     reportSubscribeMonitor(callId: string, remoteId: string, traceId: string, msid: string, stats: any[]): void;
     reportNetworkMonitor(candidates: any[]): void;
+    reportLoopAudioDelay(ssrc: string, result: any): void;
     /**
      * Reports the join-room success event
      * @param {number} joinTime
```
```diff
@@ -5350,8 +5371,8 @@ declare class AliRtcLocalStreamInfo extends EventEmitter$1<AliRtcLocalStreamList
     private _previewStream?;
     plugins: AliRtcPlugin[];
     private _profileManager?;
-    cameraVideoConstraints?:
-    micAudioConstraints?:
+    cameraVideoConstraints?: MediaTrackConstraints;
+    micAudioConstraints?: MediaTrackConstraints;
     private get profileManager();
     get audioProfile(): AudioProfileKey | undefined;
     constructor(type: AliRtcRawDataStreamType);
```