zby-live-sdk 1.0.49-beta-talrtc0922 → 1.0.49-beta-talrtc1014

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/.babelrc +5 -5
  2. package/.editorconfig +13 -13
  3. package/.eslintrc.js +29 -29
  4. package/CHANGELOG.md +381 -370
  5. package/README.md +276 -276
  6. package/dist/zby-live-sdk.cjs.js +4 -3
  7. package/dist/zby-live-sdk.esm.js +4 -3
  8. package/dist/zby-live-sdk.umd.js +4 -3
  9. package/package.json +1 -1
  10. package/src/channel/getSendMsgParams.js +66 -66
  11. package/src/channel/index.js +138 -138
  12. package/src/channel/pomelo/index.js +184 -184
  13. package/src/channel/pomelo/latestQueue.js +151 -151
  14. package/src/channel/pomelo/polemo.js +749 -749
  15. package/src/channel/pomelo/util.js +54 -54
  16. package/src/channel/sdk-cb.js +73 -73
  17. package/src/channel/stream-msg.js +97 -97
  18. package/src/channel/zby/index.js +74 -74
  19. package/src/channel/zby/interactWithChannel.js +4 -4
  20. package/src/channel/zby/interactWithChannelControl.js +1568 -1568
  21. package/src/channel/zby/interactWithChannelEntry.js +318 -318
  22. package/src/config/config.js +153 -153
  23. package/src/default/base.js +70 -70
  24. package/src/default/extend.js +36 -36
  25. package/src/default/index.js +9 -9
  26. package/src/live/base.js +42 -42
  27. package/src/live/call-method.js +9 -9
  28. package/src/live/extend.js +53 -53
  29. package/src/live/index.js +9 -9
  30. package/src/network/api.js +50 -50
  31. package/src/network/commonFetch.js +66 -66
  32. package/src/network/dataReport.js +429 -429
  33. package/src/notice.js +394 -394
  34. package/src/tool/base.js +74 -74
  35. package/src/tool/call-method.js +9 -9
  36. package/src/tool/extend.js +42 -42
  37. package/src/tool/index.js +9 -9
  38. package/src/util/bridge.js +87 -87
  39. package/src/util/bridge1.js +46 -46
  40. package/src/util/dict.js +51 -51
  41. package/src/util/sessionStorage.js +29 -29
  42. package/src/util/sha256.js +482 -482
  43. package/src/util/util.js +308 -308
  44. package/src/zby-av-sdk/agora-sdk.js +711 -711
  45. package/src/zby-av-sdk/device.js +145 -145
  46. package/src/zby-av-sdk/rtc-sdk.js +2839 -2839
  47. package/src/zby-av-sdk/talrtc-sdk.js +2392 -2348
  48. package/src/zby-av-sdk/trtc-sdk.js +1801 -1801
  49. package/src/zby-av-sdk/zby-av-sdk.js +1891 -1891
  50. package/src/zby-av-sdk/zego-sdk.js +2987 -2987
  51. package/src/zby-live-sdk.js +1561 -1557
@@ -1,1802 +1,1802 @@
1
- import dataReport from '../network/dataReport.js';
2
- import defaultApi from '../default';
3
- import NOTICE from '../notice';
4
- import { deviceListReport } from './device.js';
5
- import util from '../util/util';
6
- import { compareS } from '../util/sessionStorage.js';
7
-
8
-
9
- // Extension identifier
10
- const extensionId = 'trtc_ext';
11
- // API entry point exposed by the hosting client
12
- const EM = window.EM;
13
- // Flag for whether TRTC's AddListener has already been executed
14
- let hasAddListener = false;
15
- // Listener id
16
- let EMListenerId = 0;
17
- // Whether microphone volume values are forwarded to the upper layer while pushing muted: false = no sound, do not notify the upper layer / true = sound present, notify the upper layer
18
- let isNoticeMicVolumeTRTC = false;
19
- // RTC small class = 0; RTC large class = 1
20
- const classType = 0;
21
- // Mapping tables between pulled-stream streamIds and local preview channels
22
- let streamIdToPreviewId = {};
23
- let previewIdToStreamId = {};
24
- // Playback channels for pulled streams are allocated from 0 up to 99 (100 streams in total); -1 is the local preview of the pushed stream
25
- // const uiChnIndexs = new Array(50).fill(0).map((a, b) => b + 1).reverse();
26
- const uiChnIndexs = new Array(100).fill(0).map((a, b) => b).reverse();
27
- // deviceIds of the audio devices currently in use
28
- let usingAudioDeviceId = {
29
- speaker: '',
30
- microphone: ''
31
- };
32
- // Heartbeat reporting
33
- let heartBeatDataReportObj = {};
34
- let heartBeatDataReportTimer = null;
35
- let isFirstHeartBeatReport = false;
36
- let streamIdRtcPlayerInfo = {};
37
- let streamIdRtcPlayerInfo1 = {};
38
- // Latest values from the low-level push-stream callback
39
- let dataTrtcCapture = {};
40
- let qualityLocalArr = [];
41
- let qualityRemoteArr = [];
42
-
43
- const callMethod = (name, args) => {
44
- // EM is hosted by the client and does not exist in a plain browser, so feature-detect it first to avoid errors
45
- if (EM) {
46
- return new Promise((resolve, reject) => {
47
- // let noneCamera = (name === 'SetVideoDevice' && !args.pszDeviceID);
48
- // let noneMicrophone = (name === 'SetAudioDevice' && args.deviceType === 0 && !args.pszDeviceID);
49
- // let noneSpeaker = (name === 'SetAudioDevice' && args.deviceType === 1 && !args.pszDeviceID);
50
- // if (noneCamera || noneMicrophone || noneSpeaker) {
51
- // return resolve();
52
- // }
53
- EM.CallMethod(
54
- extensionId,
55
- name,
56
- JSON.stringify({...args, classType}),
57
- (code, msg) => {
58
- defaultApi.writeLog(`${name} Code: ${code}\nMessage: ${msg}\nParams: ${JSON.stringify({...args, classType})}`);
59
- resolve({
60
- code,
61
- msg
62
- });
63
- }
64
- );
65
- });
66
- }
67
- };
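A minimal usage sketch (illustrative only, not part of the published file): every TRTC call below goes through this wrapper, so a caller simply awaits the resolved { code, msg } pair; when window.EM is absent the wrapper returns undefined, so the result should be guarded.

// Hypothetical caller; 'SetCameraEncodeBiarate' is one of the method names used later in this file.
const applyBitrate = async () => {
  const ret = await callMethod('SetCameraEncodeBiarate', { bitrate: 500000 });
  if (ret && ret.code !== 0) {
    defaultApi.writeLog(`SetCameraEncodeBiarate failed: ${ret.msg}`);
  }
};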
68
-
69
- const loadTrtc = (extensionVersion) => {
70
- // EM is hosted by the client and does not exist in a plain browser, so feature-detect it first to avoid errors
71
- if (EM) {
72
- return new Promise((resolve, reject) => {
73
- removerListener();
74
- console.log('jjjjjjj3',extensionId,extensionVersion);
75
- EM.Load(
76
- extensionId,
77
- extensionVersion,
78
- false,
79
- (code, msg) => {
80
- console.log('jjjjjjj4')
81
- defaultApi.writeLog(`loadTrtc Code: ${code}\nMessage: ${msg}`);
82
- addListener();
83
- resolve();
84
- }
85
- );
86
- });
87
- }
88
- };
89
-
90
- // Unregister the listener
91
- const removerListener = () => {
92
- hasAddListener = false;
93
- defaultApi.writeLog(`TRTC::action--removerListener EMListenerId:${EMListenerId}`);
94
- EM.RemoverListener(extensionId, EMListenerId, (ec, content) => {});
95
- EMListenerId = 0;
96
- };
97
-
98
- // Register the listener
99
-
100
- /**
101
- * @function Register the extension listener mechanism
102
- * @param userId:Number user id, required
103
- * @param userName:String user name, required
104
- * @param roomId:String channel (room) id, required
105
- * @param nNetType:Number network type, optional, defaults to 1
106
- * @return void
107
- */
108
- const addListener = () => {
109
- // EM 是寄宿于端的,浏览器中并不存在,为防止报错需要先进行能力检测
110
- if (EM && !hasAddListener) {
111
- hasAddListener = true;
112
- EM.AddListener(extensionId, (event, data) => {
113
- if (data && data.indexOf(extensionId) > -1) {
114
- try {
115
- EMListenerId = JSON.parse(data)[extensionId];
116
- defaultApi.writeLog(`TRTC::addListener-- EMListenerId: ${EMListenerId}`);
117
- } catch (error) {}
118
- }
119
- if (!event || !data) {
120
- return;
121
- }
122
- let _data = JSON.parse(data);
123
- switch (event) {
124
- // Push-stream related
125
- // Live pusher error notification, fired when the pusher hits an error
126
- case 'onLocalError':
127
- defaultApi.writeLog(`TRTC::addListener-- onLocalError: ${JSON.stringify(_data)}`);
128
- NOTICE.pushStreamError({
129
- errorStreamType:_data.streamType,
130
- code: _data.code,
131
- errorMsg: _data
132
- })
133
- dataReport.pushStreamError({
134
- errorStreamType:_data.streamType,
135
- code: _data.code
136
- });
137
- break;
138
- // Live pusher warning notification
139
- case 'onLocalWarning':
140
- defaultApi.writeLog(`TRTC::addListener-- onLocalWarning: ${JSON.stringify(_data)}`);
141
- NOTICE.pushStreamWarning({
142
- warnStreamType:_data.streamType,
143
- code: _data.code
144
- })
145
- dataReport.pushStreamWarning({
146
- warnStreamType:_data.streamType,
147
- code: _data.code
148
- });
149
- break;
150
- // Callback fired when the first audio frame has been pushed
151
- case 'onSendLocalFirstAudioFrame':
152
- defaultApi.writeLog(`TRTC::addListener-- onSendLocalFirstAudioFrame: ${JSON.stringify(_data)}`);
153
- break;
154
- // Callback fired when the first video frame has been pushed
155
- case 'onSendLocalFirstVideoFrame':
156
- defaultApi.writeLog(`TRTC::addListener-- onSendLocalFirstVideoFrame: ${JSON.stringify(_data)}`);
157
- NOTICE.pushFlowSuccess({code:0,publish_streamid: _data.strStreamId});
158
- dataReport.publishResult({
159
- code: '0',
160
- // publish_streamid: window.zbyAVSDK_init_params.trtc.streamId,
161
- });
162
- break;
163
- // Microphone capture volume callback
164
- case 'onLocalMicrophoneVolumeUpdate':
165
- // defaultApi.writeLog(`TRTC::addListener-- onLocalMicrophoneVolumeUpdate: ${JSON.stringify(_data)}`);
166
- if (isNoticeMicVolumeTRTC) {
167
- NOTICE.captureMicVolumeChanged({
168
- volume: Math.round(_data.volume)
169
- });
170
- }
171
- // heartBeatDataReportObj.volume.push(Math.round(_data.pCaptureSoundLevel[0].soundLevel));
172
- heartBeatDataReportObj.volume = heartBeatDataReportObj.volume + Math.round(_data.volume) + ',';
173
- break;
174
- break;
175
- // Pusher connection status callback: 0 disconnected from the server / 1 connecting to the server / 2 connected successfully / 3 reconnecting
176
- case 'onLocalConnectStatusUpdate':
177
- defaultApi.writeLog(`TRTC::addListener-- onLocalConnectStatusUpdate: ${JSON.stringify(_data)}`);
178
- dataReport.localConnectStatus({
179
- connectStatuStreamType:_data.streamType,
180
- state: _data.state
181
- });
182
- break;
183
- // Real-time network quality statistics callback
184
- // The callback fires every 2 seconds to report the network quality perceived by the SDK.
185
- // * The SDK rates the current network's latency, bandwidth and stability with a set of built-in proprietary algorithms and produces a single score:
186
- // * a score of 1 (Excellent) means the network is very good; 6 (Down) means it cannot support normal SDK operation.
187
- // * @param quality network status: 0 not measured; 1 excellent; 2 good; 3 fair; 4 poor; 5 very poor; 6 below TRTC's minimum requirement
188
- case 'onLocalNetworkQuality':
189
- // defaultApi.writeLog(`TRTC::addListener-- onLocalNetworkQuality: ${_data.quality}`);
190
- if(_data.quality == 4 || _data.quality == 5 || _data.quality == 6) {
191
- NOTICE.localNetworkQuality({code: _data.quality});
192
- dataReport.localNetworkQuality({
193
- code: _data.quality,
194
- // publish_streamid: window.zbyAVSDK_init_params.trtc.streamId,
195
- });
196
- }
197
- qualityLocalArr.push(_data.quality);
198
- break;
199
- // Live pusher statistics callback, all values are numbers
200
- // appCpu  CPU usage of the current app (%)
201
- // systemCpu  CPU usage of the system (%)
202
- // width  video width
203
- // height  video height
204
- // fps  frame rate (fps)
205
- // audioBitrate  audio bitrate (Kbps)
206
- // videoBitrate  video bitrate (Kbps)
207
- // case 'onLocalStatisticsUpdate':
208
- // defaultApi.writeLog(`TRTC::addListener-- onLocalStatisticsUpdate: ${_data}`);
209
- // break;
210
-
211
- // Pull-stream related
212
- // Remote-stream error notification, fired when pulling a stream hits an error
213
- case 'onRemoteError':
214
- defaultApi.writeLog(`TRTC::addListener-- onRemoteError: ${JSON.stringify(_data)}`);
215
- NOTICE.pullStreamError({
216
- strErrorStreamId: _data.strStreamId,
217
- code:_data.code
218
- })
219
- // Is data reporting needed here? @yuqian
220
- break;
221
- // Remote-stream warning notification, fired when pulling a stream raises a warning.
222
- case 'onRemoteWarning':
223
- defaultApi.writeLog(`TRTC::addListener-- onRemoteWarning: ${JSON.stringify(_data)}`);
224
- NOTICE.pullStreamWarning({
225
- strWarnStreamId: _data.strStreamId,
226
- code:_data.code
227
- })
228
- // Is data reporting needed here? @yuqian
229
- break;
230
- // First audio frame of the pulled stream received
231
- case 'onRemoteAudioStatus':
232
- // defaultApi.writeLog(`TRTC::addListener-- onRemoteAudioStatus: ${_data}`);
233
- console.log('onRemoteAudioStatus',_data);
234
- try{
235
- NOTICE.firstAudioSize({
236
- streamId: _data.strStreamId,
237
- userId: util.getUidByStreamId(_data.strStreamId)
238
- });
239
- dataReport.firstAudioSize({
240
- pull_streamid: _data.strStreamId,
241
- pull_uid: util.getUidByStreamIdDr(_data.strStreamId),
242
- code:'0'
243
- });
244
- } catch (e) { };
245
- break;
246
- // Remote stream volume level
247
- case 'onRemoteAudioVolume':
248
- // defaultApi.writeLog(`TRTC::addListener-- onRemoteAudioVolume: ${JSON.stringify(_data)}`);
249
- let cbData = {
250
- streamId: _data.strStreamId,
251
- volume: _data.volume
252
- }
253
- NOTICE.playerVolumeChanged(cbData);
254
- NOTICE.volumeChange(cbData);
255
- // currStreamIdRtcPlayerInfo && currStreamIdRtcPlayerInfo.volume.push(Math.round(item.soundLevel));
256
- // streamIdRtcPlayerInfo[_data.strStreamId] && streamIdRtcPlayerInfo[_data.strStreamId].volume.push(Math.round(item.soundLevel));
257
- if(streamIdRtcPlayerInfo[_data.strStreamId]) {
258
- streamIdRtcPlayerInfo[_data.strStreamId].volume = streamIdRtcPlayerInfo[_data.strStreamId].volume + Math.round(_data.volume)+ ',';
259
- }
260
- break;
261
- // // Remote stream received its first audio frame
262
- // case 'onRemoteAudioStart':
263
- // defaultApi.writeLog(`TRTC::addListener-- onRemoteAudioStart: ${_data}`);
264
- // break;
265
- // // Remote stream stopped pushing audio
266
- // case 'onRemoteAudioStop':
267
- // defaultApi.writeLog(`TRTC::addListener-- onRemoteAudioStop: ${_data}`);
268
- // break;
269
- // Video mute state: true means the remote side started pushing and the puller received the first frame; false means the remote side stopped pushing.
270
- case 'onRemoteVideoStatus':
271
- defaultApi.writeLog(`TRTC::addListener-- onRemoteVideoStatus: ${JSON.stringify(_data)}`);
272
- console.log('onRemoteVideoStatus',_data);
273
- try{
274
- NOTICE.pullFlowResult({code:0,pull_streamid: _data.strStreamId});
275
- NOTICE.firstVideoSize({
276
- streamId: _data.strStreamId,
277
- // userId: _data.strStreamId.split('_').length == 5 ? _data.strStreamId : util.getUidByStreamId(_data.strStreamId)
278
- userId: util.getUidByStreamId(_data.strStreamId)
279
- });
280
- dataReport.firstVideoSize({
281
- pull_streamid: _data.strStreamId,
282
- pull_uid: util.getUidByStreamIdDr(_data.strStreamId),
283
- code:'0'
284
- });
285
- } catch (e) { };
286
- break;
287
- //sei
288
- case 'onRecvSEIMsg':
289
- // defaultApi.writeLog(`TRTC::addListener-- onRecvSEIMsg: ${_data}`);
290
- break;
291
- // Real-time network quality statistics callback
293
- // * The callback fires every 2 seconds to report the network quality perceived by the SDK.
294
- // * The SDK rates the current network's latency, bandwidth and stability with a set of built-in proprietary algorithms and produces a single score:
295
- // * a score of 1 (Excellent) means the network is very good; 6 (Down) means it cannot support normal SDK operation.
296
- // *
297
- // * @param strStreamId stream ID identifying the remote user.
298
- // * @param quality network status: 0 not measured; 1 excellent; 2 good; 3 fair; 4 poor; 5 very poor; 6 below TRTC's minimum requirement
298
- case 'onRemoteNetworkQuality':
299
- // defaultApi.writeLog(`TRTC::addListener-- onRemoteNetworkQuality: ${_data}`);
300
- if(_data.quality == 4 || _data.quality == 5 || _data.quality == 6) {
301
- NOTICE.remoteNetworkQuality({code: _data.quality});
302
- dataReport.remoteNetworkQuality({
303
- code: _data.quality,
304
- // publish_streamid: window.zbyAVSDK_init_params.trtc.streamId,
305
- });
306
- }
307
- qualityRemoteArr.push(_data.quality);
308
- break;
309
- // Event callback for publishing the audio/video stream relayed to CDN
310
- // * @param state 0 relay succeeded, 1 relay failed
311
- // * @param err 0 means success, any other value means failure
312
- // * @param errMsg detailed error reason
313
- case 'onRtmpStreamingStateChanged':
314
- defaultApi.writeLog(`TRTC::addListener-- onRtmpStreamingStateChanged: ${JSON.stringify(_data)}`);
315
- break;
316
- //* Connection state of a local microphone device changed (desktop systems only)
317
- // * @param deviceId device ID
318
- // * @param state connection state, 0: device added; 1: device removed; 2: device enabled.
319
- case 'onMicphoneDeviceChanged':
320
- defaultApi.writeLog(`TRTC::addListener-- onMicphoneDeviceChanged: ${JSON.stringify(_data)}`);
321
- setMicrophoneDevice({deviceId: _data.state ==1 ? '' : _data.deviceId, operationType: 'hotPlug', deviceState: _data.state, changedDeviceId: _data.deviceId});
322
- break;
323
- //* Connection state of a local camera device changed (desktop systems only)
324
- // * @param deviceId device ID
325
- // * @param state connection state, 0: device added; 1: device removed; 2: device enabled.
326
- case 'onVideoDeviceChanged':
327
- defaultApi.writeLog(`TRTC::addListener-- onVideoDeviceChanged: ${JSON.stringify(_data)}`);
328
- setVideoDevice({deviceId: _data.state ==1 ? '' : _data.deviceId, operationType: 'hotPlug', deviceState: _data.state, changedDeviceId: _data.deviceId});
329
- break;
330
- // Speaker hot-plug
331
- case 'onSpeakerDeviceChanged':
332
- defaultApi.writeLog(`TRTC::addListener-- onSpeakerDeviceChanged: ${JSON.stringify(_data)}`);
333
- setSpeakerDevice({deviceId: _data.state ==1 ? '' : _data.deviceId, operationType: 'hotPlug', deviceState: _data.state, changedDeviceId: _data.deviceId});
334
- break;
335
- // Push-stream statistics, one report every 5 seconds
336
- case 'onLocalStatistics':
337
- // defaultApi.writeLog(`TRTC::addListener-- onLocalStatistics: ${_data}`);
338
- console.log('onLocalStatistics',JSON.parse(_data.json));
339
- NOTICE.pushLossAndDelay({
340
- // userId: util.getUidByStreamId(_data.strStreamId),
341
- delay:_data.rtt,
342
- lostrate:_data.packetLoss
343
- });
344
- heartBeatDataReportCalc(event, _data);
345
- break;
346
- // Pull-stream statistics, one report every 5 seconds
347
- case 'onRemoteStatistics':
348
- // defaultApi.writeLog(`TRTC::addListener-- onRemoteStatistics: ${_data}`);
349
- console.log('onRemoteStatistics',JSON.parse(_data.json));
350
- try {
351
- NOTICE.playLossAndDelay({
352
- userId: util.getUidByStreamId(_data.streamId),
353
- delay:_data.rtt,
354
- lostrate:_data.packetLoss
355
- });
356
- }catch(e){}
357
- heartBeatDataReportCalc(event, _data);
358
- break;
359
- case 'onStatisticsUpdate':
360
- // defaultApi.writeLog(`TRTC::addListener-- onStatisticsUpdate: ${JSON.stringify(_data)}`);
361
- // console.log('onStatisticsUpdate',JSON.parse(_data.json).local);
362
- // console.log('onStatisticsUpdate',JSON.parse(_data.json));
363
- // heartBeatDataReportCalc(event, _data);
364
- break;
365
- }
366
- if (zbysdk.openListenerLog) {
367
- console.log(`[zby-live-sdk]--TRTC::Listener:: event: ${event}, data: ${data}`);
368
- }
369
- });
370
- }
371
- };
372
-
373
-
374
- // Initialization related
375
- const init = async (args) => {
376
- defaultApi.writeLog(`avsdk TRTC::init ${JSON.stringify(args)}`);
377
- const usedDevices = window.zbyAVSDK_device_checker_init;
378
- const {devices} = args;
379
- const _devices = {
380
- camera: (devices && devices.camera) || (usedDevices && usedDevices.camera && usedDevices.camera.use) || '',
381
- microphone: (devices && devices.microphone) || (usedDevices && usedDevices.microphone && usedDevices.microphone.use) || '',
382
- speaker: (devices && devices.speaker) || (usedDevices && usedDevices.speaker && usedDevices.speaker.use) || ''
383
- };
384
- await loadTrtc(args.extensionVersion);
385
- await startEngine(args.appId,args.userId, args.usersign);
386
- if(args.role === 'teacher' && args.mode === 1){
387
- // Sitting mode enables the external video capturer and standing mode disables it. Only called in group-class sitting mode; it hands the RTMP data to Zego or RTC (the avatar). Once external capture is enabled, Zego no longer captures data locally
388
- await setEnableExternVideoCapture(true);
389
- }
390
- // Small class: the capture plugin's input data must be loaded in order to compose the Zego avatar
391
- if (args.classMode === 1 || args.classMode === 2) {
392
- defaultApi.writeLog('loadCollectionInputEntry')
393
- loadCollectionInputEntry();
394
- };
395
- await setCameraEncodeBiarate(args.encodeCaptureBitrate);
396
- await setCameraEncodeFps(args.encodeCaptureFps);
397
- await setCameraCaptureResolution(args.previewResolutionWidth, args.previewResolutionHeight);
398
- await setCameraEncodeResolution(args.encodedResolutionWidth, args.encodedResolutionHeight);
399
- // await setCameraEncodeBiarate(args.screenSameBitrate,1);
400
- // await setCameraEncodeFps(args.screenSameFps, 1);
401
- // await setCameraCaptureResolution(args.screenSameResolutionWidth, args.screenSameResolutionHeight, 1);
402
- // await setCameraEncodeResolution(args.screenSameResolutionWidth, args.screenSameResolutionHeight, 1);
403
-
404
- // await setCameraEncodeBiarate(500000);
405
- // await setCameraEncodeFps(15);
406
- // await setCameraCaptureResolution(640, 480);
407
- // await setCameraEncodeResolution(320, 240);
408
- await setDefaultDevice(_devices,'default');
409
- await setMicphoneVolumInterval(500);
410
- window.current_sdk_type = 'trtc';
411
- defaultApi.writeLog('trtc init finished current_sdk_type : trtc');
412
- };
413
-
414
-
415
- /**
416
- * @function Initialize the engine
417
- * @param appid:String TRTC key, required
418
- * @param userid:String
419
- * @param usersign:String
420
- * @return Promise | void
421
- */
422
- const startEngine = (appid, userid, usersign) => {
423
- defaultApi.writeLog(`avsdk TRTC::StartEngine appid: ${appid} userid: ${userid} usersign: ${usersign}`);
424
- return callMethod('StartEngine', {
425
- appid,
426
- userid:userid + '',
427
- usersign
428
- });
429
- };
430
-
431
- const setEnableExternVideoCapture = (enable) => {
432
- defaultApi.writeLog(`avsdk TRTC::EnableExternVideoCapture`);
433
- return callMethod('EnableExternVideoCapture', {
434
- enable
435
- });
436
- };
437
-
438
- /**
439
- * @function Destroy the engine
440
- * @return Promise | void
441
- */
442
- const destroyEngine = () => {
443
- return callMethod('DestroyEngine', {});
444
- };
445
-
446
- // Set the capture resolution
447
- const setCameraCaptureResolution = (width, height) => {
448
- return callMethod('SetCameraCaptureResolution',{width, height});
449
- }
450
- // Set the encode resolution
451
- const setCameraEncodeResolution = (width, height) => {
452
- return callMethod('SetCameraEncodeResolution',{width, height})
453
- }
454
- // Set the encode frame rate
455
- const setCameraEncodeFps = (fps) => {
456
- return callMethod('SetCameraEncodeFps',{fps})
457
- }
458
- // Set the encode bitrate
459
- const setCameraEncodeBiarate = (bitrate) => {
460
- return callMethod('SetCameraEncodeBiarate',{bitrate})
461
- }
462
- // Get the resolutions supported by the given camera
463
- const getCameraResolution = (deviceId) => {
464
- return callMethod('GetCameraResolution',{deviceId})
465
- }
466
-
467
-
468
- /**
469
- * @function Set the default hardware devices: camera, microphone and speaker
470
- * @return Promise
471
- */
472
- export const setDefaultDevice = async (devices, operationType) => {
473
- // Set the default camera
474
- if (devices && devices.camera) {
475
- if (zbyAVSDK_device_checker_init.camera.list.length == 0) {
476
- await getCameraDeviceListTrtc();
477
- }
478
- await setVideoDevice({deviceId: devices.camera, operationType:`${operationType}_1`});
479
- } else {
480
- const cameraData = await getCameraDeviceListTrtc();
481
- if (cameraData.length) {
482
- let hasSetCamera = false;
483
- for (let item of cameraData) {
484
- if (item.isDefault) {
485
- await setVideoDevice({deviceId: item.deviceId, operationType:`${operationType}_2`});
486
- hasSetCamera = true;
487
- break;
488
- }
489
- }
490
- if (!hasSetCamera) {
491
- await setVideoDevice({deviceId: cameraData[0].deviceId, operationType:`${operationType}_3`});
492
- }
493
- }
494
- }
495
- // Set the default microphone
496
- if (devices && devices.microphone) {
497
- if (zbyAVSDK_device_checker_init.microphone.list.length == 0) {
498
- await getMicrophoneDeviceListTrtc();
499
- }
500
- console.log('pp2');
501
- await setMicrophoneDevice({deviceId: devices.microphone, operationType:`${operationType}_1`});
502
- } else {
503
- console.log('pp3');
504
- const microPhoneData = await getMicrophoneDeviceListTrtc();
505
- try {
506
- if (microPhoneData.length) {
507
- let hasSetMicrophone = false;
508
- for (let item of microPhoneData) {
509
- if (item.isDefault) {
510
- await setMicrophoneDevice({deviceId: item.deviceId, operationType:`${operationType}_2`});
511
- hasSetMicrophone = true;
512
- break;
513
- }
514
- }
515
- if (!hasSetMicrophone) {
516
- await setMicrophoneDevice({deviceId: microPhoneData[0].deviceId, operationType:`${operationType}_3`});
517
- }
518
- }
519
- } catch (e){
520
- console.log(e);
521
- }
522
-
523
- }
524
- // Set the default speaker
525
- if (devices && devices.speaker) {
526
- await setSpeakerDevice({deviceId: devices.speaker, operationType:`${operationType}_1`});
527
- } else {
528
- const speakerData = await getSpeakerDeviceListTrtc();
529
- if (speakerData.length) {
530
- let hasSetSpeaker = false;
531
- for (let item of speakerData) {
532
- if (item.isDefault) {
533
- await setSpeakerDevice({deviceId: item.deviceId, operationType:`${operationType}_2`});
534
- hasSetSpeaker = true;
535
- break;
536
- }
537
- }
538
- if (!hasSetSpeaker) {
539
- await setSpeakerDevice({deviceId: speakerData[0].deviceId, operationType:`${operationType}_3`});
540
- }
541
- }
542
- }
543
- };
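The three branches above share one selection rule: prefer the device flagged isDefault, otherwise fall back to the first entry in the list. A condensed sketch of that rule (pickDeviceId is a hypothetical helper, not part of this package):

// Hypothetical helper equivalent to the repeated default-device fallback above.
const pickDeviceId = (list) => {
  if (!list || !list.length) return '';            // no device of this type present
  const preferred = list.find(item => item.isDefault);
  return (preferred || list[0]).deviceId;          // flagged default, else first entry
};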
544
-
545
- // Microphone related
546
- /**
547
- * @function Start the microphone
548
- * @param intervalMs: Number
549
- * @return Promise | void
550
- */
551
- const startMicrophone = (intervalMs = 500) => {
552
- return callMethod('StartMicrophone', {intervalMs});
553
- }
554
-
555
- /**
556
- * @function Stop the microphone
557
- * @return Promise | void
558
- */
559
- const stopMicrophone = () => {
560
- return callMethod('StopMicrophone', {});
561
- }
562
-
563
- const openOrCloseMicrophone = async (operation) => {
564
- isNoticeMicVolumeTRTC = operation;
565
- if(operation) {
566
- await startMicrophone();
567
- } else {
568
- await stopMicrophone();
569
- }
570
- }
571
-
572
- /**
573
- * @function Set the microphone volume callback interval
574
- * @param intervalMs: Number
575
- * @return Promise | void
576
- */
577
- const setMicphoneVolumInterval = (intervalMs) => {
578
- return callMethod('SetMicphoneVolumInterval',{intervalMs});
579
- };
580
-
581
- /**
582
- * @function Get the microphone device list
583
- * @return Promise | void
584
- */
585
- const getMicrophoneDeviceList = () => {
586
- return callMethod('GetMicrophoneDeviceList', {});
587
- }
588
-
589
- /**
590
- * @function Get the microphone list
591
- * @return Promise | void
592
- */
593
- const getMicrophoneDeviceListTrtc = async () => {
594
- console.log('getMicrophoneDeviceListTrtc_microphoneListArr_trtc_ii');
595
- let microphoneListArr = JSON.parse(JSON.parse((await getMicrophoneDeviceList()).msg).DeviceList);
596
- console.log('getMicrophoneDeviceListTrtc_microphoneListArr_trtc_iii',microphoneListArr);
597
- let microphoneList = [];
598
- for (let i = 0, len = microphoneListArr.length; i < len; i++) {
599
- microphoneList.push({
600
- deviceId: microphoneListArr[i].id,
601
- deviceName: microphoneListArr[i].name,
602
- isDefault: microphoneListArr[i].default
603
- });
604
- deviceListReport.micList[microphoneListArr[i].szDeviceId] = microphoneListArr[i].szDeviceName;
605
-
606
- }
607
- if (!window.zbyAVSDK_device_checker_init) {
608
- window.zbyAVSDK_device_checker_init = {};
609
- }
610
- if (!window.zbyAVSDK_device_checker_init.microphone) {
611
- window.zbyAVSDK_device_checker_init.microphone = {};
612
- }
613
- window.zbyAVSDK_device_checker_init.microphone.hasTest = true;
614
- window.zbyAVSDK_device_checker_init.microphone.list = microphoneList;
615
- console.log('getMicrophoneDeviceListTrtc_microphoneListArr_trtc',microphoneList);
616
- return microphoneList;
617
- }
618
-
619
-
620
- /**
621
- * @function Set the specified audio device (current microphone)
622
- * @param deviceId:String audio device id, required
623
- * @return Promise | void
624
- */
625
- const setCurrentMicrophoneDevice = (deviceId) => {
626
- window.zbyAVSDK_device_checker_init.speaker.use = deviceId;
627
- return callMethod('SetCurrentMicrophoneDevice', {
628
- deviceId
629
- });
630
- }
631
-
632
- /**
633
- * @function Set the microphone device to use
635
- * @param deviceId:String microphone device id, required
636
- * @param operationType:String operation type, optional
636
- * @return Promise | void
637
- */
638
- const setMicrophoneDevice = async (args) => {
639
- console.log('getMicrophoneDeviceListTrtc_microphoneListArr_trtc_i');
640
- let {deviceId, operationType, deviceState, changedDeviceId} = args;
641
- let deviceName = '';
642
- let microPhoneData;
643
- if (!deviceId) {
644
- microPhoneData = await getMicrophoneDeviceListTrtc();
645
- if (microPhoneData.length) {
646
- let hasGetMicrophone = false;
647
- for (let item of microPhoneData) {
648
- if (item.isDefault) {
649
- deviceId = item.deviceId;
650
- deviceName = item.deviceName;
651
- hasGetMicrophone = true;
652
- break;
653
- }
654
- }
655
- if (!hasGetMicrophone) {
656
- deviceId = microPhoneData[0].deviceId;
657
- deviceName = microPhoneData[0].deviceName;
658
- }
659
- } else {
660
- deviceId = '';
661
- }
662
- if (deviceId === '') {
663
- NOTICE.noDevice({
664
- deviceType: 'microphone'
665
- });
666
- }
667
- }
668
- try {
669
- dataReport.setDevice({
670
- device_type:2,
671
- device_id:deviceId,
672
- device_name:deviceListReport.micList[deviceId],
673
- operationType,
674
- fore_state: operationType == 'hotPlug' ? +deviceState+1 : '-'
675
- });
676
- window.zbyAVSDK_device_checker_init.microphone.name = deviceListReport.micList[deviceId];
677
- } catch (e) {};
678
- await setCurrentMicrophoneDevice(deviceId);
679
- if (operationType == 'hotPlug' || operationType == 'deviceError') {
680
- if(!microPhoneData){
681
- microPhoneData = await getMicrophoneDeviceListTrtc();
682
- deviceName = deviceListReport.micList[deviceId];
683
- }
684
- NOTICE[operationType]({
685
- deviceType: 'microphone',
686
- useDeviceId: deviceId,
687
- useDeviceName: deviceName,
688
- deviceList: microPhoneData,
689
- deviceState,
690
- changedDeviceId
691
- });
692
- }
693
- }
694
-
695
- /**
696
- * @function 获取当前麦克风的音量
697
- * @return Promise | void
698
- */
699
- const getCurrentMicrophoneVolume = () => {
700
- return callMethod('GetCurrentMicrophoneVolume', {}).then(ret => {
701
- let volume = 0;
702
- try {
703
- volume = JSON.parse(ret.msg).microphoneVolume;
704
- } catch (e) {
705
- console.error(`zby-live-sdk: getCurrentMicrophoneVolume ret: ${ret}. error: ${e}`);
706
- }
707
- return volume;
708
- });
709
- }
710
-
711
- /**
712
- * @function Set the current microphone volume
713
- * @param volume : number volume value
714
- * @return Promise | void
715
- */
716
- const setCurrentMicrophoneVolume = (volume) => {
717
- return callMethod('SetCurrentMicrophoneVolume', {volume});
718
- }
719
-
720
- /**
721
- * @function Get the current microphone ID
722
- * @return Promise | void
723
- */
724
- const getCurrentMicrophoneId = (volume) => {
725
- return callMethod('GetCurrentMicrophoneId', {volume});
726
- }
727
-
728
- // Camera related
729
- /**
730
- * @function Open the camera
731
- * @param width: Number
732
- * @param height: Number
733
- * @return Promise | void
734
- */
735
- const startCamera = (width,height) => {
736
- return callMethod('StartCamera', {width,height});
737
- }
738
-
739
- /**
740
- * @function Close the camera
741
- * @return Promise | void
742
- */
743
- const stopCamera = () => {
744
- return callMethod('StopCamera', {});
745
- }
746
-
747
- const openOrCloseCamera = async (operation) => {
748
- console.log('openCa',operation);
749
- if(operation){
750
- await startCamera();
751
- } else {
752
- await stopCamera();
753
- }
754
- }
755
-
756
- /**
757
- * @function Get the camera device list
758
- * @return Promise | void
759
- */
760
- const getCameraDeviceList = () => {
761
- return callMethod('GetCameraDeviceList', {});
762
- }
763
-
764
- /**
765
- * @function Get the camera device list
766
- * @return Promise | void
767
- */
768
- export const getCameraDeviceListTrtc= async () => {
769
- let videoListArr = JSON.parse(JSON.parse((await getCameraDeviceList()).msg).DeviceList);
770
- console.log('GetCameraDeviceList_cameraList_trtc_iiii',videoListArr);
771
- let videoList = [];
772
- for (let i = 0, len = videoListArr.length; i < len; i++) {
773
- videoList.push({
774
- deviceId: videoListArr[i].id,
775
- deviceName: videoListArr[i].name,
776
- isDefault: videoListArr[i].default
777
- });
778
- deviceListReport.cameraList[videoListArr[i].szDeviceId] = videoListArr[i].szDeviceName;
779
- }
780
- if (!window.zbyAVSDK_device_checker_init) {
781
- window.zbyAVSDK_device_checker_init = {};
782
- }
783
- if (!window.zbyAVSDK_device_checker_init.camera) {
784
- window.zbyAVSDK_device_checker_init.camera = {};
785
- }
786
- window.zbyAVSDK_device_checker_init.camera.hasTest = true;
787
- window.zbyAVSDK_device_checker_init.camera.list = videoList;
788
- console.log('GetCameraDeviceList_cameraList_trtc',videoList);
789
- return videoList;
790
- };
791
-
792
- /**
793
- * @function Set the specified video device (current camera)
794
- * @param deviceId:String video device id, required
795
- * @return Promise | void
796
- */
797
- const setCurrentCameraDevice = (deviceId) => {
798
- window.zbyAVSDK_device_checker_init.speaker.use = deviceId;
799
- return callMethod('SetCurrentCameraDevice', {
800
- deviceId
801
- });
802
- }
803
-
804
- /**
805
- * @function Set the camera to use
806
- * @param deviceId:String camera id, required
807
- * @param operationType:String operation type, optional
808
- * 'device_error' -> handle a device error, 'plug_and_unplug' -> handle hot-plugging,
809
- * omit it for a plain set-camera-device operation
810
- * @return Promise | void
811
- */
812
- const setVideoDevice = async (args) => {
813
- defaultApi.writeLog(`${JSON.stringify(args)}----setVideoDevice-trtc----`);
814
- let {deviceId, operationType, deviceState, code, changedDeviceId} = args;
815
- let deviceName = '';
816
- let cameraData;
817
- if (!deviceId) {
818
- cameraData = await getCameraDeviceListTrtc();
819
- // console.log('hsghsghsg0',cameraData);
820
- if (cameraData.length) {
821
- let hasGetCamare = false;
822
- // console.log('hsghsghsg1',hasGetSpeaker);
823
- for (let item of cameraData) {
824
- if (item.isDefault) {
825
- deviceId = item.deviceId;
826
- deviceName = item.deviceName;
827
- hasGetCamare = true;
828
- break;
829
- }
830
- }
831
- // console.log('hsghsghsg2',hasGetSpeaker);
832
- if (!hasGetCamare) {
833
- deviceId = cameraData[0].deviceId;
834
- deviceName = cameraData[0].deviceName;
835
- // console.log('hsghsghsg91',deviceId);
836
- }
837
- } else {
838
- deviceId = '';
839
- }
840
- // console.log('hsghsghsg1',deviceId);
841
- if (deviceId === '') {
842
- NOTICE.noDevice({
843
- deviceType: 'camera'
844
- });
845
- }
846
- }
847
- window.zbyAVSDK_device_checker_init.camera.use = deviceId;
848
- window.zbyAVSDK_device_checker_init.camera.name = deviceListReport.cameraList[deviceId];
849
- try {
850
- dataReport.setDevice({
851
- device_type: 1,
852
- device_id:deviceId,
853
- device_name:deviceListReport.cameraList[deviceId],
854
- operationType,
855
- fore_state: operationType == 'hotPlug' ? deviceState+1 : '-'
856
- });
857
- } catch (e) {};
858
- await setCurrentCameraDevice(deviceId);
859
- if (operationType == 'hotPlug' || operationType == 'deviceError') {
860
- if(!cameraData){
861
- cameraData = await getCameraDeviceListTrtc();
862
- deviceName = deviceListReport.cameraList[deviceId];
863
- }
864
- NOTICE[operationType]({
865
- deviceType: 'camera',
866
- useDeviceId: deviceId,
867
- useDeviceName: deviceName,
868
- deviceList: cameraData,
869
- messge: code,
870
- deviceState,
871
- changedDeviceId
872
- });
873
- }
874
- try {
875
- NOTICE.useredCamera({
876
- deviceId,
877
- deviceName
878
- });
879
- } catch (e){ };
880
- };
881
-
882
- /**
883
- * @function Get the current camera ID
884
- * @return Promise | void
885
- */
886
- const GetCurrentCameraId = () => {
887
- return callMethod('GetCurrentCameraId', {});
888
- }
889
-
890
- // Speaker related
891
-
892
- /**
893
- * @function Get the speaker device list
894
- * @return Promise | void
895
- */
896
- const getSpeakerDeviceList = () => {
897
- return callMethod('GetSpeakerDeviceList', {});
898
- }
899
-
900
- /**
901
- * @function Get the speaker device list
902
- * @return Promise | void
903
- */
904
- const getSpeakerDeviceListTrtc = async () => {
905
- let speakerListArr = JSON.parse(JSON.parse((await getSpeakerDeviceList()).msg).DeviceList);
906
- console.log('getSpeakerDeviceList_speakerListArr_trtc_ii',speakerListArr);
907
- let speakerList = [];
908
- for (let i = 0, len = speakerListArr.length; i < len; i++) {
909
- speakerList.push({
910
- deviceId: speakerListArr[i].id,
911
- deviceName: speakerListArr[i].name,
912
- isDefault: speakerListArr[i].default
913
- });
914
- deviceListReport.speakerList[speakerListArr[i].szDeviceId] = speakerListArr[i].szDeviceName;
915
-
916
- }
917
- if (!window.zbyAVSDK_device_checker_init) {
918
- window.zbyAVSDK_device_checker_init = {};
919
- }
920
- if (!window.zbyAVSDK_device_checker_init.speaker) {
921
- window.zbyAVSDK_device_checker_init.speaker = {};
922
- }
923
- window.zbyAVSDK_device_checker_init.speaker.hasTest = true;
924
- window.zbyAVSDK_device_checker_init.speaker.list = speakerList;
925
- console.log('getSpeakerDeviceList_speakerListArr_trtc_iii',speakerList);
926
- return speakerList;
927
- };
928
-
929
- /**
930
- * @function Set the specified audio device (current speaker)
931
- * @param deviceId:String audio device id, required
932
- * @return Promise | void
933
- */
934
- const setCurrentSpeakerDevice = (deviceId) => {
935
- window.zbyAVSDK_device_checker_init.speaker.use = deviceId;
936
- return callMethod('SetCurrentSpeakerDevice', {
937
- deviceId
938
- });
939
- }
940
-
941
- /**
942
- * @function Set the speaker to use
943
- * @param deviceId:String speaker id, required
944
- * @param operationType:String operation type, optional
945
- * 'device_error' -> handle a device error, 'plug_and_unplug' -> handle hot-plugging,
946
- * omit it for a plain set-speaker-device operation
947
- * @return Promise | void
948
- */
949
- const setSpeakerDevice = async (args) => {
950
- defaultApi.writeLog(`${JSON.stringify(args)}----setSpeakerDevice-trtc----`);
951
- console.log('hsghsghsg999',args);
952
- let {deviceId, operationType, deviceState, code, changedDeviceId} = args;
953
- let deviceName = '';
954
- let speakerData;
955
- if (!deviceId) {
956
- speakerData = await getSpeakerDeviceListTrtc();
957
- console.log('hsghsghsg0',speakerData.length);
958
- if (speakerData.length) {
959
- let hasGetSpeaker = false;
960
- console.log('hsghsghsg1',hasGetSpeaker);
961
- for (let item of speakerData) {
962
- if (item.isDefault) {
963
- deviceId = item.deviceId;
964
- deviceName = item.deviceName;
965
- hasGetSpeaker = true;
966
- break;
967
- }
968
- }
969
- console.log('hsghsghsg2',hasGetSpeaker);
970
- if (!hasGetSpeaker) {
971
- deviceId = speakerData[0].deviceId;
972
- deviceName = speakerData[0].deviceName;
973
- console.log('hsghsghsg91',deviceId);
974
- }
975
- } else {
976
- deviceId = '';
977
- }
978
- if (deviceId === '') {
979
- NOTICE.noDevice({
980
- deviceType: 'speaker'
981
- });
982
- }
983
- }
984
- console.log('hsghsghsg999',deviceId);
985
- try {
986
- dataReport.setDevice({
987
- device_type:3,
988
- device_id:deviceId,
989
- device_name:deviceListReport.speakerList[deviceId],
990
- operationType,
991
- fore_state: operationType == 'hotPlug' ? deviceState+1 : '-'
992
- });
993
- window.zbyAVSDK_device_checker_init.speaker.name = deviceListReport.speakerList[deviceId];
994
- } catch (e) {
995
- console.log(e);
996
- };
997
- console.log('hsghsghsg99911',deviceId);
998
- usingAudioDeviceId.speaker = deviceId;
999
- await setCurrentSpeakerDevice(deviceId);
1000
- if (operationType == 'hotPlug' || operationType == 'deviceError') {
1001
- if(!speakerData){
1002
- speakerData = await getSpeakerDeviceListTrtc();
1003
- deviceName = deviceListReport.speakerList[deviceId];
1004
- }
1005
- console.log('hsghsghsg99911',deviceId);
1006
- NOTICE[operationType]({
1007
- deviceType: 'speaker',
1008
- useDeviceId: deviceId,
1009
- useDeviceName: deviceName,
1010
- deviceList: speakerData,
1011
- message: code,
1012
- deviceState,
1013
- changedDeviceId
1014
- });
1015
- }
1016
- console.log('hsghsghsg999112',deviceId);
1017
- };
1018
-
1019
- /**
1020
- * @function Get the current speaker volume
1021
- * @return Promise | void
1022
- */
1023
- const getCurrentSpeakerVolume = () => {
1024
- return callMethod('GetCurrentSpeakerVolume', {});
1025
- }
1026
-
1027
- /**
1028
- * @function Get the current speaker ID
1029
- * @return Promise | void
1030
- */
1031
- const getCurrentSpeakerId = () => {
1032
- return callMethod('GetCurrentSpeakerId', {});
1033
- }
1034
-
1035
- /**
1036
- * @function Set the current speaker volume
1037
- * @param volume : number volume value
1038
- * @return Promise | void
1039
- */
1040
- const setCurrentSpeakerVolume = (volume) => {
1041
- return callMethod('SetCurrentSpeakerVolume', {volume});
1042
- }
1043
-
1044
-
1045
- /**
1046
- * @function Start system audio capture
1047
- * @return Promise | void
1048
- */
1049
- const startSystemAudioLoopback = () => {
1050
- return callMethod('StartSystemAudioLoopback', {});
1051
- }
1052
-
1053
- /**
1054
- * @function Stop system audio capture
1055
- * @return Promise | void
1056
- */
1057
- const stopSystemAudioLoopback = () => {
1058
- return callMethod('StopSystemAudioLoopback', {});
1059
- }
1060
-
1061
- // Capture speaker (system) sound
1062
- const setAudioSpeakerCapture = (speaker) => {
1063
- if(speaker) {
1064
- return startSystemAudioLoopback();
1065
- } else {
1066
- return stopSystemAudioLoopback();
1067
- }
1068
- }
1069
-
1070
- // SEI
1071
- /**
1072
- * @function SEI message
1073
- * @param data: String payload
1074
- * @param dataSize: Number payload length
1075
- * @param repeatCount: Number number of times to send
1076
- * @return Promise | void
1077
- */
1078
- const sendSEIMsg = (data,dataSize,repeatCount) => {
1079
- if (typeof dataSize === 'undefined') {
1080
- dataSize = data.length;
1081
- };
1082
- if (typeof repeatCount === 'undefined') {
1083
- repeatCount = 1;
1084
- }
1085
- return callMethod('SendSEIMsg', {data,dataSize,repeatCount});
1086
- }
1087
-
1088
- // Capture related
1089
-
1090
- /**
1091
- * @function Start screen capture
1092
- * @return Promise | void
1093
- */
1094
- const startScreenCapture = () => {
1095
- return callMethod('StartScreenCapture', {});
1096
- }
1097
-
1098
- /**
1099
- * @function Stop screen capture
1100
- * @return Promise | void
1101
- */
1102
- const stopScreenCapture = () => {
1103
- return callMethod('StopScreenCapture', {});
1104
- }
1105
-
1106
- // Push/pull stream related
1107
-
1108
- /**
1109
- * @function Open the local or remote video view
1110
- * @param isLocal:Boolean whether this is the local video preview, required
1111
- * @param streamId:String id of the video stream to pull, optional; required only when pulling a remote stream
1112
- * @param domId:String id of the <video> tag, optional
1113
- * If domId is passed, the video is bound to the corresponding <video> tag
1114
- * @return Promise the src can be read from the Promise, Promise.then((src) => {})
1115
- */
1116
- const startLocalOrRemotePreview = async (isLocal, streamId, domId, isTeacherNewWindowPreview) => {
1117
- defaultApi.writeLog(`${streamId}----streamId-trtc---- ${isLocal}`);
1118
- let isUpdateChromeVersion = await util.getChromeVersion();
1119
- // 预览本地视频的话,流 id 从全局配置中获取
1120
- // if (isLocal) {
1121
- // streamId = isInRoom ? (window.zbyAVSDK_init_params && window.zbyAVSDK_init_params.trtc.streamId)
1122
- // : 'deviceCheckerInit';
1123
- // }
1124
- const externalConstraints = {
1125
- audio: false,
1126
- video: {
1127
- mandatory: {
1128
- chromeMediaSource: 'external',
1129
- chromeMediaSourceId: `ems://trtc_ext/${isLocal || isTeacherNewWindowPreview ? -3 : streamId}`
1130
- }
1131
- }
1132
- };
1133
-
1134
- console.log('externalConstraints',externalConstraints);
1135
-
1136
- return new Promise((resolve, reject) => {
1137
- const handleExternalSuccess = (stream) => {
1138
- defaultApi.writeLog(`${stream}----stream-trtc----`);
1139
- stream.oninactive = () => {
1140
- defaultApi.writeLog('Stream inactive');
1141
- };
1142
- const src = isUpdateChromeVersion? stream : window.URL.createObjectURL(stream);
1143
- console.log('内核升级', isUpdateChromeVersion,domId,src);
1144
- if (domId && document.querySelector(domId)) {
1145
- if(isUpdateChromeVersion) {
1146
- document.querySelector(domId).srcObject = src;
1147
- console.log('内核升级1',domId, document.querySelector(domId),document.querySelector(domId).srcObject,src);
1148
- } else {
1149
- console.log('内核升级2');
1150
- document.querySelector(domId).src = src;
1151
- }
1152
- }
1153
- defaultApi.writeLog(`setVidoeSrc, streamId:${streamId}, domId: ${domId}, src: ${src}`);
1154
- resolve(src);
1155
- };
1156
-
1157
- const handleExternalError = (error) => {
1158
- if (error.name === 'ConstraintNotSatisfiedError') {
1159
- console.error('ConstraintNotSatisfiedError');
1160
- } else if (error.name === 'PermissionDeniedError') {
1161
- console.error(
1162
- 'Permissions have not been granted to use your camera and '
1163
- + 'microphone, you need to allow the page access to your devices in '
1164
- + 'order for the demo to work.'
1165
- );
1166
- }
1167
- console.error(`getUserMedia error: ${error.name}`, error);
1168
- if (domId) {
1169
- document.querySelector(domId).src = '';
1170
- }
1171
- reject('');
1172
- };
1173
-
1174
- if (navigator.webkitGetUserMedia) {
1175
- navigator.webkitGetUserMedia(
1176
- externalConstraints,
1177
- handleExternalSuccess,
1178
- handleExternalError
1179
- );
1180
- }
1181
- });
1182
- };
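A short usage sketch for the preview helper above (the DOM id is a placeholder, not from this package): local preview and the teacher's new-window preview both request pseudo-channel -3 through the ems:// source, while remote streams are requested by their streamId.

// Hypothetical: bind the local camera preview to <video id="local-preview">.
startLocalOrRemotePreview(true, null, '#local-preview').then((src) => {
  console.log('local preview src ready', src);
});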
1183
-
1184
- /**
1185
- * @function Start pushing the stream
1186
- * @param streamId: String stream id
1187
- * @return Promise | void
1188
- */
1189
- const startPush = (streamId ,channel = 0) => {
1190
- if(!streamId){
1191
- streamId = zbyAVSDK_init_params.trtc.streamId;
1192
- }
1193
- // Heartbeat starts
1194
- if (!isFirstHeartBeatReport) {
1195
- isFirstHeartBeatReport = true;
1196
- console.log('ppppppp上报呀')
1197
- heartBeatDataReport('start');
1198
- }
1199
- console.log('startPush',streamId,channel);
1200
- return callMethod('StartPush', {streamId, channel});
1201
- }
1202
-
1203
- /**
1204
- * @function Stop pushing the stream
1205
- * @param streamId: String stream id
1206
- * @return Promise | void
1207
- */
1208
- const stopPush = (channel) => {
1209
- // if(!streamId){
1210
- // streamId = zbyAVSDK_init_params.trtc.streamId;
1211
- // }
1212
- if(!channel){
1213
- channel = 0;
1214
- }
1215
-
1216
- return callMethod('StopPush', {channel});
1217
- }
1218
-
1219
- /**
1220
- * @function Stop/resume pushing audio
1221
- * @param enable : bool
1222
- * @return Promise | void
1223
- */
1224
- const muteLocalAudio = (enable) => {
1225
- return callMethod('MuteLocalAudio', {enable});
1226
- }
1227
-
1228
- /**
1229
- * @function Stop/resume pushing video
1230
- * @param enable: bool
1231
- * @return Promise | void
1232
- */
1233
- const muteLocalVideo = (enable) => {
1234
- return callMethod('MuteLocalVideo', {enable});
1235
- }
1236
-
1237
- /**
1238
- * @function Start relaying the local camera TRTC stream to RTMP. Currently only one stream can be relayed
1239
- * @return Promise | void
1240
- */
1241
- const addPublishRtmpStreamUrl = () => {
1242
- return callMethod('AddPublishRtmpStreamUrl', {});
1243
- }
1244
-
1245
- /**
1246
- * @function Stop relaying to RTMP
1247
- * @return Promise | void
1248
- */
1249
- const removePublishStreamUrl = () => {
1250
- return callMethod('RemovePublishStreamUrl', {});
1251
- }
1252
-
1253
- /**
1254
- * @function Start pulling the stream
1255
- * @param streamId: String stream id
1256
- * @return Promise | void
1257
- */
1258
- const startPlay = (streamId) => {
1259
- return callMethod('StartPlay', {streamId});
1260
- }
1261
-
1262
- /**
1263
- * @function Initialize stream pulling
1264
- * @param streamId:String taken from the incoming channel message, required
1265
- * @param domId:String id of the <video> tag, optional
1266
- * If passed, the video is bound to the corresponding <video> tag
1267
- * @param pInfo:String extra media stream info, optional, defaults to 'none'
1268
- * @return src:String video preview address
1269
- */
1270
- export const initPullFlow = async (streamId, domId, mute, pInfo, notAutoPlay, audioOnly) => {
1271
- defaultApi.writeLog(`TRTC:: initPullFlow_start ,streamId :${streamId}`);
1272
- let playChannel;
1273
- resetStreamIdRtcPlayerInfo1(streamId);
1274
- resetStreamIdRtcPlayerInfo(streamId);
1275
- try {
1276
- if (!isFirstHeartBeatReport) {
1277
- isFirstHeartBeatReport = true;
1278
- heartBeatDataReport('start');
1279
- }
1280
- } catch (error) {}
1281
- if (streamIdToPreviewId[streamId] == undefined) {
1282
-
1283
- // defaultApi.writeLog(`TRTC:: The streamId queue does not contain this streamId ${uiChnIndexs}`);
1284
- playChannel = uiChnIndexs.pop();
1285
- streamIdToPreviewId[streamId] = playChannel;
1286
- console.log('streamIdIsNoExited',playChannel,streamIdToPreviewId[streamId],streamId,uiChnIndexs);
1287
- } else {
1288
- defaultApi.writeLog('TRTC:: The streamId queue contains this streamId');
1289
- playChannel = streamIdToPreviewId[streamId];
1290
- console.log('streamIdIsExited',playChannel,streamIdToPreviewId[streamId],streamId,uiChnIndexs);
1291
- await stopPlay(streamId);
1292
- resetStreamIdRtcPlayerInfo(streamId);
1293
- }
1294
- // Check whether creating or fetching the playback channel succeeded; if not, delete it and retry
1295
- // if (playChannel == undefined) {
1296
- // delete streamIdToPreviewId[streamId];
1297
- // defaultApi.writeLog('error', 'TRTC:: uiChnIndex is not exist');
1298
- // // delete streamIdToBlobUrl[streamId];
1299
- // return initPullFlow(streamId, domId, mute, pInfo, notAutoPlay, audioOnly);
1300
- // }
1301
- previewIdToStreamId[playChannel] = streamId;
1302
- // Whether to pull audio
1303
- // await muteRemoteAudio(streamId, !!mute);
1304
- // // Whether to pull video
1305
- // await muteRemoteVideo(streamId, !!audioOnly);
1306
- if (!notAutoPlay) {
1307
- await startPlay(streamId);
1308
- }
1309
- await muteRemoteAudio(streamId, !!mute);
1310
- // Whether to pull video
1311
- await muteRemoteVideo(streamId, !!audioOnly);
1312
- const videoSrc = await startLocalOrRemotePreview(false, streamId, domId);
1313
- return {
1314
- videoSrc,
1315
- playerId: playChannel
1316
- };
1317
- };
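A hedged caller-side sketch of the pull flow above (the streamId and DOM id values are placeholders): a new streamId gets a free channel popped from uiChnIndexs, playback starts unless notAutoPlay is set, and the reserved channel comes back as playerId.

// Hypothetical caller of initPullFlow.
(async () => {
  const { videoSrc, playerId } = await initPullFlow('remote_stream_id', '#remote-video', false);
  console.log('pulling stream on channel', playerId, videoSrc);
})();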
1318
-
1319
- /**
1320
- * @function Stop pulling the stream
1321
- * @param streamId: String stream id
1322
- * @return Promise | void
1323
- */
1324
- const stopPlay = (streamId, recovery) => {
1325
- const id = streamIdToPreviewId[streamId];
1326
- delete streamIdRtcPlayerInfo[streamId];
1327
- if (recovery && id && !uiChnIndexs.includes(id)) {
1328
- uiChnIndexs.push(id);
1329
- defaultApi.writeLog(`avsdk TRTC::stopPlayStream streamId: ${streamId} id: ${id} uiChnIndexs: ${uiChnIndexs}`);
1330
- delete streamIdToPreviewId[streamId];
1331
- }
1332
- return callMethod('StopPlay', {streamId});
1333
- }
1334
-
1335
- /**
1336
- * @function Stop pulling all streams
1337
- * @return Promise | void
1338
- */
1339
- const stopPlayAll = () => {
1340
- return callMethod('StopPlayAll', {});
1341
- }
1342
-
1343
- /**
1344
- * @function Stop/resume pulling audio; true mutes, false pulls the sound
1345
- * @param streamId :string
1346
- * @param mute : bool
1347
- * @return Promise | void
1348
- */
1349
- const muteRemoteAudio = (streamId, mute) => {
1350
- console.log('audio_type',mute)
1351
- streamIdRtcPlayerInfo1[streamId].audio_type = !mute;
1352
- console.log('audio_type1', streamIdRtcPlayerInfo1)
1353
- return callMethod('MuteRemoteAudio', {streamId, mute});
1354
- }
1355
-
1356
- const pullAudioFlow = async (playerId, operation, streamid) => {
1357
- defaultApi.writeLog(`pullAudioFlow -- playerId ${playerId} operation ${operation} streamId ${streamid}`);
1358
- if(streamid){
1359
- if(streamIdToPreviewId[streamid] == undefined) {
1360
- NOTICE.pullAudioFlowError({streamid})
1361
- return
1362
- }else{
1363
- playerId = streamIdToPreviewId[streamid]
1364
- console.log('pullAudioFlow-playerId',playerId,streamIdToPreviewId)
1365
- }
1366
- }
1367
- let streamId = previewIdToStreamId[playerId];
1368
- try {
1369
- streamIdRtcPlayerInfo1[streamId].audio_type = !operation;
1370
- console.log('pullAudioFlow ::previewIdToStreamId',streamId,previewIdToStreamId)
1371
- console.log('pullAudioFlow ::streamIdRtcPlayerInfo1',streamId,streamIdRtcPlayerInfo1);
1372
- } catch (e) {
1373
- console.log('pullAudioFlow ::streamIdRtcPlayerInfo1--error',e);
1374
- }
1375
- try {
1376
- dataReport.setPullVoice({
1377
- code:+!operation,
1378
- pull_uid: util.getUidByStreamId(streamId),
1379
- pull_streamid:streamId,
1380
- playerId
1381
- // operator:'client'
1382
- });
1383
- } catch (e) {};
1384
- return await muteRemoteAudio(streamId, operation);
1385
- }
1386
-
1387
- /**
1388
- * @function Stop/resume pulling video
1389
- * @param streamId :string
1390
- * @param mute: bool
1391
- * @return Promise | void
1392
- */
1393
- const muteRemoteVideo = (streamId, mute) => {
1394
- try {
1395
- streamIdRtcPlayerInfo1[streamId].video_type = !mute;
1396
- console.log('activateVideoPlayStream ::streamIdRtcPlayerInfo1',streamIdRtcPlayerInfo1);
1397
- } catch (e) {
1398
- console.log('activateVideoPlayStream ::streamIdRtcPlayerInfo1 --error',e);
1399
- }
1400
- return callMethod('MuteRemoteVideo', {streamId, mute});
1401
- }
1402
-
1403
- /**
1404
- *
1405
- * @function Focus selected audio streams when all microphones are open
1406
- * @param bFocus :bool
1407
- * @param streamIdarr : array
1408
- * @return Promise | void
1409
- */
1410
- const setFocusStreams = (bFocus, streamIdarr) => {
1411
- return callMethod('SetFocusStreams', {bFocus, streamId1, streamId1, streamId1, streamId1});
1412
- }
1413
- // Load the capture plugin's input data
1414
- export const loadCollectionInputEntry = () => {
1415
- return callMethod('LoadCollectionInputEntry', {});
1416
- };
1417
-
1418
- // Load the screenshot plugin
1419
- const LoadCollectionOutputEntry2 = () => {
1420
- defaultApi.writeLog('info', 'avsdk TRTC::LoadCollectionOutputEntry2');
1421
- return callMethod('LoadCollectionOutputEntry2', { });
1422
- };
1423
-
1424
- // Start screen mirroring
1425
- const startMultiScreen = async () => {
1426
- defaultApi.writeLog('info', 'avsdk TRTC::startMultiScreen');
1427
- // await setVideoCaptureFactory(true,1);
1428
- await LoadCollectionOutputEntry2();
1429
- };
1430
-
1431
- // Highlight moments: enable (bool)/strDomain (string)/strKey (string)
1432
- const enableHighlight = async (enable,strDomain = '',strKey = '') => {
1433
- defaultApi.writeLog('info', 'avsdk TRTC::enableHighlight');
1434
- return callMethod('EnableHighlight', {enable, strDomain, strKey});
1435
- };
1436
-
1437
- /**
1438
- * @function Get all channels of the current video streams
1439
- * @param streamId stream id
1440
- * @return channelIndex
1441
- */
1442
- export const getAllChannelIndex = () => {
1443
- defaultApi.writeLog(`TRTC : getAllChannelIndex ${JSON.stringify(streamIdToPreviewId)}`);
1444
- return streamIdToPreviewId;
1445
- };
1446
-
1447
- // Start link-mic
1448
- const teacherStartLinkMic = async (mode) => {
1449
- if (mode === 0) {
1450
- await startMicrophone();
1451
- } else {
1452
- // After link-mic starts the microphone must be opened, and the external capturer must be enabled (every time)
1453
- await startMicrophone();
1454
- await setEnableExternVideoCapture(true)
1455
- }
1456
- };
1457
- // End link-mic
1458
- const teacherStopLinkMic = async (mode = 0) => {
1459
- // if (mode === 0) {
1460
- // // await muteVideoPublish(false,0);
1461
- // } else {
1462
- await stopMicrophone();
1463
- // }
1464
- };
1465
-
1466
- /**
1467
- * @function Leave the classroom
1468
- * @return Promise | void
1469
- */
1470
- const leaveRoom = async () => {
1471
- for (let key in streamIdToPreviewId) {
1472
- await stopPlay(key, true); // recycle the channel
1473
- }
1474
- await stopPush();
1475
- removerListener();
1476
- await destroyEngine();
1477
- heartBeatDataReport('stop');
1478
- isFirstHeartBeatReport = false;
1479
- streamIdToPreviewId = {};
1480
- previewIdToStreamId = {};
1481
- streamIdRtcPlayerInfo = {};
1482
- streamIdRtcPlayerInfo1 = {};
1483
- // window.zegoHasPushFlow = 0;
1484
- }
1485
-
1486
- const heartBeatRealKeys = ['video_fps', 'video_bitrate', 'audio_fps', 'audio_bitrate'];
1487
-
1488
- const _heartBeatDataReport = () => {
1489
- console.log('上报吧2')
1490
- // let cpuRate = 0;
1491
- // let memRate = 0;
1492
- // let rateCount = 0;
1493
- // let appCpuRate = 0;
1494
- // let appMemUsed = 0;
1495
- // let rateTimer = setInterval(async () => {
1496
- // rateCount++;
1497
- // let {cpu_rate, mem_rate, gpus, app_cpu_rate, app_mem_used} = (await toolApi.getCurCpuMemInfo()).msg;
1498
- // cpu_rate = cpu_rate < 0 ? 0 : cpu_rate;
1499
- // cpuRate += parseFloat(cpu_rate);
1500
- // memRate += parseFloat(mem_rate);
1501
- // if (window.zbyAVSDK_init_params.zego.role === 'student') {
1502
- // appCpuRate += parseFloat(app_cpu_rate);
1503
- // appMemUsed += parseFloat(app_mem_used);
1504
- // }
1505
-
1506
- // if (rateCount >= 3) {
1507
- // heartBeatRealKeys.forEach(realKey => {
1508
- // if (heartBeatDataReportObj.hasOwnProperty(realKey) && heartBeatDataReportObj.count > 0) {
1509
- // heartBeatDataReportObj[realKey] = util.toFixed(heartBeatDataReportObj[realKey]/heartBeatDataReportObj.count);
1510
- // }
1511
- // });
1512
- const pullInfo = [];
1513
- console.log('拉流的类型1',streamIdRtcPlayerInfo)
1514
- Object.keys(streamIdRtcPlayerInfo).forEach(streamid => {
1515
- console.log('拉流的类型1.5')
1516
- heartBeatRealKeys.forEach(realKey => {
1517
- if (!streamIdRtcPlayerInfo[streamid].hasOwnProperty(realKey)) {
1518
- streamIdRtcPlayerInfo[streamid][realKey] = [];
1519
- }
1520
- // if (streamIdRtcPlayerInfo[streamid].count > 0) {
1521
- // streamIdRtcPlayerInfo[streamid][realKey] = util.toFixed(streamIdRtcPlayerInfo[streamid][realKey]/streamIdRtcPlayerInfo[streamid].count);
1522
- // }
1523
- });
1524
- console.log('拉流的类型2',streamIdRtcPlayerInfo1[streamid])
1525
- // Determine the pull-stream type; could be extracted into a helper function later
1526
- if(streamIdRtcPlayerInfo1[streamid].audio_type && streamIdRtcPlayerInfo1[streamid].video_type) {
1527
- streamIdRtcPlayerInfo1[streamid].stream_type = 'both';
1528
- } else if(!streamIdRtcPlayerInfo1[streamid].audio_type && streamIdRtcPlayerInfo1[streamid].video_type) {
1529
- streamIdRtcPlayerInfo1[streamid].stream_type = 'video';
1530
- } else if(streamIdRtcPlayerInfo1[streamid].audio_type && !streamIdRtcPlayerInfo1[streamid].video_type) {
1531
- streamIdRtcPlayerInfo1[streamid].stream_type = 'audio';
1532
- } else {
1533
- streamIdRtcPlayerInfo1[streamid].stream_type = 'none';
1534
- }
1535
- // console.log('hsghsghsg_type_type', streamIdRtcPlayerInfo1[streamid].stream_type);
1536
-
1537
- pullInfo.push({
1538
- streamid,
1539
- // uid: util.getUidByStreamId(streamid),
1540
- ...streamIdRtcPlayerInfo[streamid],
1541
- pull_type: streamIdRtcPlayerInfo1[streamid].stream_type,
1542
- volume: streamIdRtcPlayerInfo[streamid].volume.slice(0,streamIdRtcPlayerInfo[streamid].volume.length-1)
1543
- });
1544
- resetStreamIdRtcPlayerInfo(streamid);
1545
- });
1546
- if (isFirstHeartBeatReport) {
1547
- try {
1548
- // When pushing muted, filter out the audio frame rate and bitrate and report them as 0;
1549
- // if (!isNoticeMicVolumeZego) {
1550
- // heartBeatDataReportObj['audio_fps'] = [];
1551
- // heartBeatDataReportObj['audio_bitrate'] = [];
1552
- // }
1553
-
1554
- // Determine the push-stream type; could be extracted into a helper function later
1555
- if(zbysdk.deviceStatus.camera && zbysdk.deviceStatus.microphone) {
1556
- zbysdk.deviceStatus.stream_type = 'both';
1557
- } else if(!zbysdk.deviceStatus.camera && zbysdk.deviceStatus.microphone) {
1558
- zbysdk.deviceStatus.stream_type = 'audio';
1559
- } else if(zbysdk.deviceStatus.camera && !zbysdk.deviceStatus.microphone) {
1560
- zbysdk.deviceStatus.stream_type = 'video';
1561
- } else {
1562
- zbysdk.deviceStatus.stream_type = 'none';
1563
- };
1564
- // console.log('push_type222',zbysdk.deviceStatus,zbysdk.deviceStatus.stream_type);
1565
- defaultApi.writeLog(`push_type_trtc,camera: ${zbysdk.deviceStatus.camera},microphone: ${zbysdk.deviceStatus.microphone},type: ${zbysdk.deviceStatus.stream_type},a_fps: ${dataTrtcCapture.fps}, a_bit: ${dataTrtcCapture.audioBitrate}, v_fps: ${dataTrtcCapture.fps}, v_bit: ${dataTrtcCapture.videoBitrate}`);
1566
- // if (window.zbyAVSDK_init_params.zego.role === 'teacher') {
1567
- dataReport.heartbeat({
1568
- ...{...heartBeatDataReportObj, push_type: zbysdk.deviceStatus.stream_type, volume: heartBeatDataReportObj.volume.slice(0,heartBeatDataReportObj.volume.length-1)},
1569
- // pull_info: JSON.stringify(pullInfo),
1570
- pull_info: pullInfo,
1571
- // cpu_rate: util.toFixed(cpuRate/rateCount),
1572
- // mem_rate: util.toFixed(memRate/rateCount),
1573
- });
1574
- // } else {
1575
- // dataReport.heartbeat({
1576
- // ...heartBeatDataReportObj,
1577
- // pull_info: JSON.stringify(pullInfo),
1578
- // cpu_rate: util.toFixed(cpuRate/rateCount),
1579
- // mem_rate: util.toFixed(memRate/rateCount),
1580
- // app_cpu: util.toFixed(appCpuRate/rateCount),
1581
- // app_mem: util.toFixed(appMemUsed/rateCount),
1582
- // video_mem: gpus
1583
- // });
1584
- // }
1585
- } catch (e) {
1586
- console.log(e);
1587
- }
1588
- }
1589
- resetHeartBeatDataReportObj();
1590
- // cpuRate = 0;
1591
- // memRate = 0;
1592
- // appCpuRate = 0;
1593
- // appMemUsed = 0;
1594
-
1595
- // clearInterval(rateTimer);
1596
- // }
1597
- // }, 10 * 1000);
1598
- };
1599
-
1600
- const heartBeatDataReport = (type) => {
1601
- try {
1602
- if (type === 'start' && !heartBeatDataReportTimer) {
1603
- _heartBeatDataReport();
1604
- heartBeatDataReportTimer = setInterval(() => {
1605
- _heartBeatDataReport();
1606
- }, 30 * 1000);
1607
- }
1608
- if (type === 'stop') {
1609
- clearInterval(heartBeatDataReportTimer);
1610
- heartBeatDataReportTimer = null;
1611
- }
1612
- } catch (error) {
1613
- console.log(error);
1614
- }
1615
- };
1616
-
1617
- const heartBeatDataReportCalc = (name, _data) => {
1618
- console.log('hsgmzk111',name,_data);
1619
- let _d = JSON.parse(_data.json);
1620
- console.log('hsgmzk222',name,_d);
1621
- // 拉流
1622
- const pullKeys = ['fps', 'videoBitrate', 'afps', 'audioBitrate'];
1623
- if (name === 'onRemoteStatistics') {
1624
- console.log('hsgmzk222',streamIdRtcPlayerInfo,_d.streamId)
1625
- if (streamIdRtcPlayerInfo && streamIdRtcPlayerInfo.hasOwnProperty(_d.streamId)) {
1626
- console.log('hsgmzk22211');
1627
- let streamid = _d.streamId;
1628
- let isReport = true;
1629
- // streamIdRtcPlayerInfo[streamid].count++;
1630
- heartBeatRealKeys.forEach((realKey, index) => {
1631
- if (_d.hasOwnProperty(pullKeys[index])) {
1632
- if (streamIdRtcPlayerInfo[streamid][realKey] === undefined) {
1633
- streamIdRtcPlayerInfo[streamid][realKey] = [];
1634
- isReport = false;
1635
- }
1636
- // streamIdRtcPlayerInfo[streamid][realKey].push(parseFloat(parseInt(item[pullKeys[index]])));
1637
- }
1638
- });
1639
- console.log('hsgmzk333',_d)
1640
- if (isReport) {
1641
- let audio_fps_trtc_pull = 0;
1642
- let audio_bitrate_trtc_pull = 0;
1643
- let video_fps_trtc_pull = 0;
1644
- let video_bitrate_trtc_pull = 0;
1645
- console.log('hsgmzk444',_d)
1646
- if(streamIdRtcPlayerInfo1[streamid].audio_type && streamIdRtcPlayerInfo1[streamid].video_type) {
1647
- console.log('hsgmzk555',_d)
1648
- audio_fps_trtc_pull = parseFloat(parseInt(_d.afps));
1649
- audio_bitrate_trtc_pull = parseFloat(parseInt(_d.audioBitrate));
1650
- video_fps_trtc_pull = parseFloat(parseInt(_d.fps));
1651
- video_bitrate_trtc_pull = parseFloat(parseInt(_d.videoBitrate));
1652
- } else if(!streamIdRtcPlayerInfo1[streamid].audio_type && streamIdRtcPlayerInfo1[streamid].video_type) {
1653
- video_fps_trtc_pull = parseFloat(parseInt(_d.fps));
1654
- video_bitrate_trtc_pull = parseFloat(parseInt(_d.videoBitrate));
1655
- } else if(streamIdRtcPlayerInfo1[streamid].audio_type && !streamIdRtcPlayerInfo1[streamid].video_type) {
1656
- audio_fps_trtc_pull = parseFloat(parseInt(_d.afps));
1657
- audio_bitrate_trtc_pull = parseFloat(parseInt(_d.audioBitrate));
1658
- }
1659
- streamIdRtcPlayerInfo[streamid].audio_fps.push(audio_fps_trtc_pull);
1660
- streamIdRtcPlayerInfo[streamid].audio_bitrate.push(audio_bitrate_trtc_pull);
1661
- streamIdRtcPlayerInfo[streamid].video_fps.push(video_fps_trtc_pull);
1662
- streamIdRtcPlayerInfo[streamid].video_bitrate.push(video_bitrate_trtc_pull);
1663
- streamIdRtcPlayerInfo[streamid].pull_loss.push(_d.packetLoss);
1664
- streamIdRtcPlayerInfo[streamid].pull_delay.push(_d.rtt);
1665
-
1666
- streamIdRtcPlayerInfo[streamid].ctime.push(Math.round((new Date().getTime()+dataReport.timestamp)/1000));
1667
- streamIdRtcPlayerInfo[streamid].video_ifg += _d.videoBlockRate;
1668
- streamIdRtcPlayerInfo[streamid].audio_ifg += _d.audioBlockRate;
1669
- // console.log('hsg_tpull_keys',JSON.parse(JSON.stringify(streamIdRtcPlayerInfo[streamid])),_d.avTimestampDiff);
1670
- }
1671
- }
1672
- }
1673
- // 推流
1674
- const pushKeys = ['fps', 'videoBitrate', 'afps', 'audioBitrate'];
1675
- if (name === 'onLocalStatistics') {
1676
- // console.log('hsgmzk',_d);
1677
- // heartBeatDataReportObj.count++;
1678
- // heartBeatRealKeys.forEach((realKey, index) => {
1679
- // if (heartBeatDataReportObj.hasOwnProperty(realKey) && _d.hasOwnProperty(pushKeys[index])) {
1680
- // heartBeatDataReportObj[realKey].push(parseFloat(parseInt(_d[pushKeys[index]])));
1681
- // }
1682
- // });
1683
- let audio_fps_trtc_push = 0;
1684
- let audio_bitrate_trtc_push = 0;
1685
- let video_fps_trtc_push = 0;
1686
- let video_bitrate_trtc_push = 0;
1687
- dataTrtcCapture =_d;
1688
-
1689
- if(zbysdk.deviceStatus.camera && zbysdk.deviceStatus.microphone) {
1690
- audio_fps_trtc_push = parseFloat(parseInt(_d.afps));
1691
- audio_bitrate_trtc_push = parseFloat(parseInt(_d.audioBitrate));
1692
- video_fps_trtc_push = parseFloat(parseInt(_d.fps));
1693
- video_bitrate_trtc_push= parseFloat(parseInt(_d.videoBitrate));
1694
- } else if(!zbysdk.deviceStatus.camera && zbysdk.deviceStatus.microphone) {
1695
- audio_fps_trtc_push = parseFloat(parseInt(_d.afps));
1696
- audio_bitrate_trtc_push = parseFloat(parseInt(_d.audioBitrate));
1697
- } else if(zbysdk.deviceStatus.camera && !zbysdk.deviceStatus.microphone) {
1698
- video_fps_trtc_push = parseFloat(parseInt(_d.fps));
1699
- video_bitrate_trtc_push = parseFloat(parseInt(_d.videoBitrate));
1700
- }
1701
- // console.log('hsg6666666',audio_fps_trtc_push,audio_bitrate_trtc_push,video_fps_trtc_push,video_bitrate_trtc_push);
1702
- heartBeatDataReportObj.audio_fps.push(audio_fps_trtc_push);
1703
- heartBeatDataReportObj.audio_bitrate.push(audio_bitrate_trtc_push);
1704
- heartBeatDataReportObj.video_fps.push(video_fps_trtc_push);
1705
- heartBeatDataReportObj.video_bitrate.push(video_bitrate_trtc_push);
1706
- heartBeatDataReportObj.push_loss.push(_d.packetLoss);
1707
- heartBeatDataReportObj.push_delay.push(_d.rtt);
1708
-
1709
- heartBeatDataReportObj.ctime.push(Math.round((new Date().getTime()+dataReport.timestamp)/1000));
1710
- // console.log('hsg_td5',JSON.parse(JSON.stringify(heartBeatDataReportObj)));
1711
- }
1712
- };
1713
-
1714
- //推流字段
1715
- const resetHeartBeatDataReportObj = () => {
1716
- // console.log('hsg_td55556重置');
1717
- heartBeatDataReportObj = {
1718
- ctime: [],
1719
- push_type: 'none',
1720
- video_fps: [],
1721
- video_bitrate: [],
1722
- audio_fps: [],
1723
- audio_bitrate: [],
1724
- push_loss: [],
1725
- push_delay: [],
1726
- volume: ''
1727
- };
1728
- };
1729
-
1730
- //记录拉流类型,用作数据上报
1731
- const resetStreamIdRtcPlayerInfo1 = (streamId) => {
1732
- streamIdRtcPlayerInfo1[streamId] = {
1733
- audio_type: false,
1734
- video_type: false,
1735
- stream_type: 'none'
1736
- };
1737
- // console.log('hsgshgs_heartbeat',streamIdRtcPlayerInfo1);
1738
- };
1739
-
1740
- resetHeartBeatDataReportObj();
1741
-
1742
- //拉
1743
- const resetStreamIdRtcPlayerInfo = (streamId) => {
1744
- streamIdRtcPlayerInfo[streamId] = {
1745
- pull_uid: util.getUidByStreamId(streamId),
1746
- streamid: streamId,
1747
- ctime: [],
1748
- pull_type: streamIdRtcPlayerInfo1[streamId].stream_type,
1749
- volume: '',
1750
- video_fps: [],
1751
- video_bitrate: [],
1752
- audio_fps: [],
1753
- audio_bitrate: [],
1754
- pull_loss: [],
1755
- pull_delay: [],
1756
- //音画不同步字段
1757
- avtimestampdiff:[],
1758
- // 累加
1759
- audio_ifg: 0,
1760
- video_ifg: 0
1761
- };
1762
- };
1763
-
1764
- export default {
1765
- init,
1766
- setCameraCaptureResolution,
1767
- setCameraEncodeResolution,
1768
- setCameraEncodeFps,
1769
- setCameraEncodeBiarate,
1770
- getCameraResolution,
1771
- getMicrophoneDeviceListTrtc,
1772
- setMicrophoneDevice,
1773
- openOrCloseMicrophone,
1774
- getCurrentMicrophoneVolume,
1775
- setCurrentMicrophoneVolume,
1776
- setCurrentCameraDevice,
1777
- getCameraDeviceListTrtc,
1778
- setVideoDevice,
1779
- openOrCloseCamera,
1780
- getSpeakerDeviceListTrtc,
1781
- setSpeakerDevice,
1782
- getCurrentSpeakerVolume,
1783
- setCurrentSpeakerVolume,
1784
- setAudioSpeakerCapture,
1785
- startLocalOrRemotePreview,
1786
- startPush,
1787
- stopPush,
1788
- startPlay,
1789
- stopPlay,
1790
- initPullFlow,
1791
- pullAudioFlow,
1792
- muteRemoteVideo,
1793
- muteRemoteAudio,
1794
- stopPlayAll,
1795
- sendSEIMsg,
1796
- startMultiScreen,
1797
- enableHighlight,
1798
- getAllChannelIndex,
1799
- leaveRoom,
1800
- teacherStartLinkMic,
1801
- teacherStopLinkMic
1
+ import dataReport from '../network/dataReport.js';
2
+ import defaultApi from '../default';
3
+ import NOTICE from '../notice';
4
+ import { deviceListReport } from './device.js';
5
+ import util from '../util/util';
6
+ import { compareS } from '../util/sessionStorage.js';
7
+
8
+
9
+ // 扩展标识
10
+ const extensionId = 'trtc_ext';
11
+ // 端提供的 API 入口
12
+ const EM = window.EM;
13
+ // 是否执行过 TRTC 的 AddListener 的标志
14
+ let hasAddListener = false;
15
+ // 监听id
16
+ let EMListenerId = 0;
17
+ //静音推流时,音量值是否上抛上层标志 false=没有声音不往上层通知/true=有声音有上层通知
18
+ let isNoticeMicVolumeTRTC = false;
19
+ // rtc小班课=0;rtc大班课=1
20
+ const classType = 0;
21
+ // 维护的一份拉流的 streamId 与本地预览通道的映射表
22
+ let streamIdToPreviewId = {};
23
+ let previewIdToStreamId = {};
24
+ // 拉流的播放通道初始化从 0 开始依次递增至99(共100路流),-1 为推流的本地视频预览
25
+ // const uiChnIndexs = new Array(50).fill(0).map((a, b) => b + 1).reverse();
26
+ const uiChnIndexs = new Array(100).fill(0).map((a, b) => b).reverse();
27
+ // 维护的一份当前正在使用的音频设备的 deviceId 的列表
28
+ let usingAudioDeviceId = {
29
+ speaker: '',
30
+ microphone: ''
31
+ };
32
+ // 心跳
33
+ let heartBeatDataReportObj = {};
34
+ let heartBeatDataReportTimer = null;
35
+ let isFirstHeartBeatReport = false;
36
+ let streamIdRtcPlayerInfo = {};
37
+ let streamIdRtcPlayerInfo1 = {};
38
+ //记录底层推流回调的值
39
+ let dataTrtcCapture = {};
40
+ let qualityLocalArr = [];
41
+ let qualityRemoteArr = [];
42
+
43
+ const callMethod = (name, args) => {
44
+ // EM 是寄宿于端的,浏览器中并不存在,为防止报错需要先进行能力检测
45
+ if (EM) {
46
+ return new Promise((resolve, reject) => {
47
+ // let noneCamera = (name === 'SetVideoDevice' && !args.pszDeviceID);
48
+ // let noneMicrophone = (name === 'SetAudioDevice' && args.deviceType === 0 && !args.pszDeviceID);
49
+ // let noneSpeaker = (name === 'SetAudioDevice' && args.deviceType === 1 && !args.pszDeviceID);
50
+ // if (noneCamera || noneMicrophone || noneSpeaker) {
51
+ // return resolve();
52
+ // }
53
+ EM.CallMethod(
54
+ extensionId,
55
+ name,
56
+ JSON.stringify({...args, classType}),
57
+ (code, msg) => {
58
+ defaultApi.writeLog(`${name} Code: ${code}\nMessage: ${msg}\nParams: ${JSON.stringify({...args, classType})}`);
59
+ resolve({
60
+ code,
61
+ msg
62
+ });
63
+ }
64
+ );
65
+ });
66
+ }
67
+ };
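A minimal usage sketch of the Promise wrapper above (illustrative only, not part of the package source; 'SetCurrentSpeakerVolume' is simply one of the method names this module already routes through callMethod, and the volume value is a placeholder):

    // Resolves with {code, msg} from EM.CallMethod's callback, or stays undefined
    // when window.EM is absent (i.e. when running in a plain browser).
    const setSpeakerVolumeExample = async () => {
      const ret = await callMethod('SetCurrentSpeakerVolume', { volume: 80 });
      if (ret) {
        console.log('code:', ret.code, 'msg:', ret.msg);
      }
    };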
68
+
69
+ const loadTrtc = (extensionVersion) => {
70
+ // EM 是寄宿于端的,浏览器中并不存在,为防止报错需要先进行能力检测
71
+ if (EM) {
72
+ return new Promise((resolve, reject) => {
73
+ removerListener();
74
+ console.log('jjjjjjj3',extensionId,extensionVersion);
75
+ EM.Load(
76
+ extensionId,
77
+ extensionVersion,
78
+ false,
79
+ (code, msg) => {
80
+ console.log('jjjjjjj4')
81
+ defaultApi.writeLog(`loadTrtc Code: ${code}\nMessage: ${msg}`);
82
+ addListener();
83
+ resolve();
84
+ }
85
+ );
86
+ });
87
+ }
88
+ };
89
+
90
+ //卸载监听
91
+ const removerListener = () => {
92
+ hasAddListener = false;
93
+ defaultApi.writeLog(`TRTC::action--removerListener EMListenerId:${EMListenerId}`);
94
+ EM.RemoverListener(extensionId, EMListenerId, (ec, content) => {});
95
+ EMListenerId = 0;
96
+ };
97
+
98
+ //加载监听
99
+
100
+ /**
101
+ * @function 添加扩展监听机制
102
+ * @param userId:Number 用户 id,必选
103
+ * @param userName:String 用户名,必选
104
+ * @param roomId:String 频道(房间) id,必选
105
+ * @param nNetType:Number 网络类型,可选,默认为 1
106
+ * @return void
107
+ */
108
+ const addListener = () => {
109
+ // EM 是寄宿于端的,浏览器中并不存在,为防止报错需要先进行能力检测
110
+ if (EM && !hasAddListener) {
111
+ hasAddListener = true;
112
+ EM.AddListener(extensionId, (event, data) => {
113
+ if (data && data.indexOf(extensionId) > -1) {
114
+ try {
115
+ EMListenerId = JSON.parse(data)[extensionId];
116
+ defaultApi.writeLog(`TRTC::addListener-- EMListenerId: ${EMListenerId}`);
117
+ } catch (error) {}
118
+ }
119
+ if (!event || !data) {
120
+ return;
121
+ }
122
+ let _data = JSON.parse(data);
123
+ switch (event) {
124
+ //推流相关
125
+ //直播推流器错误通知,推流器出现错误时,会回调该通知
126
+ case 'onLocalError':
127
+ defaultApi.writeLog(`TRTC::addListener-- onLocalError: ${JSON.stringify(_data)}`);
128
+ NOTICE.pushStreamError({
129
+ errorStreamType:_data.streamType,
130
+ code: _data.code,
131
+ errorMsg: _data
132
+ })
133
+ dataReport.pushStreamError({
134
+ errorStreamType:_data.streamType,
135
+ code: _data.code
136
+ });
137
+ break;
138
+ //直播推流器警告通知
139
+ case 'onLocalWarning':
140
+ defaultApi.writeLog(`TRTC::addListener-- onLocalWarning: ${JSON.stringify(_data)}`);
141
+ NOTICE.pushStreamWarning({
142
+ warnStreamType:_data.streamType,
143
+ code: _data.code
144
+ })
145
+ dataReport.pushStreamWarning({
146
+ warnStreamType:_data.streamType,
147
+ code: _data.code
148
+ });
149
+ break;
150
+ //首帧音频推送完成的回调通知
151
+ case 'onSendLocalFirstAudioFrame':
152
+ defaultApi.writeLog(`TRTC::addListener-- onSendLocalFirstAudioFrame: ${JSON.stringify(_data)}`);
153
+ break;
154
+ //首帧视频推送完成的回调通知
155
+ case 'onSendLocalFirstVideoFrame':
156
+ defaultApi.writeLog(`TRTC::addListener-- onSendLocalFirstVideoFrame: ${JSON.stringify(_data)}`);
157
+ NOTICE.pushFlowSuccess({code:0,publish_streamid: _data.strStreamId});
158
+ dataReport.publishResult({
159
+ code: '0',
160
+ // publish_streamid: window.zbyAVSDK_init_params.trtc.streamId,
161
+ });
162
+ break;
163
+ //麦克风采集音量值回调
164
+ case 'onLocalMicrophoneVolumeUpdate':
165
+ // defaultApi.writeLog(`TRTC::addListener-- onLocalMicrophoneVolumeUpdate: ${JSON.stringify(_data)}`);
166
+ if (isNoticeMicVolumeTRTC) {
167
+ NOTICE.captureMicVolumeChanged({
168
+ volume: Math.round(_data.volume)
169
+ });
170
+ }
171
+ // heartBeatDataReportObj.volume.push(Math.round(_data.pCaptureSoundLevel[0].soundLevel));
172
+ heartBeatDataReportObj.volume = heartBeatDataReportObj.volume + Math.round(_data.volume) + ',';
173
+ break;
175
+ //推流器连接状态回调通知。推流器连接状态 0 与服务器断开连接/1 正在连接服务器/2 连接服务器成功/3 重连服务器中
176
+ case 'onLocalConnectStatusUpdate':
177
+ defaultApi.writeLog(`TRTC::addListener-- onLocalConnectStatusUpdate: ${JSON.stringify(_data)}`);
178
+ dataReport.localConnectStatus({
179
+ connectStatuStreamType:_data.streamType,
180
+ state: _data.state
181
+ });
182
+ break;
183
+ //网络质量的实时统计回调
184
+ // 统计回调每间隔2秒抛出一次,用于通知 SDK 感知到的当前网络的质量。
185
+ // * SDK 会使用一组内嵌的自研算法对当前网络的延迟高低、带宽大小以及稳定情况进行评估,并计算出一个的评估结果:
186
+ // * 如果评估结果为 1(Excellent) 代表当前的网络情况非常好,如果评估结果为 6(Down)代表当前网络无法支撑 SDK 的正常使用。
187
+ // * @param quality 网络状态。0未检测;1当前网络非常好;2当前网络比较好;3当前网络一般;4当前网络较差;5当前网络很差;6当前网络不满足 TRTC 的最低要求
188
+ case 'onLocalNetworkQuality':
189
+ // defaultApi.writeLog(`TRTC::addListener-- onLocalNetworkQuality: ${_data.quality}`);
190
+ if(_data.quality == 4 || _data.quality == 5 || _data.quality == 6) {
191
+ NOTICE.localNetworkQuality({code: _data.quality});
192
+ dataReport.localNetworkQuality({
193
+ code: _data.quality,
194
+ // publish_streamid: window.zbyAVSDK_init_params.trtc.streamId,
195
+ });
196
+ }
197
+ qualityLocalArr.push(_data.quality);
198
+ break;
199
+ //直播推流器统计数据回调 number类型
200
+ //appCpu 当前 App 的 CPU 使用率(%)
201
+ // systemCpu 当前系统的 CPU 使用率(%)
202
+ // width 视频宽度
203
+ // height 视频高度
204
+ // fps 帧率(fps)
205
+ // audioBitrate 音频码率(Kbps)
206
+ // videoBitrate 视频码率(Kbps)
207
+ // case 'onLocalStatisticsUpdate':
208
+ // defaultApi.writeLog(`TRTC::addListener-- onLocalStatisticsUpdate: ${_data}`);
209
+ // break;
210
+
211
+ //拉流相关
212
+ //远端流错误通知,拉流出现错误时,会回调该通知
213
+ case 'onRemoteError':
214
+ defaultApi.writeLog(`TRTC::addListener-- onRemoteError: ${JSON.stringify(_data)}`);
215
+ NOTICE.pullStreamError({
216
+ strErrorStreamId: _data.strStreamId,
217
+ code:_data.code
218
+ })
219
+ //是否需要数据上报?@yuqian
220
+ break;
221
+ //远端流警告通知,拉流出现警告时,会回调该通知。
222
+ case 'onRemoteWarning':
223
+ defaultApi.writeLog(`TRTC::addListener-- onRemoteWarning: ${JSON.stringify(_data)}`);
224
+ NOTICE.pullStreamWarning({
225
+ strWarnStreamId: _data.strStreamId,
226
+ code:_data.code
227
+ })
228
+ //是否需要数据上报?@yuqian
229
+ break;
230
+ //拉到音频首帧
231
+ case 'onRemoteAudioStatus':
232
+ // defaultApi.writeLog(`TRTC::addListener-- onRemoteAudioStatus: ${_data}`);
233
+ console.log('onRemoteAudioStatus',_data);
234
+ try{
235
+ NOTICE.firstAudioSize({
236
+ streamId: _data.strStreamId,
237
+ userId: util.getUidByStreamId(_data.strStreamId)
238
+ });
239
+ dataReport.firstAudioSize({
240
+ pull_streamid: _data.strStreamId,
241
+ pull_uid: util.getUidByStreamIdDr(_data.strStreamId),
242
+ code:'0'
243
+ });
244
+ } catch (e) { };
245
+ break;
246
+ //远端流音量大小
247
+ case 'onRemoteAudioVolume':
248
+ // defaultApi.writeLog(`TRTC::addListener-- onRemoteAudioVolume: ${JSON.stringify(_data)}`);
249
+ let cbData = {
250
+ streamId: _data.strStreamId,
251
+ volume: _data.volume
252
+ }
253
+ NOTICE.playerVolumeChanged(cbData);
254
+ NOTICE.volumeChange(cbData);
255
+ // currStreamIdRtcPlayerInfo && currStreamIdRtcPlayerInfo.volume.push(Math.round(item.soundLevel));
256
+ // streamIdRtcPlayerInfo[_data.strStreamId] && streamIdRtcPlayerInfo[_data.strStreamId].volume.push(Math.round(item.soundLevel));
257
+ if(streamIdRtcPlayerInfo[_data.strStreamId]) {
258
+ streamIdRtcPlayerInfo[_data.strStreamId].volume = streamIdRtcPlayerInfo[_data.strStreamId].volume + Math.round(_data.volume)+ ',';
259
+ }
260
+ break;
261
+ // //远端流收到首帧音频
262
+ // case 'onRemoteAudioStart':
263
+ // defaultApi.writeLog(`TRTC::addListener-- onRemoteAudioStart: ${_data}`);
264
+ // break;
265
+ // //远端流停止推送音频
266
+ // case 'onRemoteAudioStop':
267
+ // defaultApi.writeLog(`TRTC::addListener-- onRemoteAudioStop: ${_data}`);
268
+ // break;
269
+ //mute 视频状态,true:远端开始推流,并拉流收到首帧;false:远端停止推流。
270
+ case 'onRemoteVideoStatus':
271
+ defaultApi.writeLog(`TRTC::addListener-- onRemoteVideoStatus: ${JSON.stringify(_data)}`);
272
+ console.log('onRemoteVideoStatus',_data);
273
+ try{
274
+ NOTICE.pullFlowResult({code:0,pull_streamid: _data.strStreamId});
275
+ NOTICE.firstVideoSize({
276
+ streamId: _data.strStreamId,
277
+ // userId: _data.strStreamId.split('_').length == 5 ? _data.strStreamId : util.getUidByStreamId(_data.strStreamId)
278
+ userId: util.getUidByStreamId(_data.strStreamId)
279
+ });
280
+ dataReport.firstVideoSize({
281
+ pull_streamid: _data.strStreamId,
282
+ pull_uid: util.getUidByStreamIdDr(_data.strStreamId),
283
+ code:'0'
284
+ });
285
+ } catch (e) { };
286
+ break;
287
+ //sei
288
+ case 'onRecvSEIMsg':
289
+ // defaultApi.writeLog(`TRTC::addListener-- onRecvSEIMsg: ${_data}`);
290
+ break;
291
+ //网络质量的实时统计回调
292
+ // * 该统计回调每间隔2秒抛出一次,用于通知 SDK 感知到的当前网络的质量。
293
+ // * SDK 会使用一组内嵌的自研算法对当前网络的延迟高低、带宽大小以及稳定情况进行评估,并计算出一个的评估结果:
294
+ // * 如果评估结果为 1(Excellent) 代表当前的网络情况非常好,如果评估结果为 6(Down)代表当前网络无法支撑 SDK 的正常使用。
295
+ // *
296
+ // * @param strStreamId 流ID,表示远端用户信息。
297
+ // * @param quality 网络状态。0未检测;1当前网络非常好;2当前网络比较好;3当前网络一般;4当前网络较差;5当前网络很差;6当前网络不满足 TRTC 的最低要求
298
+ case 'onRemoteNetworkQuality':
299
+ // defaultApi.writeLog(`TRTC::addListener-- onRemoteNetworkQuality: ${_data}`);
300
+ if(_data.quality == 4 || _data.quality == 5 || _data.quality == 6) {
301
+ NOTICE.remoteNetworkQuality({code: _data.quality});
302
+ dataReport.remoteNetworkQuality({
303
+ code: _data.quality,
304
+ // publish_streamid: window.zbyAVSDK_init_params.trtc.streamId,
305
+ });
306
+ }
307
+ qualityRemoteArr.push(_data.quality);
308
+ break;
309
+ // 转推 CDN 上发布音视频流的事件回调
310
+ // * @param state 0转推成功,1表示失败
311
+ // * @param err 0表示成功,其余值表示失败
312
+ // * @param errMsg 具体错误原因
313
+ case 'onRtmpStreamingStateChanged':
314
+ defaultApi.writeLog(`TRTC::addListener-- onRtmpStreamingStateChanged: ${JSON.stringify(_data)}`);
315
+ break;
316
+ //* 本地麦克风设备的通断状态发生变化(仅适用于桌面系统)
317
+ // * @param deviceId 设备 ID
318
+ // * @param state 通断状态,0:设备已添加;1:设备已被移除;2:设备已启用。
319
+ case 'onMicphoneDeviceChanged':
320
+ defaultApi.writeLog(`TRTC::addListener-- onMicphoneDeviceChanged: ${JSON.stringify(_data)}`);
321
+ setMicrophoneDevice({deviceId: _data.state ==1 ? '' : _data.deviceId, operationType: 'hotPlug', deviceState: _data.state, changedDeviceId: _data.deviceId});
322
+ break;
323
+ //* 本地摄像头设备的通断状态发生变化(仅适用于桌面系统)
324
+ // * @param deviceId 设备 ID
325
+ // * @param state 通断状态,0:设备已添加;1:设备已被移除;2:设备已启用。
326
+ case 'onVideoDeviceChanged':
327
+ defaultApi.writeLog(`TRTC::addListener-- onVideoDeviceChanged: ${JSON.stringify(_data)}`);
328
+ setVideoDevice({deviceId: _data.state ==1 ? '' : _data.deviceId, operationType: 'hotPlug', deviceState: _data.state, changedDeviceId: _data.deviceId});
329
+ break;
330
+ //扬声器热插拔
331
+ case 'onSpeakerDeviceChanged':
332
+ defaultApi.writeLog(`TRTC::addListener-- onSpeakerDeviceChanged: ${JSON.stringify(_data)}`);
333
+ setSpeakerDevice({deviceId: _data.state ==1 ? '' : _data.deviceId, operationType: 'hotPlug', deviceState: _data.state, changedDeviceId: _data.deviceId});
334
+ break;
335
+ //推流的统计数据。5秒一个周期
336
+ case 'onLocalStatistics':
337
+ // defaultApi.writeLog(`TRTC::addListener-- onLocalStatistics: ${_data}`);
338
+ console.log('onLocalStatistics',JSON.parse(_data.json));
339
+ NOTICE.pushLossAndDelay({
340
+ // userId: util.getUidByStreamId(_data.strStreamId),
341
+ delay:_data.rtt,
342
+ lostrate:_data.packetLoss
343
+ });
344
+ heartBeatDataReportCalc(event, _data);
345
+ break;
346
+ //拉流的统计数据。5秒一个周期
347
+ case 'onRemoteStatistics':
348
+ // defaultApi.writeLog(`TRTC::addListener-- onRemoteStatistics: ${_data}`);
349
+ console.log('onRemoteStatistics',JSON.parse(_data.json));
350
+ try {
351
+ NOTICE.playLossAndDelay({
352
+ userId: util.getUidByStreamId(_data.streamId),
353
+ delay:_data.rtt,
354
+ lostrate:_data.packetLoss
355
+ });
356
+ }catch(e){}
357
+ heartBeatDataReportCalc(event, _data);
358
+ break;
359
+ case 'onStatisticsUpdate':
360
+ // defaultApi.writeLog(`TRTC::addListener-- onStatisticsUpdate: ${JSON.stringify(_data)}`);
361
+ // console.log('onStatisticsUpdate',JSON.parse(_data.json).local);
362
+ // console.log('onStatisticsUpdate',JSON.parse(_data.json));
363
+ // heartBeatDataReportCalc(event, _data);
364
+ break;
365
+ }
366
+ if (zbysdk.openListenerLog) {
367
+ console.log(`[zby-live-sdk]--TRTC::Listener:: event: ${event}, data: ${data}`);
368
+ }
369
+ });
370
+ }
371
+ };
372
+
373
+
374
+ //初始化相关
375
+ const init = async (args) => {
376
+ defaultApi.writeLog(`avsdk TRTC::init ${JSON.stringify(args)}`);
377
+ const usedDevices = window.zbyAVSDK_device_checker_init;
378
+ const {devices} = args;
379
+ const _devices = {
380
+ camera: (devices && devices.camera) || (usedDevices && usedDevices.camera && usedDevices.camera.use) || '',
381
+ microphone: (devices && devices.microphone) || (usedDevices && usedDevices.microphone && usedDevices.microphone.use) || '',
382
+ speaker: (devices && devices.speaker) || (usedDevices && usedDevices.speaker && usedDevices.speaker.use) || ''
383
+ };
384
+ await loadTrtc(args.extensionVersion);
385
+ await startEngine(args.appId,args.userId, args.usersign);
386
+ if(args.role === 'teacher' && args.mode === 1){
387
+ //坐立模式开启外部采集器,站立关闭外部采集器,只有小组课坐立模式才会调用,通过这个把rtmp数据给到zego或者rtc(头像),开启了外部采集,zego不会再本地采集数据
388
+ await setEnableExternVideoCapture(true);
389
+ }
390
+ //小班课 需要加载采集插件的输入数据,为了拼接zego头像
391
+ if (args.classMode === 1 || args.classMode === 2) {
392
+ defaultApi.writeLog('loadCollectionInputEntry')
393
+ loadCollectionInputEntry();
394
+ };
395
+ await setCameraEncodeBiarate(args.encodeCaptureBitrate);
396
+ await setCameraEncodeFps(args.encodeCaptureFps);
397
+ await setCameraCaptureResolution(args.previewResolutionWidth, args.previewResolutionHeight);
398
+ await setCameraEncodeResolution(args.encodedResolutionWidth, args.encodedResolutionHeight);
399
+ // await setCameraEncodeBiarate(args.screenSameBitrate,1);
400
+ // await setCameraEncodeFps(args.screenSameFps, 1);
401
+ // await setCameraCaptureResolution(args.screenSameResolutionWidth, args.screenSameResolutionHeight, 1);
402
+ // await setCameraEncodeResolution(args.screenSameResolutionWidth, args.screenSameResolutionHeight, 1);
403
+
404
+ // await setCameraEncodeBiarate(500000);
405
+ // await setCameraEncodeFps(15);
406
+ // await setCameraCaptureResolution(640, 480);
407
+ // await setCameraEncodeResolution(320, 240);
408
+ await setDefaultDevice(_devices,'default');
409
+ await setMicphoneVolumInterval(500);
410
+ window.current_sdk_type = 'trtc';
411
+ defaultApi.writeLog('trtc init finished current_sdk_type : trtc');
412
+ };
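A hedged sketch of calling init() from an async context; the field names below are the ones the function reads above, while all of the values are placeholders:

    await init({
      extensionVersion: '1.0.0',                                 // forwarded to loadTrtc
      appId: 1400000000, userId: 10001, usersign: '<usersign>',  // forwarded to startEngine
      role: 'teacher', mode: 1,                                  // teacher + mode 1 enables external video capture
      classMode: 1,                                              // 1 or 2 also loads the collection input entry
      encodeCaptureBitrate: 500000, encodeCaptureFps: 15,
      previewResolutionWidth: 640, previewResolutionHeight: 480,
      encodedResolutionWidth: 320, encodedResolutionHeight: 240,
      devices: { camera: '', microphone: '', speaker: '' }       // empty ids fall back to window.zbyAVSDK_device_checker_init
    });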
413
+
414
+
415
+ /**
416
+ * @function 初始化引擎
417
+ * @param appid:String TRTC 秘钥,必选
418
+ * @param userid:String
419
+ * @param usersign:String
420
+ * @return Promise | void
421
+ */
422
+ const startEngine = (appid, userid, usersign) => {
423
+ defaultApi.writeLog(`avsdk TRTC::StartEngine appid: ${appid} userid: ${userid} usersign: ${usersign}`);
424
+ return callMethod('StartEngine', {
425
+ appid,
426
+ userid:userid + '',
427
+ usersign
428
+ });
429
+ };
430
+
431
+ const setEnableExternVideoCapture = (enable) => {
432
+ defaultApi.writeLog(`avsdk TRTC::EnableExternVideoCapture`);
433
+ return callMethod('EnableExternVideoCapture', {
434
+ enable
435
+ });
436
+ };
437
+
438
+ /**
439
+ * @function 销毁引擎
440
+ * @return Promise | void
441
+ */
442
+ const destroyEngine = () => {
443
+ return callMethod('DestroyEngine', {});
444
+ };
445
+
446
+ //设置采集分辨率
447
+ const setCameraCaptureResolution = (width, height) => {
448
+ return callMethod('SetCameraCaptureResolution',{width, height});
449
+ }
450
+ //设置编码分辨率
451
+ const setCameraEncodeResolution = (width, height) => {
452
+ return callMethod('SetCameraEncodeResolution',{width, height})
453
+ }
454
+ //设置编码帧率
455
+ const setCameraEncodeFps = (fps) => {
456
+ return callMethod('SetCameraEncodeFps',{fps})
457
+ }
458
+ //设置编码码率
459
+ const setCameraEncodeBiarate = (bitrate) => {
460
+ return callMethod('SetCameraEncodeBiarate',{bitrate})
461
+ }
462
+ //获取指定摄像头的支持的分辨率
463
+ const getCameraResolution = (deviceId) => {
464
+ return callMethod('GetCameraResolution',{deviceId})
465
+ }
466
+
467
+
468
+ /**
469
+ * @function 设置默认的硬件设备,包括摄像头、麦克风以及扬声器
470
+ * @return Promise
471
+ */
472
+ export const setDefaultDevice = async (devices, operationType) => {
473
+ // 设置默认的摄像头
474
+ if (devices && devices.camera) {
475
+ if (zbyAVSDK_device_checker_init.camera.list.length == 0) {
476
+ await getCameraDeviceListTrtc();
477
+ }
478
+ await setVideoDevice({deviceId: devices.camera, operationType:`${operationType}_1`});
479
+ } else {
480
+ const cameraData = await getCameraDeviceListTrtc();
481
+ if (cameraData.length) {
482
+ let hasSetCamera = false;
483
+ for (let item of cameraData) {
484
+ if (item.isDefault) {
485
+ await setVideoDevice({deviceId: item.deviceId, operationType:`${operationType}_2`});
486
+ hasSetCamera = true;
487
+ break;
488
+ }
489
+ }
490
+ if (!hasSetCamera) {
491
+ await setVideoDevice({deviceId: cameraData[0].deviceId, operationType:`${operationType}_3`});
492
+ }
493
+ }
494
+ }
495
+ // 设置默认的麦克风
496
+ if (devices && devices.microphone) {
497
+ if (zbyAVSDK_device_checker_init.microphone.list.length == 0) {
498
+ await getMicrophoneDeviceListTrtc();
499
+ }
500
+ console.log('pp2');
501
+ await setMicrophoneDevice({deviceId: devices.microphone, operationType:`${operationType}_1`});
502
+ } else {
503
+ console.log('pp3');
504
+ const microPhoneData = await getMicrophoneDeviceListTrtc();
505
+ try {
506
+ if (microPhoneData.length) {
507
+ let hasSetMicrophone = false;
508
+ for (let item of microPhoneData) {
509
+ if (item.isDefault) {
510
+ await setMicrophoneDevice({deviceId: item.deviceId, operationType:`${operationType}_2`});
511
+ hasSetMicrophone = true;
512
+ break;
513
+ }
514
+ }
515
+ if (!hasSetMicrophone) {
516
+ await setMicrophoneDevice({deviceId: microPhoneData[0].deviceId, operationType:`${operationType}_3`});
517
+ }
518
+ }
519
+ } catch (e){
520
+ console.log(e);
521
+ }
522
+
523
+ }
524
+ // 设置默认的扬声器
525
+ if (devices && devices.speaker) {
526
+ await setSpeakerDevice({deviceId: devices.speaker, operationType:`${operationType}_1`});
527
+ } else {
528
+ const speakerData = await getSpeakerDeviceListTrtc();
529
+ if (speakerData.length) {
530
+ let hasSetSpeaker = false;
531
+ for (let item of speakerData) {
532
+ if (item.isDefault) {
533
+ await setSpeakerDevice({deviceId: item.deviceId, operationType:`${operationType}_2`});
534
+ hasSetSpeaker = true;
535
+ break;
536
+ }
537
+ }
538
+ if (!hasSetSpeaker) {
539
+ await setSpeakerDevice({deviceId: speakerData[0].deviceId, operationType:`${operationType}_3`});
540
+ }
541
+ }
542
+ }
543
+ };
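The selection order above is: an explicitly passed id, then the entry flagged isDefault, then the first device in the list. A small sketch, with a placeholder microphone id:

    // Explicit ids are used as-is; empty ids trigger the default/first-device fallback.
    await setDefaultDevice(
      { camera: '', microphone: 'builtin-mic-id', speaker: '' },
      'default'   // operationType; internally suffixed with _1/_2/_3 to record which branch picked the device
    );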
544
+
545
+ //麦克风相关
546
+ /**
547
+ * @function 开启麦克风
548
+ * @param intervalMs: Number
549
+ * @return Promise | void
550
+ */
551
+ const startMicrophone = (intervalMs = 500) => {
552
+ return callMethod('StartMicrophone', {intervalMs});
553
+ }
554
+
555
+ /**
556
+ * @function 关闭麦克风
557
+ * @return Promise | void
558
+ */
559
+ const stopMicrophone = () => {
560
+ return callMethod('StopMicrophone', {});
561
+ }
562
+
563
+ const openOrCloseMicrophone = async (operation) => {
564
+ isNoticeMicVolumeTRTC = operation;
565
+ if(operation) {
566
+ await startMicrophone();
567
+ } else {
568
+ await stopMicrophone();
569
+ }
570
+ }
571
+
572
+ /**
573
+ * @function 设置麦克风音量大小回调周期
574
+ * @param intervalMs: Number
575
+ * @return Promise | void
576
+ */
577
+ const setMicphoneVolumInterval = (intervalMs) => {
578
+ return callMethod('SetMicphoneVolumInterval',{intervalMs});
579
+ };
580
+
581
+ /**
582
+ * @function 获取麦克风设备列表
583
+ * @return Promise | void
584
+ */
585
+ const getMicrophoneDeviceList = () => {
586
+ return callMethod('GetMicrophoneDeviceList', {});
587
+ }
588
+
589
+ /**
590
+ * @function 获取麦克风列表
591
+ * @return Promise | void
592
+ */
593
+ const getMicrophoneDeviceListTrtc = async () => {
594
+ console.log('getMicrophoneDeviceListTrtc_microphoneListArr_trtc_ii');
595
+ let microphoneListArr = JSON.parse(JSON.parse((await getMicrophoneDeviceList()).msg).DeviceList);
596
+ console.log('getMicrophoneDeviceListTrtc_microphoneListArr_trtc_iii',microphoneListArr);
597
+ let microphoneList = [];
598
+ for (let i = 0, len = microphoneListArr.length; i < len; i++) {
599
+ microphoneList.push({
600
+ deviceId: microphoneListArr[i].id,
601
+ deviceName: microphoneListArr[i].name,
602
+ isDefault: microphoneListArr[i].default
603
+ });
604
+ deviceListReport.micList[microphoneListArr[i].szDeviceId] = microphoneListArr[i].szDeviceName;
605
+
606
+ }
607
+ if (!window.zbyAVSDK_device_checker_init) {
608
+ window.zbyAVSDK_device_checker_init = {};
609
+ }
610
+ if (!window.zbyAVSDK_device_checker_init.microphone) {
611
+ window.zbyAVSDK_device_checker_init.microphone = {};
612
+ }
613
+ window.zbyAVSDK_device_checker_init.microphone.hasTest = true;
614
+ window.zbyAVSDK_device_checker_init.microphone.list = microphoneList;
615
+ console.log('getMicrophoneDeviceListTrtc_microphoneListArr_trtc',microphoneList);
616
+ return microphoneList;
617
+ }
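The native DeviceList arrives double-JSON-encoded and is remapped to {deviceId, deviceName, isDefault}, with the result also cached on window.zbyAVSDK_device_checker_init.microphone. A consumption sketch:

    const mics = await getMicrophoneDeviceListTrtc();
    // e.g. [{ deviceId: '...', deviceName: 'Built-in Microphone', isDefault: true }, ...]
    const preferredMic = mics.find(m => m.isDefault) || mics[0];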
618
+
619
+
620
+ /**
621
+ * @function 设置指定音频(当前麦克风)设备
622
+ * @param deviceId:String 音频设备 id,必选
623
+ * @return Promise | void
624
+ */
625
+ const setCurrentMicrophoneDevice = (deviceId) => {
626
+ window.zbyAVSDK_device_checker_init.microphone.use = deviceId;
627
+ return callMethod('SetCurrentMicrophoneDevice', {
628
+ deviceId
629
+ });
630
+ }
631
+
632
+ /**
633
+ * @function 指定麦克风设备
634
+ * @param deviceId:String 麦克风设备 id,必选
635
+ * @param operationType:String 操作类型,可选
636
+ * @return Promise | void
637
+ */
638
+ const setMicrophoneDevice = async (args) => {
639
+ console.log('getMicrophoneDeviceListTrtc_microphoneListArr_trtc_i');
640
+ let {deviceId, operationType, deviceState, changedDeviceId} = args;
641
+ let deviceName = '';
642
+ let microPhoneData;
643
+ if (!deviceId) {
644
+ microPhoneData = await getMicrophoneDeviceListTrtc();
645
+ if (microPhoneData.length) {
646
+ let hasGetMicrophone = false;
647
+ for (let item of microPhoneData) {
648
+ if (item.isDefault) {
649
+ deviceId = item.deviceId;
650
+ deviceName = item.deviceName;
651
+ hasGetMicrophone = true;
652
+ break;
653
+ }
654
+ }
655
+ if (!hasGetMicrophone) {
656
+ deviceId = microPhoneData[0].deviceId;
657
+ deviceName = microPhoneData[0].deviceName;
658
+ }
659
+ } else {
660
+ deviceId = '';
661
+ }
662
+ if (deviceId === '') {
663
+ NOTICE.noDevice({
664
+ deviceType: 'microphone'
665
+ });
666
+ }
667
+ }
668
+ try {
669
+ dataReport.setDevice({
670
+ device_type:2,
671
+ device_id:deviceId,
672
+ device_name:deviceListReport.micList[deviceId],
673
+ operationType,
674
+ fore_state: operationType == 'hotPlug' ? +deviceState+1 : '-'
675
+ });
676
+ window.zbyAVSDK_device_checker_init.microphone.name = deviceListReport.micList[deviceId];
677
+ } catch (e) {};
678
+ await setCurrentMicrophoneDevice(deviceId);
679
+ if (operationType == 'hotPlug' || operationType == 'deviceError') {
680
+ if(!microPhoneData){
681
+ microPhoneData = await getMicrophoneDeviceListTrtc();
682
+ deviceName = deviceListReport.micList[deviceId];
683
+ }
684
+ NOTICE[operationType]({
685
+ deviceType: 'microphone',
686
+ useDeviceId: deviceId,
687
+ useDeviceName: deviceName,
688
+ deviceList: microPhoneData,
689
+ deviceState,
690
+ changedDeviceId
691
+ });
692
+ }
693
+ }
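The onMicphoneDeviceChanged case in the listener re-enters this function for hot-plug events; an empty deviceId forces a re-pick from the refreshed list. Sketch, with a placeholder changed-device id:

    await setMicrophoneDevice({
      deviceId: '',                  // empty -> choose the default (or first) microphone again
      operationType: 'hotPlug',
      deviceState: 1,                // 0 added / 1 removed / 2 enabled, per the listener comments
      changedDeviceId: 'usb-mic-01'
    });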
694
+
695
+ /**
696
+ * @function 获取当前麦克风的音量
697
+ * @return Promise | void
698
+ */
699
+ const getCurrentMicrophoneVolume = () => {
700
+ return callMethod('GetCurrentMicrophoneVolume', {}).then(ret => {
701
+ let volume = 0;
702
+ try {
703
+ volume = JSON.parse(ret.msg).microphoneVolume;
704
+ } catch (e) {
705
+ console.error(`zby-live-sdk: getCurrentMicrophoneVolume ret: ${ret}. error: ${e}`);
706
+ }
707
+ return volume;
708
+ });
709
+ }
710
+
711
+ /**
712
+ * @function 设置当前麦克风的音量
713
+ * @param volume : number 音量值
714
+ * @return Promise | void
715
+ */
716
+ const setCurrentMicrophoneVolume = (volume) => {
717
+ return callMethod('SetCurrentMicrophoneVolume', {volume});
718
+ }
719
+
720
+ /**
721
+ * @function 获取当前麦克风ID
722
+ * @return Promise | void
723
+ */
724
+ const getCurrentMicrophoneId = () => {
725
+ return callMethod('GetCurrentMicrophoneId', {});
726
+ }
727
+
728
+ //摄像头相关
729
+ /**
730
+ * @function 打开摄像头
731
+ * @param width: Number
732
+ * @param height: Number
733
+ * @return Promise | void
734
+ */
735
+ const startCamera = (width,height) => {
736
+ return callMethod('StartCamera', {width,height});
737
+ }
738
+
739
+ /**
740
+ * @function 关闭摄像头
741
+ * @return Promise | void
742
+ */
743
+ const stopCamera = () => {
744
+ return callMethod('StopCamera', {});
745
+ }
746
+
747
+ const openOrCloseCamera = async (operation) => {
748
+ console.log('openCa',operation);
749
+ if(operation){
750
+ await startCamera();
751
+ } else {
752
+ await stopCamera();
753
+ }
754
+ }
755
+
756
+ /**
757
+ * @function 获取摄像头列表
758
+ * @return Promise | void
759
+ */
760
+ const getCameraDeviceList = () => {
761
+ return callMethod('GetCameraDeviceList', {});
762
+ }
763
+
764
+ /**
765
+ * @function 获取摄像头列表
766
+ * @return Promise | void
767
+ */
768
+ export const getCameraDeviceListTrtc= async () => {
769
+ let videoListArr = JSON.parse(JSON.parse((await getCameraDeviceList()).msg).DeviceList);
770
+ console.log('GetCameraDeviceList_cameraList_trtc_iiii',videoListArr);
771
+ let videoList = [];
772
+ for (let i = 0, len = videoListArr.length; i < len; i++) {
773
+ videoList.push({
774
+ deviceId: videoListArr[i].id,
775
+ deviceName: videoListArr[i].name,
776
+ isDefault: videoListArr[i].default
777
+ });
778
+ deviceListReport.cameraList[videoListArr[i].szDeviceId] = videoListArr[i].szDeviceName;
779
+ }
780
+ if (!window.zbyAVSDK_device_checker_init) {
781
+ window.zbyAVSDK_device_checker_init = {};
782
+ }
783
+ if (!window.zbyAVSDK_device_checker_init.camera) {
784
+ window.zbyAVSDK_device_checker_init.camera = {};
785
+ }
786
+ window.zbyAVSDK_device_checker_init.camera.hasTest = true;
787
+ window.zbyAVSDK_device_checker_init.camera.list = videoList;
788
+ console.log('GetCameraDeviceList_cameraList_trtc',videoList);
789
+ return videoList;
790
+ };
791
+
792
+ /**
793
+ * @function 设置指定视频(当前摄像头)设备
794
+ * @param deviceId:String 视频设备 id,必选
795
+ * @return Promise | void
796
+ */
797
+ const setCurrentCameraDevice = (deviceId) => {
798
+ window.zbyAVSDK_device_checker_init.camera.use = deviceId;
799
+ return callMethod('SetCurrentCameraDevice', {
800
+ deviceId
801
+ });
802
+ }
803
+
804
+ /**
805
+ * @function 指定使用的摄像头
806
+ * @param deviceId:String 摄像头 id,必选
807
+ * @param operationType:String 操作类型,可选
808
+ * 'deviceError' -> 设备出错处理,'hotPlug' -> 热插拔处理,
809
+ * 不传即是普通的设置摄像头设备的行为
810
+ * @return Promise | void
811
+ */
812
+ const setVideoDevice = async (args) => {
813
+ defaultApi.writeLog(`${JSON.stringify(args)}----setVideoDevice-trtc----`);
814
+ let {deviceId, operationType, deviceState, code, changedDeviceId} = args;
815
+ let deviceName = '';
816
+ let cameraData;
817
+ if (!deviceId) {
818
+ cameraData = await getCameraDeviceListTrtc();
819
+ // console.log('hsghsghsg0',cameraData);
820
+ if (cameraData.length) {
821
+ let hasGetCamare = false;
822
+ // console.log('hsghsghsg1',hasGetSpeaker);
823
+ for (let item of cameraData) {
824
+ if (item.isDefault) {
825
+ deviceId = item.deviceId;
826
+ deviceName = item.deviceName;
827
+ hasGetCamare = true;
828
+ break;
829
+ }
830
+ }
831
+ // console.log('hsghsghsg2',hasGetSpeaker);
832
+ if (!hasGetCamare) {
833
+ deviceId = cameraData[0].deviceId;
834
+ deviceName = cameraData[0].deviceName;
835
+ // console.log('hsghsghsg91',deviceId);
836
+ }
837
+ } else {
838
+ deviceId = '';
839
+ }
840
+ // console.log('hsghsghsg1',deviceId);
841
+ if (deviceId === '') {
842
+ NOTICE.noDevice({
843
+ deviceType: 'camera'
844
+ });
845
+ }
846
+ }
847
+ window.zbyAVSDK_device_checker_init.camera.use = deviceId;
848
+ window.zbyAVSDK_device_checker_init.camera.name = deviceListReport.cameraList[deviceId];
849
+ try {
850
+ dataReport.setDevice({
851
+ device_type: 1,
852
+ device_id:deviceId,
853
+ device_name:deviceListReport.cameraList[deviceId],
854
+ operationType,
855
+ fore_state: operationType == 'hotPlug' ? deviceState+1 : '-'
856
+ });
857
+ } catch (e) {};
858
+ await setCurrentCameraDevice(deviceId);
859
+ if (operationType == 'hotPlug' || operationType == 'deviceError') {
860
+ if(!cameraData){
861
+ cameraData = await getCameraDeviceListTrtc();
862
+ deviceName = deviceListReport.cameraList[deviceId];
863
+ }
864
+ NOTICE[operationType]({
865
+ deviceType: 'camera',
866
+ useDeviceId: deviceId,
867
+ useDeviceName: deviceName,
868
+ deviceList: cameraData,
869
+ message: code,
870
+ deviceState,
871
+ changedDeviceId
872
+ });
873
+ }
874
+ try {
875
+ NOTICE.useredCamera({
876
+ deviceId,
877
+ deviceName
878
+ });
879
+ } catch (e){ };
880
+ };
881
+
882
+ /**
883
+ * @function 获取当前摄像头ID
884
+ * @return Promise | void
885
+ */
886
+ const GetCurrentCameraId = () => {
887
+ return callMethod('GetCurrentCameraId', {});
888
+ }
889
+
890
+ //扬声器相关
891
+
892
+ /**
893
+ * @function 获取扬声器列表
894
+ * @return Promise | void
895
+ */
896
+ const getSpeakerDeviceList = () => {
897
+ return callMethod('GetSpeakerDeviceList', {});
898
+ }
899
+
900
+ /**
901
+ * @function 获取扬声器列表
902
+ * @return Promise | void
903
+ */
904
+ const getSpeakerDeviceListTrtc = async () => {
905
+ let speakerListArr = JSON.parse(JSON.parse((await getSpeakerDeviceList()).msg).DeviceList);
906
+ console.log('getSpeakerDeviceList_speakerListArr_trtc_ii',speakerListArr);
907
+ let speakerList = [];
908
+ for (let i = 0, len = speakerListArr.length; i < len; i++) {
909
+ speakerList.push({
910
+ deviceId: speakerListArr[i].id,
911
+ deviceName: speakerListArr[i].name,
912
+ isDefault: speakerListArr[i].default
913
+ });
914
+ deviceListReport.speakerList[speakerListArr[i].szDeviceId] = speakerListArr[i].szDeviceName;
915
+
916
+ }
917
+ if (!window.zbyAVSDK_device_checker_init) {
918
+ window.zbyAVSDK_device_checker_init = {};
919
+ }
920
+ if (!window.zbyAVSDK_device_checker_init.speaker) {
921
+ window.zbyAVSDK_device_checker_init.speaker = {};
922
+ }
923
+ window.zbyAVSDK_device_checker_init.speaker.hasTest = true;
924
+ window.zbyAVSDK_device_checker_init.speaker.list = speakerList;
925
+ console.log('getSpeakerDeviceList_speakerListArr_trtc_iii',speakerList);
926
+ return speakerList;
927
+ };
928
+
929
+ /**
930
+ * @function 设置指定音频(当前扬声器)设备
931
+ * @param deviceId:String 音频设备 id,必选
932
+ * @return Promise | void
933
+ */
934
+ const setCurrentSpeakerDevice = (deviceId) => {
935
+ window.zbyAVSDK_device_checker_init.speaker.use = deviceId;
936
+ return callMethod('SetCurrentSpeakerDevice', {
937
+ deviceId
938
+ });
939
+ }
940
+
941
+ /**
942
+ * @function 指定扬声器
943
+ * @param deviceId:String 扬声器 id,必选
944
+ * @param operationType:String 操作类型,可选
945
+ * 'deviceError' -> 设备出错处理,'hotPlug' -> 热插拔处理,
946
+ * 不传即是普通的设置扬声器设备的行为
947
+ * @return Promise | void
948
+ */
949
+ const setSpeakerDevice = async (args) => {
950
+ defaultApi.writeLog(`${JSON.stringify(args)}----setSpeakerDevice-trtc----`);
951
+ console.log('hsghsghsg999',args);
952
+ let {deviceId, operationType, deviceState, code, changedDeviceId} = args;
953
+ let deviceName = '';
954
+ let speakerData;
955
+ if (!deviceId) {
956
+ speakerData = await getSpeakerDeviceListTrtc();
957
+ console.log('hsghsghsg0',speakerData.length);
958
+ if (speakerData.length) {
959
+ let hasGetSpeaker = false;
960
+ console.log('hsghsghsg1',hasGetSpeaker);
961
+ for (let item of speakerData) {
962
+ if (item.isDefault) {
963
+ deviceId = item.deviceId;
964
+ deviceName = item.deviceName;
965
+ hasGetSpeaker = true;
966
+ break;
967
+ }
968
+ }
969
+ console.log('hsghsghsg2',hasGetSpeaker);
970
+ if (!hasGetSpeaker) {
971
+ deviceId = speakerData[0].deviceId;
972
+ deviceName = speakerData[0].deviceName;
973
+ console.log('hsghsghsg91',deviceId);
974
+ }
975
+ } else {
976
+ deviceId = '';
977
+ }
978
+ if (deviceId === '') {
979
+ NOTICE.noDevice({
980
+ deviceType: 'speaker'
981
+ });
982
+ }
983
+ }
984
+ console.log('hsghsghsg999',deviceId);
985
+ try {
986
+ dataReport.setDevice({
987
+ device_type:3,
988
+ device_id:deviceId,
989
+ device_name:deviceListReport.speakerList[deviceId],
990
+ operationType,
991
+ fore_state: operationType == 'hotPlug' ? deviceState+1 : '-'
992
+ });
993
+ window.zbyAVSDK_device_checker_init.speaker.name = deviceListReport.speakerList[deviceId];
994
+ } catch (e) {
995
+ console.log(e);
996
+ };
997
+ console.log('hsghsghsg99911',deviceId);
998
+ usingAudioDeviceId.speaker = deviceId;
999
+ await setCurrentSpeakerDevice(deviceId);
1000
+ if (operationType == 'hotPlug' || operationType == 'deviceError') {
1001
+ if(!speakerData){
1002
+ speakerData = await getSpeakerDeviceListTrtc();
1003
+ deviceName = deviceListReport.speakerList[deviceId];
1004
+ }
1005
+ console.log('hsghsghsg99911',deviceId);
1006
+ NOTICE[operationType]({
1007
+ deviceType: 'speaker',
1008
+ useDeviceId: deviceId,
1009
+ useDeviceName: deviceName,
1010
+ deviceList: speakerData,
1011
+ message: code,
1012
+ deviceState,
1013
+ changedDeviceId
1014
+ });
1015
+ }
1016
+ console.log('hsghsghsg999112',deviceId);
1017
+ };
1018
+
1019
+ /**
1020
+ * @function 获取当前扬声器的音量
1021
+ * @return Promise | void
1022
+ */
1023
+ const getCurrentSpeakerVolume = () => {
1024
+ return callMethod('GetCurrentSpeakerVolume', {});
1025
+ }
1026
+
1027
+ /**
1028
+ * @function 获取当前扬声器ID
1029
+ * @return Promise | void
1030
+ */
1031
+ const getCurrentSpeakerId = () => {
1032
+ return callMethod('GetCurrentSpeakerId', {});
1033
+ }
1034
+
1035
+ /**
1036
+ * @function 设置当前扬声器音量
1037
+ * @param volume : number 音量值
1038
+ * @return Promise | void
1039
+ */
1040
+ const setCurrentSpeakerVolume = (volume) => {
1041
+ return callMethod('SetCurrentSpeakerVolume', {volume});
1042
+ }
1043
+
1044
+
1045
+ /**
1046
+ * @function 打开系统声音采集
1047
+ * @return Promise | void
1048
+ */
1049
+ const startSystemAudioLoopback = () => {
1050
+ return callMethod('StartSystemAudioLoopback', {});
1051
+ }
1052
+
1053
+ /**
1054
+ * @function 关闭系统声音采集
1055
+ * @return Promise | void
1056
+ */
1057
+ const stopSystemAudioLoopback = () => {
1058
+ return callMethod('StopSystemAudioLoopback', {});
1059
+ }
1060
+
1061
+ //采集扬声器声音
1062
+ const setAudioSpeakerCapture = (speaker) => {
1063
+ if(speaker) {
1064
+ return startSystemAudioLoopback();
1065
+ } else {
1066
+ return stopSystemAudioLoopback();
1067
+ }
1068
+ }
1069
+
1070
+ //sei
1071
+ /**
1072
+ * @function sei消息
1073
+ * @param data: String 数据
1074
+ * @param dataSize: Number 数据长度
1075
+ * @param repeatCount: Number 发送次数
1076
+ * @return Promise | void
1077
+ */
1078
+ const sendSEIMsg = (data,dataSize,repeatCount) => {
1079
+ if (typeof dataSize === 'undefined') {
1080
+ dataSize = data.length;
1081
+ };
1082
+ if (typeof repeatCount === 'undefined') {
1083
+ repeatCount = 1;
1084
+ }
1085
+ return callMethod('SendSEIMsg', {data,dataSize,repeatCount});
1086
+ }
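dataSize and repeatCount default to data.length and 1 when omitted, so the common call passes only the payload. A one-line sketch with a placeholder payload:

    await sendSEIMsg(JSON.stringify({ ts: Date.now() }));   // sends the SEI message once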
1087
+
1088
+ //采集相关
1089
+
1090
+ /**
1091
+ * @function 开始屏幕采集
1092
+ * @return Promise | void
1093
+ */
1094
+ const startScreenCapture = () => {
1095
+ return callMethod('StartScreenCapture', {});
1096
+ }
1097
+
1098
+ /**
1099
+ * @function 停止屏幕采集
1100
+ * @return Promise | void
1101
+ */
1102
+ const stopScreenCapture = () => {
1103
+ return callMethod('StopScreenCapture', {});
1104
+ }
1105
+
1106
+ //推拉流相关
1107
+
1108
+ /**
1109
+ * @function 开启本地或者远程的视频视图
1110
+ * @param isLocal:Boolean 是否是本地的视频预览,必选
1111
+ * @param streamId:String 要拉取的视频流的 id,可选,只有拉取远程的视频流的时候才是必选的
1112
+ * @param domId:String <video> 标签的 id,可选
1113
+ * 如果传了 domId,就把视频绑定到对应的 <video> 标签上
1114
+ * @return Promise 可从 Promise 中获取 src,Promise.then((src) => {})
1115
+ */
1116
+ const startLocalOrRemotePreview = async (isLocal, streamId, domId, isTeacherNewWindowPreview) => {
1117
+ defaultApi.writeLog(`${streamId}----streamId-trtc---- ${isLocal}`);
1118
+ let isUpdateChromeVersion = await util.getChromeVersion();
1119
+ // 预览本地视频的话,流 id 从全局配置中获取
1120
+ // if (isLocal) {
1121
+ // streamId = isInRoom ? (window.zbyAVSDK_init_params && window.zbyAVSDK_init_params.trtc.streamId)
1122
+ // : 'deviceCheckerInit';
1123
+ // }
1124
+ const externalConstraints = {
1125
+ audio: false,
1126
+ video: {
1127
+ mandatory: {
1128
+ chromeMediaSource: 'external',
1129
+ chromeMediaSourceId: `ems://trtc_ext/${isLocal || isTeacherNewWindowPreview ? -3 : streamId}`
1130
+ }
1131
+ }
1132
+ };
1133
+
1134
+ console.log('externalConstraints',externalConstraints);
1135
+
1136
+ return new Promise((resolve, reject) => {
1137
+ const handleExternalSuccess = (stream) => {
1138
+ defaultApi.writeLog(`${stream}----stream-trtc----`);
1139
+ stream.oninactive = () => {
1140
+ defaultApi.writeLog('Stream inactive');
1141
+ };
1142
+ const src = isUpdateChromeVersion? stream : window.URL.createObjectURL(stream);
1143
+ console.log('内核升级', isUpdateChromeVersion,domId,src);
1144
+ if (domId && document.querySelector(domId)) {
1145
+ if(isUpdateChromeVersion) {
1146
+ document.querySelector(domId).srcObject = src;
1147
+ console.log('内核升级1',domId, document.querySelector(domId),document.querySelector(domId).srcObject,src);
1148
+ } else {
1149
+ console.log('内核升级2');
1150
+ document.querySelector(domId).src = src;
1151
+ }
1152
+ }
1153
+ defaultApi.writeLog(`setVidoeSrc, streamId:${streamId}, domId: ${domId}, src: ${src}`);
1154
+ resolve(src);
1155
+ };
1156
+
1157
+ const handleExternalError = (error) => {
1158
+ if (error.name === 'ConstraintNotSatisfiedError') {
1159
+ console.error('ConstraintNotSatisfiedError');
1160
+ } else if (error.name === 'PermissionDeniedError') {
1161
+ console.error(
1162
+ 'Permissions have not been granted to use your camera and '
1163
+ + 'microphone, you need to allow the page access to your devices in '
1164
+ + 'order for the demo to work.'
1165
+ );
1166
+ }
1167
+ console.error(`getUserMedia error: ${error.name}`, error);
1168
+ if (domId) {
1169
+ document.querySelector(domId).src = '';
1170
+ }
1171
+ reject('');
1172
+ };
1173
+
1174
+ if (navigator.webkitGetUserMedia) {
1175
+ navigator.webkitGetUserMedia(
1176
+ externalConstraints,
1177
+ handleExternalSuccess,
1178
+ handleExternalError
1179
+ );
1180
+ }
1181
+ });
1182
+ };
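The preview comes from webkitGetUserMedia with a chromeMediaSource: 'external' constraint whose source id is ems://trtc_ext/<channel>, where channel -3 is the local/teacher preview and any other value is the remote streamId. A binding sketch with placeholder ids:

    const src = await startLocalOrRemotePreview(false, 'remote_stream_001', '#remoteVideo');
    // On newer Chrome kernels src is the MediaStream itself (assigned to video.srcObject above);
    // on older kernels it is a blob: URL (assigned to video.src).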
1183
+
1184
+ /**
1185
+ * @function 开始推流
1186
+ * @param streamId: String 流id
1187
+ * @return Promise | void
1188
+ */
1189
+ const startPush = (streamId ,channel = 0) => {
1190
+ if(!streamId){
1191
+ streamId = zbyAVSDK_init_params.trtc.streamId;
1192
+ }
1193
+ // 心跳开始
1194
+ if (!isFirstHeartBeatReport) {
1195
+ isFirstHeartBeatReport = true;
1196
+ console.log('ppppppp上报呀')
1197
+ heartBeatDataReport('start');
1198
+ }
1199
+ console.log('startPush',streamId,channel);
1200
+ return callMethod('StartPush', {streamId, channel});
1201
+ }
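startPush falls back to the streamId from the global init params and kicks off the 30-second heartbeat report the first time it runs. Sketch with a placeholder stream id:

    await startPush('teacher_room_001_stream', 0);   // omit both args to use zbyAVSDK_init_params.trtc.streamId on channel 0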
1202
+
1203
+ /**
1204
+ * @function 停止推流
1205
+ * @param channel: Number 推流通道,可选,默认为 0
1206
+ * @return Promise | void
1207
+ */
1208
+ const stopPush = (channel) => {
1209
+ // if(!streamId){
1210
+ // streamId = zbyAVSDK_init_params.trtc.streamId;
1211
+ // }
1212
+ if(!channel){
1213
+ channel = 0;
1214
+ }
1215
+
1216
+ return callMethod('StopPush', {channel});
1217
+ }
1218
+
1219
+ /**
1220
+ * @function 不推/推 音频
1221
+ * @param enable : bool
1222
+ * @return Promise | void
1223
+ */
1224
+ const muteLocalAudio = (enable) => {
1225
+ return callMethod('MuteLocalAudio', {enable});
1226
+ }
1227
+
1228
+ /**
1229
+ * @function 不推/推 视频
1230
+ * @param enable: bool
1231
+ * @return Promise | void
1232
+ */
1233
+ const muteLocalVideo = (enable) => {
1234
+ return callMethod('MuteLocalVideo', {enable});
1235
+ }
1236
+
1237
+ /**
1238
+ * @function 开始转推本地摄像头TRTC流至RTMP。目前只支持转推1路
1239
+ * @return Promise | void
1240
+ */
1241
+ const addPublishRtmpStreamUrl = () => {
1242
+ return callMethod('AddPublishRtmpStreamUrl', {});
1243
+ }
1244
+
1245
+ /**
1246
+ * @function 停止转推RTMP
1247
+ * @return Promise | void
1248
+ */
1249
+ const removePublishStreamUrl = () => {
1250
+ return callMethod('RemovePublishStreamUrl', {});
1251
+ }
1252
+
1253
+ /**
1254
+ * @function 开始拉流
1255
+ * @param streamId: String 流id
1256
+ * @return Promise | void
1257
+ */
1258
+ const startPlay = (streamId) => {
1259
+ return callMethod('StartPlay', {streamId});
1260
+ }
1261
+
1262
+ /**
1263
+ * @function 初始化拉流
1264
+ * @param streamId:String 从传来的信道消息中获取,必选
1265
+ * @param domId:String <video> 标签的 id,可选
1266
+ * 如果传了就把视频绑定到对应的 <video> 标签上
1267
+ * @param pInfo:String 多媒体流附加信息,可选,默认为 'none'
1268
+ * @return src:String 视频预览地址
1269
+ */
1270
+ export const initPullFlow = async (streamId, domId, mute, pInfo, notAutoPlay, audioOnly) => {
1271
+ defaultApi.writeLog(`TRTC:: initPullFlow_start ,streamId :${streamId}`);
1272
+ let playChannel;
1273
+ resetStreamIdRtcPlayerInfo1(streamId);
1274
+ resetStreamIdRtcPlayerInfo(streamId);
1275
+ try {
1276
+ if (!isFirstHeartBeatReport) {
1277
+ isFirstHeartBeatReport = true;
1278
+ heartBeatDataReport('start');
1279
+ }
1280
+ } catch (error) {}
1281
+ if (streamIdToPreviewId[streamId] == undefined) {
1282
+
1283
+ // defaultApi.writeLog(`TRTC:: The streamId queue does not contain this streamId ${uiChnIndexs}`);
1284
+ playChannel = uiChnIndexs.pop();
1285
+ streamIdToPreviewId[streamId] = playChannel;
1286
+ console.log('streamIdIsNoExited',playChannel,streamIdToPreviewId[streamId],streamId,uiChnIndexs);
1287
+ } else {
1288
+ defaultApi.writeLog('TRTC:: The streamId queue contains this streamId');
1289
+ playChannel = streamIdToPreviewId[streamId];
1290
+ console.log('streamIdIsExited',playChannel,streamIdToPreviewId[streamId],streamId,uiChnIndexs);
1291
+ await stopPlay(streamId);
1292
+ resetStreamIdRtcPlayerInfo(streamId);
1293
+ }
1294
+ // 判断是否创建或者获取播放通道成功,不成功就删掉重试
1295
+ // if (playChannel == undefined) {
1296
+ // delete streamIdToPreviewId[streamId];
1297
+ // defaultApi.writeLog('error', 'TRTC:: uiChnIndex is not exist');
1298
+ // // delete streamIdToBlobUrl[streamId];
1299
+ // return initPullFlow(streamId, domId, mute, pInfo, notAutoPlay, audioOnly);
1300
+ // }
1301
+ previewIdToStreamId[playChannel] = streamId;
1302
+ //是否拉取声音
1303
+ // await muteRemoteAudio(streamId, !!mute);
1304
+ // //是否拉取画面
1305
+ // await muteRemoteVideo(streamId, !!audioOnly);
1306
+ if (!notAutoPlay) {
1307
+ await startPlay(streamId);
1308
+ }
1309
+ await muteRemoteAudio(streamId, !!mute);
1310
+ //是否拉取画面
1311
+ await muteRemoteVideo(streamId, !!audioOnly);
1312
+ const videoSrc = await startLocalOrRemotePreview(false, streamId, domId);
1313
+ return {
1314
+ videoSrc,
1315
+ playerId: playChannel
1316
+ };
1317
+ };
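initPullFlow allocates (or reuses) a play channel for the stream, optionally mutes audio or video, and returns the preview source together with the channel index. Sketch with a placeholder stream id and DOM selector:

    const { videoSrc, playerId } = await initPullFlow('remote_stream_001', '#pullVideo0', false, 'none', false, false);
    // Later, stopPlay('remote_stream_001', true) stops the pull and pushes playerId back onto uiChnIndexs.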
1318
+
1319
+ /**
1320
+ * @function 停止拉流
1321
+ * @param streamId: String 流id
1322
+ * @return Promise | void
1323
+ */
1324
+ const stopPlay = (streamId, recovery) => {
1325
+ const id = streamIdToPreviewId[streamId];
1326
+ delete streamIdRtcPlayerInfo[streamId];
1327
+ if (recovery && id && !uiChnIndexs.includes(id)) {
1328
+ uiChnIndexs.push(id);
1329
+ defaultApi.writeLog(`avsdk TRTC::stopPlayStream streamId: ${streamId} id: ${id} uiChnIndexs: ${uiChnIndexs}`);
1330
+ delete streamIdToPreviewId[streamId];
1331
+ }
1332
+ return callMethod('StopPlay', {streamId});
1333
+ }
1334
+
1335
+ /**
1336
+ * @function 停止拉所有的流
1337
+ * @return Promise | void
1338
+ */
1339
+ const stopPlayAll = () => {
1340
+ return callMethod('StopPlayAll', {});
1341
+ }
1342
+
1343
+ /**
1344
+ * @function 不拉/拉 音频 true是静音/false是拉声音
1345
+ * @param streamId :string
1346
+ * @param mute : bool
1347
+ * @return Promise | void
1348
+ */
1349
+ const muteRemoteAudio = (streamId, mute) => {
1350
+ console.log('audio_type',mute)
1351
+ streamIdRtcPlayerInfo1[streamId].audio_type = !mute;
1352
+ console.log('audio_type1', streamIdRtcPlayerInfo1)
1353
+ return callMethod('MuteRemoteAudio', {streamId, mute});
1354
+ }
1355
+
1356
+ const pullAudioFlow = async (playerId, operation, streamid) => {
1357
+ defaultApi.writeLog(`pullAudioFlow -- playerId ${playerId} operation ${operation} streamId ${streamid}`);
1358
+ if(streamid){
1359
+ if(streamIdToPreviewId[streamid] == undefined) {
1360
+ NOTICE.pullAudioFlowError({streamid})
1361
+ return
1362
+ }else{
1363
+ playerId = streamIdToPreviewId[streamid]
1364
+ console.log('pullAudioFlow-playerId',playerId,streamIdToPreviewId)
1365
+ }
1366
+ }
1367
+ let streamId = previewIdToStreamId[playerId];
1368
+ try {
1369
+ streamIdRtcPlayerInfo1[streamId].audio_type = !operation;
1370
+ console.log('pullAudioFlow ::previewIdToStreamId',streamId,previewIdToStreamId)
1371
+ console.log('pullAudioFlow ::streamIdRtcPlayerInfo1',streamId,streamIdRtcPlayerInfo1);
1372
+ } catch (e) {
1373
+ console.log('pullAudioFlow ::streamIdRtcPlayerInfo1--error',e);
1374
+ }
1375
+ try {
1376
+ dataReport.setPullVoice({
1377
+ code:+!operation,
1378
+ pull_uid: util.getUidByStreamId(streamId),
1379
+ pull_streamid:streamId,
1380
+ playerId
1381
+ // operator:'client'
1382
+ });
1383
+ } catch (e) {};
1384
+ return await muteRemoteAudio(streamId, operation);
1385
+ }
1386
+
1387
+ /**
1388
+ * @function 不拉/拉 视频
1389
+ * @param streamId :string
1390
+ * @param mute: bool
1391
+ * @return Promise | void
1392
+ */
1393
+ const muteRemoteVideo = (streamId, mute) => {
1394
+ try {
1395
+ streamIdRtcPlayerInfo1[streamId].video_type = !mute;
1396
+ console.log('activateVideoPlayStream ::streamIdRtcPlayerInfo1',streamIdRtcPlayerInfo1);
1397
+ } catch (e) {
1398
+ console.log('activateVideoPlayStream ::streamIdRtcPlayerInfo1 --error',e);
1399
+ }
1400
+ return callMethod('MuteRemoteVideo', {streamId, mute});
1401
+ }
1402
+
1403
+ /**
1404
+ *
1405
+ * @function 全员开麦突出声音流
1406
+ * @param bFocus :bool
1407
+ * @param streamIdarr : array
1408
+ * @return Promise | void
1409
+ */
1410
+ const setFocusStreams = (bFocus, streamIdarr) => {
1411
+ return callMethod('SetFocusStreams', {bFocus, streamIdarr});
1412
+ }
1413
+ //加载采集插件的输入数据
1414
+ export const loadCollectionInputEntry = () => {
1415
+ return callMethod('LoadCollectionInputEntry', {});
1416
+ };
1417
+
1418
+ //加载截屏插件
1419
+ const LoadCollectionOutputEntry2 = () => {
1420
+ defaultApi.writeLog('info', 'avsdk TRTC::LoadCollectionOutputEntry2');
1421
+ return callMethod('LoadCollectionOutputEntry2', { });
1422
+ };
1423
+
1424
+ //开启同屏
1425
+ const startMultiScreen = async () => {
1426
+ defaultApi.writeLog('info', 'avsdk TRTC::startMultiScreen');
1427
+ // await setVideoCaptureFactory(true,1);
1428
+ await LoadCollectionOutputEntry2();
1429
+ };
1430
+
1431
+ //高光时刻 enable (bool)/strDomain (string)/strKey (string)
1432
+ const enableHighlight = async (enable,strDomain = '',strKey = '') => {
1433
+ defaultApi.writeLog('info', 'avsdk TRTC::enableHighlight');
1434
+ return callMethod('EnableHighlight', {enable, strDomain, strKey});
1435
+ };
1436
+
1437
+ /**
1438
+ * @function Get all preview channels for the currently pulled video streams
1439
+ * (no parameters; reads the module-level streamIdToPreviewId map)
1440
+ * @return the streamId → previewId map
1441
+ */
1442
+ export const getAllChannelIndex = () => {
1443
+ defaultApi.writeLog(`TRTC : getAllChannelIndex ${JSON.stringify(streamIdToPreviewId)}`);
1444
+ return streamIdToPreviewId;
1445
+ };
1446
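Sketch of what getAllChannelIndex returns: the module-level streamId to preview-channel map (example values are made up).
    const channels = getAllChannelIndex();
    // e.g. { '10001_main': 0, '10002_main': 1 } -- each pulled stream mapped to its preview channel index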
+
1447
+ // Start the teacher's link-mic session
1448
+ const teacherStartLinkMic = async (mode) => {
1449
+ if (mode === 0) {
1450
+ await startMicrophone();
1451
+ } else {
1452
+ // After link-mic starts, the microphone must be opened and the external video capturer enabled (every time)
1453
+ await startMicrophone();
1454
+ await setEnableExternVideoCapture(true)
1455
+ }
1456
+ };
1457
+ // Stop the teacher's link-mic session
1458
+ const teacherStopLinkMic = async (mode = 0) => {
1459
+ // if (mode === 0) {
1460
+ // // await muteVideoPublish(false,0);
1461
+ // } else {
1462
+ await stopMicrophone();
1463
+ // }
1464
+ };
1465
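A minimal sketch of the link-mic flow as wired above; mode 1 is assumed to be the path that also needs the external video capturer.
    await teacherStartLinkMic(1);  // startMicrophone() plus setEnableExternVideoCapture(true)
    // ... link-mic session ...
    await teacherStopLinkMic();    // stopMicrophone(), regardless of mode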
+
1466
+ /**
1467
+ * @function Leave the classroom
1468
+ * @return Promise | void
1469
+ */
1470
+ const leaveRoom = async () => {
1471
+ for (let key in streamIdToPreviewId) {
1472
+ await stopPlay(key, true); // recycle the preview channel
1473
+ }
1474
+ await stopPush();
1475
+ removerListener();
1476
+ await destroyEngine();
1477
+ heartBeatDataReport('stop');
1478
+ isFirstHeartBeatReport = false;
1479
+ streamIdToPreviewId = {};
1480
+ previewIdToStreamId = {};
1481
+ streamIdRtcPlayerInfo = {};
1482
+ streamIdRtcPlayerInfo1 = {};
1483
+ // window.zegoHasPushFlow = 0;
1484
+ }
1485
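Usage sketch for leaveRoom: it stops every pull, stops the push, removes listeners, destroys the engine, stops the heartbeat and clears the local maps, leaving the module in a clean state.
    await leaveRoom();
    // getAllChannelIndex() now returns {} and the heartbeat timer is cleared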
+
1486
+ const heartBeatRealKeys = ['video_fps', 'video_bitrate', 'audio_fps', 'audio_bitrate'];
1487
+
1488
+ const _heartBeatDataReport = () => {
1489
+ console.log('heartbeat report 2')
1490
+ // let cpuRate = 0;
1491
+ // let memRate = 0;
1492
+ // let rateCount = 0;
1493
+ // let appCpuRate = 0;
1494
+ // let appMemUsed = 0;
1495
+ // let rateTimer = setInterval(async () => {
1496
+ // rateCount++;
1497
+ // let {cpu_rate, mem_rate, gpus, app_cpu_rate, app_mem_used} = (await toolApi.getCurCpuMemInfo()).msg;
1498
+ // cpu_rate = cpu_rate < 0 ? 0 : cpu_rate;
1499
+ // cpuRate += parseFloat(cpu_rate);
1500
+ // memRate += parseFloat(mem_rate);
1501
+ // if (window.zbyAVSDK_init_params.zego.role === 'student') {
1502
+ // appCpuRate += parseFloat(app_cpu_rate);
1503
+ // appMemUsed += parseFloat(app_mem_used);
1504
+ // }
1505
+
1506
+ // if (rateCount >= 3) {
1507
+ // heartBeatRealKeys.forEach(realKey => {
1508
+ // if (heartBeatDataReportObj.hasOwnProperty(realKey) && heartBeatDataReportObj.count > 0) {
1509
+ // heartBeatDataReportObj[realKey] = util.toFixed(heartBeatDataReportObj[realKey]/heartBeatDataReportObj.count);
1510
+ // }
1511
+ // });
1512
+ const pullInfo = [];
1513
+ console.log('pull stream type 1', streamIdRtcPlayerInfo)
1514
+ Object.keys(streamIdRtcPlayerInfo).forEach(streamid => {
1515
+ console.log('pull stream type 1.5')
1516
+ heartBeatRealKeys.forEach(realKey => {
1517
+ if (!streamIdRtcPlayerInfo[streamid].hasOwnProperty(realKey)) {
1518
+ streamIdRtcPlayerInfo[streamid][realKey] = [];
1519
+ }
1520
+ // if (streamIdRtcPlayerInfo[streamid].count > 0) {
1521
+ // streamIdRtcPlayerInfo[streamid][realKey] = util.toFixed(streamIdRtcPlayerInfo[streamid][realKey]/streamIdRtcPlayerInfo[streamid].count);
1522
+ // }
1523
+ });
1524
+ console.log('pull stream type 2', streamIdRtcPlayerInfo1[streamid])
1525
+ // Determine the pull-stream type; could be extracted into a helper later
1526
+ if(streamIdRtcPlayerInfo1[streamid].audio_type && streamIdRtcPlayerInfo1[streamid].video_type) {
1527
+ streamIdRtcPlayerInfo1[streamid].stream_type = 'both';
1528
+ } else if(!streamIdRtcPlayerInfo1[streamid].audio_type && streamIdRtcPlayerInfo1[streamid].video_type) {
1529
+ streamIdRtcPlayerInfo1[streamid].stream_type = 'video';
1530
+ } else if(streamIdRtcPlayerInfo1[streamid].audio_type && !streamIdRtcPlayerInfo1[streamid].video_type) {
1531
+ streamIdRtcPlayerInfo1[streamid].stream_type = 'audio';
1532
+ } else {
1533
+ streamIdRtcPlayerInfo1[streamid].stream_type = 'none';
1534
+ }
1535
+ // console.log('hsghsghsg_type_type', streamIdRtcPlayerInfo1[streamid].stream_type);
1536
+
1537
+ pullInfo.push({
1538
+ streamid,
1539
+ // uid: util.getUidByStreamId(streamid),
1540
+ ...streamIdRtcPlayerInfo[streamid],
1541
+ pull_type: streamIdRtcPlayerInfo1[streamid].stream_type,
1542
+ volume: streamIdRtcPlayerInfo[streamid].volume.slice(0,streamIdRtcPlayerInfo[streamid].volume.length-1)
1543
+ });
1544
+ resetStreamIdRtcPlayerInfo(streamid);
1545
+ });
1546
+ if (isFirstHeartBeatReport) {
1547
+ try {
1548
+ // When pushing with the mic muted, filter out audio fps/bitrate and report them as 0;
1549
+ // if (!isNoticeMicVolumeZego) {
1550
+ // heartBeatDataReportObj['audio_fps'] = [];
1551
+ // heartBeatDataReportObj['audio_bitrate'] = [];
1552
+ // }
1553
+
1554
+ // Determine the push-stream type; could be extracted into a helper later
1555
+ if(zbysdk.deviceStatus.camera && zbysdk.deviceStatus.microphone) {
1556
+ zbysdk.deviceStatus.stream_type = 'both';
1557
+ } else if(!zbysdk.deviceStatus.camera && zbysdk.deviceStatus.microphone) {
1558
+ zbysdk.deviceStatus.stream_type = 'audio';
1559
+ } else if(zbysdk.deviceStatus.camera && !zbysdk.deviceStatus.microphone) {
1560
+ zbysdk.deviceStatus.stream_type = 'video';
1561
+ } else {
1562
+ zbysdk.deviceStatus.stream_type = 'none';
1563
+ };
1564
+ // console.log('push_type222',zbysdk.deviceStatus,zbysdk.deviceStatus.stream_type);
1565
+ defaultApi.writeLog(`push_type_trtc,camera: ${zbysdk.deviceStatus.camera},microphone: ${zbysdk.deviceStatus.microphone},type: ${zbysdk.deviceStatus.stream_type},a_fps: ${dataTrtcCapture.afps}, a_bit: ${dataTrtcCapture.audioBitrate}, v_fps: ${dataTrtcCapture.fps}, v_bit: ${dataTrtcCapture.videoBitrate}`);
1566
+ // if (window.zbyAVSDK_init_params.zego.role === 'teacher') {
1567
+ dataReport.heartbeat({
1568
+ ...{...heartBeatDataReportObj, push_type: zbysdk.deviceStatus.stream_type, volume: heartBeatDataReportObj.volume.slice(0,heartBeatDataReportObj.volume.length-1)},
1569
+ // pull_info: JSON.stringify(pullInfo),
1570
+ pull_info: pullInfo,
1571
+ // cpu_rate: util.toFixed(cpuRate/rateCount),
1572
+ // mem_rate: util.toFixed(memRate/rateCount),
1573
+ });
1574
+ // } else {
1575
+ // dataReport.heartbeat({
1576
+ // ...heartBeatDataReportObj,
1577
+ // pull_info: JSON.stringify(pullInfo),
1578
+ // cpu_rate: util.toFixed(cpuRate/rateCount),
1579
+ // mem_rate: util.toFixed(memRate/rateCount),
1580
+ // app_cpu: util.toFixed(appCpuRate/rateCount),
1581
+ // app_mem: util.toFixed(appMemUsed/rateCount),
1582
+ // video_mem: gpus
1583
+ // });
1584
+ // }
1585
+ } catch (e) {
1586
+ console.log(e);
1587
+ }
1588
+ }
1589
+ resetHeartBeatDataReportObj();
1590
+ // cpuRate = 0;
1591
+ // memRate = 0;
1592
+ // appCpuRate = 0;
1593
+ // appMemUsed = 0;
1594
+
1595
+ // clearInterval(rateTimer);
1596
+ // }
1597
+ // }, 10 * 1000);
1598
+ };
1599
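The audio_type/video_type to stream_type mapping appears twice above (pull and push); a hypothetical helper, not present in this file, along the lines of the "could be extracted into a helper later" comments:
    // derive the reported stream_type from the two device/pull flags
    const getStreamType = (hasAudio, hasVideo) => {
      if (hasAudio && hasVideo) return 'both';
      if (hasVideo) return 'video';
      if (hasAudio) return 'audio';
      return 'none';
    };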
+
1600
+ const heartBeatDataReport = (type) => {
1601
+ try {
1602
+ if (type === 'start' && !heartBeatDataReportTimer) {
1603
+ _heartBeatDataReport();
1604
+ heartBeatDataReportTimer = setInterval(() => {
1605
+ _heartBeatDataReport();
1606
+ }, 30 * 1000);
1607
+ }
1608
+ if (type === 'stop') {
1609
+ clearInterval(heartBeatDataReportTimer);
1610
+ heartBeatDataReportTimer = null;
1611
+ }
1612
+ } catch (error) {
1613
+ console.log(error);
1614
+ }
1615
+ };
1616
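heartBeatDataReport is a start/stop switch around a 30-second interval; a sketch of the intended call pattern:
    heartBeatDataReport('start');  // reports immediately, then every 30 seconds
    // ...
    heartBeatDataReport('stop');   // clears the interval (leaveRoom also does this)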
+
1617
+ const heartBeatDataReportCalc = (name, _data) => {
1618
+ console.log('hsgmzk111',name,_data);
1619
+ let _d = JSON.parse(_data.json);
1620
+ console.log('hsgmzk222',name,_d);
1621
+ // Pull-stream statistics
1622
+ const pullKeys = ['fps', 'videoBitrate', 'afps', 'audioBitrate'];
1623
+ if (name === 'onRemoteStatistics') {
1624
+ console.log('hsgmzk222',streamIdRtcPlayerInfo,_d.streamId)
1625
+ if (streamIdRtcPlayerInfo && streamIdRtcPlayerInfo.hasOwnProperty(_d.streamId)) {
1626
+ console.log('hsgmzk22211');
1627
+ let streamid = _d.streamId;
1628
+ let isReport = true;
1629
+ // streamIdRtcPlayerInfo[streamid].count++;
1630
+ heartBeatRealKeys.forEach((realKey, index) => {
1631
+ if (_d.hasOwnProperty(pullKeys[index])) {
1632
+ if (streamIdRtcPlayerInfo[streamid][realKey] === undefined) {
1633
+ streamIdRtcPlayerInfo[streamid][realKey] = [];
1634
+ isReport = false;
1635
+ }
1636
+ // streamIdRtcPlayerInfo[streamid][realKey].push(parseFloat(parseInt(item[pullKeys[index]])));
1637
+ }
1638
+ });
1639
+ console.log('hsgmzk333',_d)
1640
+ if (isReport) {
1641
+ let audio_fps_trtc_pull = 0;
1642
+ let audio_bitrate_trtc_pull = 0;
1643
+ let video_fps_trtc_pull = 0;
1644
+ let video_bitrate_trtc_pull = 0;
1645
+ console.log('hsgmzk444',_d)
1646
+ if(streamIdRtcPlayerInfo1[streamid].audio_type && streamIdRtcPlayerInfo1[streamid].video_type) {
1647
+ console.log('hsgmzk555',_d)
1648
+ audio_fps_trtc_pull = parseFloat(parseInt(_d.afps));
1649
+ audio_bitrate_trtc_pull = parseFloat(parseInt(_d.audioBitrate));
1650
+ video_fps_trtc_pull = parseFloat(parseInt(_d.fps));
1651
+ video_bitrate_trtc_pull = parseFloat(parseInt(_d.videoBitrate));
1652
+ } else if(!streamIdRtcPlayerInfo1[streamid].audio_type && streamIdRtcPlayerInfo1[streamid].video_type) {
1653
+ video_fps_trtc_pull = parseFloat(parseInt(_d.fps));
1654
+ video_bitrate_trtc_pull = parseFloat(parseInt(_d.videoBitrate));
1655
+ } else if(streamIdRtcPlayerInfo1[streamid].audio_type && !streamIdRtcPlayerInfo1[streamid].video_type) {
1656
+ audio_fps_trtc_pull = parseFloat(parseInt(_d.afps));
1657
+ audio_bitrate_trtc_pull = parseFloat(parseInt(_d.audioBitrate));
1658
+ }
1659
+ streamIdRtcPlayerInfo[streamid].audio_fps.push(audio_fps_trtc_pull);
1660
+ streamIdRtcPlayerInfo[streamid].audio_bitrate.push(audio_bitrate_trtc_pull);
1661
+ streamIdRtcPlayerInfo[streamid].video_fps.push(video_fps_trtc_pull);
1662
+ streamIdRtcPlayerInfo[streamid].video_bitrate.push(video_bitrate_trtc_pull);
1663
+ streamIdRtcPlayerInfo[streamid].pull_loss.push(_d.packetLoss);
1664
+ streamIdRtcPlayerInfo[streamid].pull_delay.push(_d.rtt);
1665
+
1666
+ streamIdRtcPlayerInfo[streamid].ctime.push(Math.round((new Date().getTime()+dataReport.timestamp)/1000));
1667
+ streamIdRtcPlayerInfo[streamid].video_ifg += _d.videoBlockRate;
1668
+ streamIdRtcPlayerInfo[streamid].audio_ifg += _d.audioBlockRate;
1669
+ // console.log('hsg_tpull_keys',JSON.parse(JSON.stringify(streamIdRtcPlayerInfo[streamid])),_d.avTimestampDiff);
1670
+ }
1671
+ }
1672
+ }
1673
+ // Push-stream statistics
1674
+ const pushKeys = ['fps', 'videoBitrate', 'afps', 'audioBitrate'];
1675
+ if (name === 'onLocalStatistics') {
1676
+ // console.log('hsgmzk',_d);
1677
+ // heartBeatDataReportObj.count++;
1678
+ // heartBeatRealKeys.forEach((realKey, index) => {
1679
+ // if (heartBeatDataReportObj.hasOwnProperty(realKey) && _d.hasOwnProperty(pushKeys[index])) {
1680
+ // heartBeatDataReportObj[realKey].push(parseFloat(parseInt(_d[pushKeys[index]])));
1681
+ // }
1682
+ // });
1683
+ let audio_fps_trtc_push = 0;
1684
+ let audio_bitrate_trtc_push = 0;
1685
+ let video_fps_trtc_push = 0;
1686
+ let video_bitrate_trtc_push = 0;
1687
+ dataTrtcCapture =_d;
1688
+
1689
+ if(zbysdk.deviceStatus.camera && zbysdk.deviceStatus.microphone) {
1690
+ audio_fps_trtc_push = parseFloat(parseInt(_d.afps));
1691
+ audio_bitrate_trtc_push = parseFloat(parseInt(_d.audioBitrate));
1692
+ video_fps_trtc_push = parseFloat(parseInt(_d.fps));
1693
+ video_bitrate_trtc_push= parseFloat(parseInt(_d.videoBitrate));
1694
+ } else if(!zbysdk.deviceStatus.camera && zbysdk.deviceStatus.microphone) {
1695
+ audio_fps_trtc_push = parseFloat(parseInt(_d.afps));
1696
+ audio_bitrate_trtc_push = parseFloat(parseInt(_d.audioBitrate));
1697
+ } else if(zbysdk.deviceStatus.camera && !zbysdk.deviceStatus.microphone) {
1698
+ video_fps_trtc_push = parseFloat(parseInt(_d.fps));
1699
+ video_bitrate_trtc_push = parseFloat(parseInt(_d.videoBitrate));
1700
+ }
1701
+ // console.log('hsg6666666',audio_fps_trtc_push,audio_bitrate_trtc_push,video_fps_trtc_push,video_bitrate_trtc_push);
1702
+ heartBeatDataReportObj.audio_fps.push(audio_fps_trtc_push);
1703
+ heartBeatDataReportObj.audio_bitrate.push(audio_bitrate_trtc_push);
1704
+ heartBeatDataReportObj.video_fps.push(video_fps_trtc_push);
1705
+ heartBeatDataReportObj.video_bitrate.push(video_bitrate_trtc_push);
1706
+ heartBeatDataReportObj.push_loss.push(_d.packetLoss);
1707
+ heartBeatDataReportObj.push_delay.push(_d.rtt);
1708
+
1709
+ heartBeatDataReportObj.ctime.push(Math.round((new Date().getTime()+dataReport.timestamp)/1000));
1710
+ // console.log('hsg_td5',JSON.parse(JSON.stringify(heartBeatDataReportObj)));
1711
+ }
1712
+ };
1713
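heartBeatDataReportCalc expects _data.json to be a JSON string; a hypothetical onLocalStatistics payload matching the fields read above (all numbers are made up):
    heartBeatDataReportCalc('onLocalStatistics', {
      json: JSON.stringify({ fps: 15, videoBitrate: 500, afps: 50, audioBitrate: 32, packetLoss: 0, rtt: 40 })
    });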
+
1714
+ // Push-stream report fields
1715
+ const resetHeartBeatDataReportObj = () => {
1716
+ // console.log('hsg_td55556 reset');
1717
+ heartBeatDataReportObj = {
1718
+ ctime: [],
1719
+ push_type: 'none',
1720
+ video_fps: [],
1721
+ video_bitrate: [],
1722
+ audio_fps: [],
1723
+ audio_bitrate: [],
1724
+ push_loss: [],
1725
+ push_delay: [],
1726
+ volume: ''
1727
+ };
1728
+ };
1729
+
1730
+ // Track the pull-stream type, used for data reporting
1731
+ const resetStreamIdRtcPlayerInfo1 = (streamId) => {
1732
+ streamIdRtcPlayerInfo1[streamId] = {
1733
+ audio_type: false,
1734
+ video_type: false,
1735
+ stream_type: 'none'
1736
+ };
1737
+ // console.log('hsgshgs_heartbeat',streamIdRtcPlayerInfo1);
1738
+ };
1739
+
1740
+ resetHeartBeatDataReportObj();
1741
+
1742
+ // Pull-stream report fields
1743
+ const resetStreamIdRtcPlayerInfo = (streamId) => {
1744
+ streamIdRtcPlayerInfo[streamId] = {
1745
+ pull_uid: util.getUidByStreamId(streamId),
1746
+ streamid: streamId,
1747
+ ctime: [],
1748
+ pull_type: streamIdRtcPlayerInfo1[streamId].stream_type,
1749
+ volume: '',
1750
+ video_fps: [],
1751
+ video_bitrate: [],
1752
+ audio_fps: [],
1753
+ audio_bitrate: [],
1754
+ pull_loss: [],
1755
+ pull_delay: [],
1756
+ // Audio/video out-of-sync field
1757
+ avtimestampdiff:[],
1758
+ // Accumulated values
1759
+ audio_ifg: 0,
1760
+ video_ifg: 0
1761
+ };
1762
+ };
1763
+
1764
+ export default {
1765
+ init,
1766
+ setCameraCaptureResolution,
1767
+ setCameraEncodeResolution,
1768
+ setCameraEncodeFps,
1769
+ setCameraEncodeBiarate,
1770
+ getCameraResolution,
1771
+ getMicrophoneDeviceListTrtc,
1772
+ setMicrophoneDevice,
1773
+ openOrCloseMicrophone,
1774
+ getCurrentMicrophoneVolume,
1775
+ setCurrentMicrophoneVolume,
1776
+ setCurrentCameraDevice,
1777
+ getCameraDeviceListTrtc,
1778
+ setVideoDevice,
1779
+ openOrCloseCamera,
1780
+ getSpeakerDeviceListTrtc,
1781
+ setSpeakerDevice,
1782
+ getCurrentSpeakerVolume,
1783
+ setCurrentSpeakerVolume,
1784
+ setAudioSpeakerCapture,
1785
+ startLocalOrRemotePreview,
1786
+ startPush,
1787
+ stopPush,
1788
+ startPlay,
1789
+ stopPlay,
1790
+ initPullFlow,
1791
+ pullAudioFlow,
1792
+ muteRemoteVideo,
1793
+ muteRemoteAudio,
1794
+ stopPlayAll,
1795
+ sendSEIMsg,
1796
+ startMultiScreen,
1797
+ enableHighlight,
1798
+ getAllChannelIndex,
1799
+ leaveRoom,
1800
+ teacherStartLinkMic,
1801
+ teacherStopLinkMic
1802
1802
  }