zby-live-sdk 1.0.49-beta-talrtc0922 → 1.0.49-beta-talrtc1014

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/.babelrc +5 -5
  2. package/.editorconfig +13 -13
  3. package/.eslintrc.js +29 -29
  4. package/CHANGELOG.md +381 -370
  5. package/README.md +276 -276
  6. package/dist/zby-live-sdk.cjs.js +4 -3
  7. package/dist/zby-live-sdk.esm.js +4 -3
  8. package/dist/zby-live-sdk.umd.js +4 -3
  9. package/package.json +1 -1
  10. package/src/channel/getSendMsgParams.js +66 -66
  11. package/src/channel/index.js +138 -138
  12. package/src/channel/pomelo/index.js +184 -184
  13. package/src/channel/pomelo/latestQueue.js +151 -151
  14. package/src/channel/pomelo/polemo.js +749 -749
  15. package/src/channel/pomelo/util.js +54 -54
  16. package/src/channel/sdk-cb.js +73 -73
  17. package/src/channel/stream-msg.js +97 -97
  18. package/src/channel/zby/index.js +74 -74
  19. package/src/channel/zby/interactWithChannel.js +4 -4
  20. package/src/channel/zby/interactWithChannelControl.js +1568 -1568
  21. package/src/channel/zby/interactWithChannelEntry.js +318 -318
  22. package/src/config/config.js +153 -153
  23. package/src/default/base.js +70 -70
  24. package/src/default/extend.js +36 -36
  25. package/src/default/index.js +9 -9
  26. package/src/live/base.js +42 -42
  27. package/src/live/call-method.js +9 -9
  28. package/src/live/extend.js +53 -53
  29. package/src/live/index.js +9 -9
  30. package/src/network/api.js +50 -50
  31. package/src/network/commonFetch.js +66 -66
  32. package/src/network/dataReport.js +429 -429
  33. package/src/notice.js +394 -394
  34. package/src/tool/base.js +74 -74
  35. package/src/tool/call-method.js +9 -9
  36. package/src/tool/extend.js +42 -42
  37. package/src/tool/index.js +9 -9
  38. package/src/util/bridge.js +87 -87
  39. package/src/util/bridge1.js +46 -46
  40. package/src/util/dict.js +51 -51
  41. package/src/util/sessionStorage.js +29 -29
  42. package/src/util/sha256.js +482 -482
  43. package/src/util/util.js +308 -308
  44. package/src/zby-av-sdk/agora-sdk.js +711 -711
  45. package/src/zby-av-sdk/device.js +145 -145
  46. package/src/zby-av-sdk/rtc-sdk.js +2839 -2839
  47. package/src/zby-av-sdk/talrtc-sdk.js +2392 -2348
  48. package/src/zby-av-sdk/trtc-sdk.js +1801 -1801
  49. package/src/zby-av-sdk/zby-av-sdk.js +1891 -1891
  50. package/src/zby-av-sdk/zego-sdk.js +2987 -2987
  51. package/src/zby-live-sdk.js +1561 -1557
@@ -1,2348 +1,2392 @@
1
- import dataReport from '../network/dataReport.js';
2
- import defaultApi from '../default';
3
- import NOTICE from '../notice';
4
- import zbysdk from '../zby-live-sdk.js';
5
- import { deviceListReport } from './device.js';
6
- import util from '../util/util';
7
-
8
-
9
- // 扩展标识
10
- const extensionId = 'talrtc_ext';
11
- // 端提供的 API 入口
12
- const EM = window.EM;
13
- // 是否执行过 TALRTC 的 AddListener 的标志
14
- let hasAddListener = false;
15
- // 监听id
16
- let EMListenerId = 0;
17
- // rtc小班课=0;rtc大班课=1
18
- const classType = 0;
19
- // 0: 小组课 1: 小班课 2: 新小班课
20
- let classMode = 0;
21
- // 心跳
22
- let heartBeatDataReportObj = {};
23
- let isFirstHeartBeatReport = false;
24
- // 维护的一份拉流的 streamId 与本地预览通道的映射表
25
- let streamIdToPreviewId = {};
26
- let previewIdToStreamId = {};
27
- let heartBeatDataReportTimer = null;
28
- let streamIdRtcPlayerInfo = {};
29
- let streamIdRtcPlayerInfo1 = {};
30
- // 拉流的播放通道初始化从 0 开始依次递增至44(共45路流),-1 为推流的本地视频预览
31
- const uiChnIndexs = new Array(100).fill(0).map((a, b) => b).reverse();
32
- // 维护的一份当前正在使用的音频设备的 deviceId 的列表
33
- let usingAudioDeviceId = {
34
- speaker: '',
35
- microphone: ''
36
- };
37
- let localStreamId = '';
38
- let screenStreamId = '';
39
- let isScreenSharing = false;
40
- let isSpeakerCapturing = false;
41
- let sdkVersion = null;
42
- const streamMuteStatus = {};
43
- const localStreamMuteStatus = {
44
- video: false,
45
- audio: false,
46
- };
47
- //记录底层推流回调的值
48
- let dataTalrtcCapture = {};
49
- let qualityLocalArr = [];
50
- let qualityRemoteArr = [];
51
- let localRoomId = null;
52
- let userId = null;
53
- let isNoticeMicVolume = false;
54
- let enableLocalCapture = true; // 本地摄像头采集,默认允许
55
- let teacherId = null;
56
- // /**
57
- // * 过滤直播云sdk中deviceId前缀
58
- // * @param {string} deviceId
59
- // * @returns {string}
60
- // */
61
- // const formatDeviceId = (deviceId) => {
62
- // return deviceId.replace(/^@device:\w+:/, '');
63
- // };
64
- const getStreamMuteStatus = (streamId, type) => {
65
- const streamStatus = streamMuteStatus[streamId] || {};
66
- return streamStatus[type] || false;
67
- };
68
- const setStreamMuteStatus = (streamId, type, status) => {
69
- const streamStatus = streamMuteStatus[streamId];
70
- if (streamStatus) {
71
- streamStatus[type] = status;
72
- return;
73
- }
74
- switch (type) {
75
- case 'video':
76
- streamMuteStatus[streamId] = {
77
- video: status,
78
- audio: false,
79
- };
80
- break;
81
- case 'audio':
82
- streamMuteStatus[streamId] = {
83
- video: false,
84
- audio: status,
85
- };
86
- break;
87
- default:break;
88
- }
89
- };
90
-
91
- const callMethod = (name, args) => {
92
- // EM 是寄宿于端的,浏览器中并不存在,为防止报错需要先进行能力检测
93
- if (EM) {
94
- return new Promise((resolve, reject) => {
95
- // let noneCamera = (name === 'SetCameraDevice' && !args.pszDeviceID);
96
- // let noneMicrophone = (name === 'SetAudioDevice' && args.deviceType === 0 && !args.pszDeviceID);
97
- // let noneSpeaker = (name === 'SetAudioDevice' && args.deviceType === 1 && !args.pszDeviceID);
98
- // if (noneCamera || noneMicrophone || noneSpeaker) {
99
- // return resolve();
100
- // }
101
- EM.CallMethod(
102
- extensionId,
103
- name,
104
- JSON.stringify({ ...args, classType }),
105
- (code, msg) => {
106
- defaultApi.writeLog(`${name} Code: ${code}\nMessage: ${msg}\nParams: ${JSON.stringify({ ...args, classType })}`);
107
- resolve({
108
- code,
109
- msg
110
- });
111
- }
112
- );
113
- });
114
- }
115
- };
116
-
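For reference (this sketch is not part of the diff): every native call funnels through the wrapper above, which resolves to a { code, msg } pair and returns undefined in a plain browser where window.EM does not exist. A minimal illustration of that contract, using the GetSDKVersion method invoked elsewhere in this file:

  // Illustrative only: callMethod is module-internal; this mirrors how the exported helpers consume it.
  const resp = await callMethod('GetSDKVersion', {});
  if (resp) {                      // undefined outside the native shell (no window.EM)
    const { code, msg } = resp;    // code: native status code, msg: JSON string payload
    console.log('GetSDKVersion ->', code, msg);
  }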
117
- const loadTalrtc = (extensionVersion) => {
118
- // EM 是寄宿于端的,浏览器中并不存在,为防止报错需要先进行能力检测
119
- if (EM) {
120
- return new Promise((resolve, reject) => {
121
- removerListener();
122
- console.log('-> load tal rtc sdk', extensionId, extensionVersion);
123
- EM.Load(
124
- extensionId,
125
- extensionVersion,
126
- false,
127
- (code, msg) => {
128
- defaultApi.writeLog(`loadTalrtc Code: ${code}\nMessage: ${msg}`);
129
- addListener();
130
- resolve();
131
- }
132
- );
133
- });
134
- }
135
- };
136
-
137
- //卸载监听
138
- const removerListener = () => {
139
- hasAddListener = false;
140
- defaultApi.writeLog(`TALRTC::action--removerListener EMListenerId:${EMListenerId}`);
141
- EM.RemoverListener(extensionId, EMListenerId, (ec, content) => {
142
- });
143
- EMListenerId = 0;
144
- };
145
-
146
- //加载监听
147
-
148
- /**
149
- * @function 添加扩展监听机制
150
- * @param userId:Number 用户 id,必选
151
- * @param userName:String 用户名,必选
152
- * @param roomId:String 频道(房间) id,必选
153
- * @param nNetType:Number 网络类型,可选,默认为 1
154
- * @return void
155
- */
156
- const addListener = () => {
157
- // EM 是寄宿于端的,浏览器中并不存在,为防止报错需要先进行能力检测
158
- if (EM && !hasAddListener) {
159
- hasAddListener = true;
160
- EM.AddListener(extensionId, (event, data) => {
161
- if (zbysdk.openListenerLog) {
162
- console.log(`avsdk TALRTC::Listener:: start event: ${event}, data: ${JSON.stringify(data)}`);
163
- }
164
- if (data && data.indexOf(extensionId) > -1) {
165
- try {
166
- EMListenerId = JSON.parse(data)[extensionId];
167
- defaultApi.writeLog(`TALRTC::addListener-- EMListenerId: ${EMListenerId}`);
168
- } catch (error) {
169
- }
170
- }
171
- if (!event || !data) {
172
- return;
173
- }
174
- let _data = JSON.parse(data);
175
- // defaultApi.writeLog(`TALRTC: event:: ${event} : ${data}`);
176
- switch (event) {
177
- // 推流器出现错误
178
- case 'onLocalError':
179
- defaultApi.writeLog(`TALRTC::addListener-- onLocalError: ${data}`);
180
- NOTICE.pushStreamError({
181
- errorStreamType:_data.streamType,
182
- code: _data.code,
183
- errorMsg: _data
184
- });
185
- dataReport.pushStreamError({
186
- errorStreamType:_data.streamType,
187
- code: _data.code
188
- });
189
- // defaultApi.writeLog(`TALRTC: event:: onLocalError: ${_data}`);
190
- break;
191
- // 直播推流器警告通知
192
- case 'onLocalWarning':
193
- defaultApi.writeLog(`TALRTC::addListener-- onLocalWarning: ${data}`);
194
- NOTICE.pushStreamWarning({
195
- warnStreamType:_data.streamType,
196
- code: _data.code
197
- });
198
- dataReport.pushStreamWarning({
199
- warnStreamType:_data.streamType,
200
- code: _data.code
201
- });
202
- break;
203
- // 首帧音频推送完成的回调通知
204
- case 'onSendLocalFirstAudioFrame':
205
- defaultApi.writeLog(`TALRTC: event:: onSendLocalFirstAudioFrame: ${data}`);
206
- break;
207
- // 采集首帧事件
208
- case 'OnCaptureVideoFirstFrame':
209
- defaultApi.writeLog(`TALRTC: event:: OnCaptureVideoFirstFrame: ${data}`);
210
- NOTICE.pushFlowSuccess({code:0,publish_streamid: _data.pszStreamID});
211
- dataReport.publishResult({
212
- code: '0',
213
- });
214
- break;
215
- // 本地音量
216
- case 'onLocalMicrophoneVolumeUpdate':
217
- // defaultApi.writeLog(`TALRTC: event:: onLocalMicrophoneVolumeUpdate: ${data}`);
218
- if (isNoticeMicVolume) {
219
- NOTICE.captureMicVolumeChanged({
220
- volume: Math.round(_data.volume)
221
- });
222
- }
223
- heartBeatDataReportObj.volume = heartBeatDataReportObj.volume + Math.round(_data.volume) + ',';
224
- break;
225
- // 推流器连接状态回调通知。推流器连接状态 1与服务器断开连接/2 正在连接服务器/3 连接服务器成功/4 重连服务器中 5
226
- case 'onLocalConnectStatusUpdate':
227
- defaultApi.writeLog(`TALRTC: event:: onLocalConnectStatusUpdate: ${data}`);
228
- dataReport.localConnectStatus({
229
- connectStatuStreamType:_data.streamType,
230
- state: _data.state
231
- });
232
- if (_data.state === 2 || _data.state === 1) {
233
- NOTICE.networkError();
234
- } else if (_data.state === 3) {
235
- NOTICE.networkRecovery();
236
- }
237
- break;
238
- // 网络质量的实时统计回调
239
- case 'onLocalNetworkQuality':
240
- // defaultApi.writeLog(`TALRTC: event:: onLocalNetworkQuality: ${data}`);
241
- if(_data.quality == 4 || _data.quality == 5 || _data.quality == 6) {
242
- NOTICE.localNetworkQuality({code: _data.quality});
243
- dataReport.localNetworkQuality({
244
- code: _data.quality,
245
- });
246
- }
247
- qualityLocalArr.push(_data.quality);
248
- break;
249
-
250
- // 远端流错误通知,拉流出现错误时,会回调该通知
251
- case 'onRemoteError':
252
- defaultApi.writeLog(`TALRTC: event:: onRemoteError: ${data}`);
253
- NOTICE.pullStreamError({
254
- strErrorStreamId: _data.streamId,
255
- code:_data.code
256
- });
257
- break;
258
- // 远端流警告通知,拉流出现警告时,会回调该通知
259
- case 'onRemoteWarning':
260
- defaultApi.writeLog(`TALRTC: event:: onRemoteWarning: ${data}`);
261
- NOTICE.pullStreamWarning({
262
- strWarnStreamId: _data.streamId,
263
- code:_data.code
264
- });
265
- break;
266
- // 卡顿事件,用于事件上报
267
- case 'stuckevent':
268
- defaultApi.writeLog(`TALRTC: event:: stuckevent: ${data}`);
269
- break;
270
- // 远端音频首帧
271
- case 'onRecvRemoteAudioFirstFrame':
272
- try{
273
- NOTICE.firstAudioSize({
274
- streamId: _data.streamId,
275
- userId: util.getUidByStreamId(_data.streamId)
276
- });
277
- dataReport.firstAudioSize({
278
- pull_streamid: _data.streamId,
279
- pull_uid: util.getUidByStreamIdDr(_data.streamId),
280
- code:'0'
281
- });
282
- } catch (e) { };
283
- defaultApi.writeLog(`TALRTC: event:: OnRecvRemoteAudioFirstFrame: ${data}`);
284
- break;
285
- // 远端流音量大小
286
- case 'onRemoteAudioVolume':
287
- // defaultApi.writeLog(`TALRTC: event:: onRemoteAudioVolume: ${data}}`);
288
- let cbData = {
289
- streamid: _data.streamId,
290
- streamId: _data.streamId,
291
- userId: util.getUidByStreamId(_data.streamId),
292
- volume: _data.volume
293
- };
294
- NOTICE.playerVolumeChanged(cbData);
295
- NOTICE.volumeChange(cbData);
296
- if(streamIdRtcPlayerInfo[_data.streamId]) {
297
- streamIdRtcPlayerInfo[_data.streamId].volume = streamIdRtcPlayerInfo[_data.streamId].volume + Math.round(_data.volume)+ ',';
298
- }
299
- break;
300
- // 远端视频首帧
301
- case 'onRenderRemoteVideoFirstFrame':
302
- try{
303
- // Test Code: 临时增加
304
- setTimeout(() => {
305
- NOTICE.pullFlowResult({code:0,pull_streamid: _data.streamId});
306
- NOTICE.firstVideoSize({
307
- streamId: _data.streamId,
308
- userId: util.getUidByStreamId(_data.streamId)
309
- });
310
- dataReport.firstVideoSize({
311
- pull_streamid: _data.streamId,
312
- pull_uid: util.getUidByStreamIdDr(_data.streamId),
313
- code:'0'
314
- });
315
- }, 100);
316
- } catch (e) { };
317
- defaultApi.writeLog(`TALRTC: event:: OnRenderRemoteVideoFirstFrame: ${data}`);
318
- break;
319
- // SEI消息
320
- case 'onRecvSEIMsg':
321
- defaultApi.writeLog(`TALRTC: event:: onRecvSEIMsg: ${data}`);
322
- break;
323
- // 网络质量的实时统计回调
324
- case 'onRemoteNetworkQuality':
325
- // defaultApi.writeLog(`TALRTC: event:: onRemoteNetworkQuality: ${data}`);
326
- if(_data.quality == 4 || _data.quality == 5 || _data.quality == 6) {
327
- NOTICE.remoteNetworkQuality({code: _data.quality});
328
- dataReport.remoteNetworkQuality({
329
- code: _data.quality,
330
- });
331
- }
332
- qualityRemoteArr.push(_data.quality);
333
- break;
334
-
335
- // 转推 CDN 上发布音视频流的事件回调
336
- case 'onRtmpStreamingStateChanged':
337
- defaultApi.writeLog(`TALRTC: event:: onRtmpStreamingStateChanged: ${data}`);
338
- break;
339
- // 设备异常
340
- case 'onDeviceError':
341
- defaultApi.writeLog(`TALRTC: event:: OnDeviceError: ${data}`);
342
- break;
343
- // 拉流的结果通知
344
- case 'onPlayStateUpdate':
345
- defaultApi.writeLog(`TALRTC: event:: onPlayStateUpdate: ${data}`);
346
- break;
347
- // 推流的结果通知
348
- case 'onPushStateUpdate':
349
- defaultApi.writeLog(`TALRTC: event:: onPushStateUpdate: ${data}`);
350
- if (_data.state === 3) {
351
- NOTICE.pushFlowSuccess({code:0,publish_streamid: _data.streamId});
352
- }
353
- break;
354
- // 推、拉流的统计数据。
355
- case 'onLocalStatistics':
356
- try {
357
- NOTICE.playLossAndDelay({
358
- userId,
359
- delay:_data.rtt,
360
- lostrate:_data.packetLoss
361
- });
362
- }catch(e){}
363
- heartBeatDataReportCalc('onLocalStatistics', _data);
364
- // defaultApi.writeLog(`TALRTC: event:: onStatisticsUpdate: ${_data}`);
365
- break;
366
- // 本地摄像头设备的通断状态发生变化
367
- case 'onRemoteStatistics':
368
- try {
369
- NOTICE.playLossAndDelay({
370
- userId: util.getUidByStreamId(_data.streamId),
371
- delay:_data.rtt,
372
- lostrate:_data.packetLoss
373
- });
374
- }catch(e){}
375
- heartBeatDataReportCalc('onRemoteStatistics', _data);
376
- // defaultApi.writeLog(`TALRTC: event:: onStatisticsUpdate: ${_data}`);
377
- break;
378
- // 本地摄像头设备的通断状态发生变化
379
- case 'onVideoDeviceChanged':
380
- defaultApi.writeLog(`TALRTC: event:: onVideoDeviceChanged: ${data}`);
381
- setCameraDevice({deviceId: _data.state == 1 ? _data.deviceId : '', operationType: 'hotPlug', deviceState: _data.state == 1 ? 0 : 1, changedDeviceId: _data.deviceId});
382
- break;
383
- /* 本地麦克风设备的通断状态发生变化
384
- * state:1:使用中 2:被禁用 4:一开始就没有?(我也不懂) 8:设备移除
385
- * deviceType:-1:未知 0:播放器 1:麦克风 2:视频显示设备 3:摄像头
386
- */
387
- case 'onAudioDeviceChanged':
388
- defaultApi.writeLog(`TALRTC: event:: onAudioDeviceChanged: ${data}`);
389
- if (_data.deviceType === 0) {
390
- setSpeakerDevice({deviceId: _data.state == 1 ? _data.deviceId : '', operationType: 'hotPlug', deviceState: _data.state == 1 ? 0 : 1, changedDeviceId: _data.deviceId});
391
- }
392
- if (_data.deviceType === 1) {
393
- setMicrophoneDevice({deviceId: _data.state == 1 ? _data.deviceId : '', operationType: 'hotPlug', deviceState: _data.state == 1 ? 0 : 1, changedDeviceId: _data.deviceId});
394
- }
395
- break;
396
-
397
- // mute 视频状态,true:远端开始推流,并拉流收到首帧;false:远端停止推流。
398
- case 'onRemoteVideoStatus':
399
- defaultApi.writeLog(`TALRTC: event:: onRemoteVideoStatus: ${data}`);
400
- break;
401
-
402
- // mute 音频状态,true:远端开始推流,并拉流收到首帧;false:远端停止推流。
403
- case 'onRemoteAudioStatus':
404
- defaultApi.writeLog(`TALRTC: event:: onRemoteAudioStatus: ${data}`);
405
- break;
406
- // 首帧视频推送完成的回调通知
407
- case 'onSendLocalFirstVideoFrame':
408
- defaultApi.writeLog(`TALRTC: event:: onSendLocalFirstVideoFrame: ${data}`);
409
- break;
410
- default:
411
- console.warn('warning: uncaught listener:', event);
412
- }
413
- if (zbysdk.openListenerLog) {
414
- console.log(`avsdk TALRTC::Listener:: event: ${event}, data: ${data}`);
415
- }
416
- });
417
- }
418
- };
419
-
420
-
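As an aside (not part of the diff): the listener above prints every raw event/data pair only when zbysdk.openListenerLog is truthy, and unrecognized events fall through to the console.warn default branch. A hypothetical way to turn that tracing on from a host page, assuming the package's default export is the same sdk object this file imports from '../zby-live-sdk.js':

  // Assumed entry point; adjust the import path to however the host app loads the SDK.
  import zbysdk from 'zby-live-sdk';
  zbysdk.openListenerLog = true;   // echo every TALRTC extension event to the console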
421
- // 初始化相关
422
- const init = async (args) => {
423
- defaultApi.writeLog(`avsdk TALRTC::init ${JSON.stringify(args)}`);
424
- const usedDevices = window.zbyAVSDK_device_checker_init;
425
- const { devices } = args;
426
- localRoomId = args.roomId;
427
- userId = args.userId;
428
- localStreamId = args.streamId;
429
- teacherId = args.teacherId;
430
- const _devices = {
431
- camera: (devices && devices.camera) || (usedDevices && usedDevices.camera && usedDevices.camera.use) || '',
432
- microphone: (devices && devices.microphone) || (usedDevices && usedDevices.microphone && usedDevices.microphone.use) || '',
433
- speaker: (devices && devices.speaker) || (usedDevices && usedDevices.speaker && usedDevices.speaker.use) || ''
434
- };
435
- console.log('-> load talrtc sdk. teacherId: ', teacherId);
436
- await loadTalrtc(args.extensionVersion);
437
- console.log('-> load talrtc sdk finished ');
438
- const resp = await startEngine(args.appId, args.userId, args.usersign || '67890', 7, args.live_id);
439
- console.log('----> start engine ', resp);
440
- console.log('-> start talrtc sdk success');
441
-
442
- classMode = args.classMode >> 0;
443
- // 0: 小组课 1: 小班课 2: 新小班课
444
- if (classMode === 1 || classMode === 2){
445
- if(args.role === 'teacher') {
446
- //小班课 需要加载采集插件的输入数据,为了拼接talrtc头像
447
- await loadCollectionInputEntry();
448
-
449
- //屏幕流通道
450
- screenStreamId = `${window.zby_sdk_init_params.institutionId}_${localRoomId}_${userId}_${args.live_id}_1`;
451
- await enableExternalVideoSource(true, screenStreamId);
452
- await setCameraEncodeFps(args.screenSameFps, screenStreamId);
453
- await setCameraCaptureResolution(args.screenSameResolutionWidth, args.screenSameResolutionHeight, screenStreamId);
454
- await setCameraEncodeResolution(args.screenSameResolutionWidth, args.screenSameResolutionHeight, screenStreamId);
455
- await setCameraEncodeBitrate(args.screenSameBitrate, screenStreamId);
456
-
457
- //rtc同屏以及高光时刻,都开启音频外部采集,默认传1
458
- await setAudioAuxSource(1);
459
- }
460
-
461
- if(args.noiseSuppressMode == -2) {
462
- console.log('noiseSuppressMode 关闭噪声消除',args.noiseSuppressMode);
463
- await setEnableNoiseSuppress(false);
464
- } else if(args.noiseSuppressMode == -1) {
465
- console.log('noiseSuppressMode 由底层进行配置,js层不进行配置',args.noiseSuppressMode);
466
- } else {
467
- console.log('noiseSuppressMode 启动噪音消除,模式为:',args.noiseSuppressMode);
468
- await setEnableNoiseSuppress(true);
469
- await setNoiseSuppressMode(args.noiseSuppressMode >> 0);
470
- await enableTransientNoiseSuppress(true);
471
- };
472
- //aecMode为-1是关闭回音消除,云控下传
473
- if(args.aecMode == -2) {
474
- console.log('aecMode 关闭回音消除',args.aecMode);
475
- await setEnableAEC(false);
476
- } else if(args.aecMode == -1) {
477
- console.log('aecMode 由底层进行配置,js层不进行配置',args.aecMode);
478
- } else {
479
- console.log('aecMode 启动回音消除,模式为:',args.aecMode);
480
- await setEnableAEC(true);
481
- await setAECMode(args.aecMode >> 0);
482
- };
483
- };
484
-
485
- if((args.role === 'teacher' || args.role === 'assistant') && classMode === 0){
486
- //坐立模式开启外部采集器,站立关闭外部采集器,只有小组课坐立模式才会调用,通过这个把rtmp数据给到talrtc(头像),开启了外部采集,talrtc不会再本地采集数据
487
- // V2 不区分坐立站立,老师和辅导老师都启动外部视频源
488
- await enableExternalVideoSource(true, localStreamId);
489
- enableLocalCapture = false; // 禁用本地摄像头采集,防止与其他进程抢占摄像头
490
- } else {
491
- await setDefaultDevice(_devices, 'default');
492
- }
493
- // 主通道
494
- await setCameraEncodeFps(args.encodeCaptureFps, localStreamId);
495
- await setCameraCaptureResolution(args.previewResolutionWidth, args.previewResolutionHeight, localStreamId);
496
- await setCameraEncodeResolution(args.encodedResolutionWidth, args.encodedResolutionHeight, localStreamId);
497
- await setCameraEncodeBitrate(args.encodeCaptureBitrate, localStreamId);
498
-
499
- await setMicphoneVolumInterval(500);
500
- await setMediaSideFlags();
501
- window.current_sdk_type = 'talrtc';
502
- defaultApi.writeLog('avsdk TALRTC::init finished current_sdk_type: talrtc');
503
- };
504
-
505
-
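For orientation (illustrative, not part of the diff): a hypothetical call into the module-internal init above. The field names are taken from the reads inside that function; every value is a placeholder:

  // Hypothetical values: only the field names come from the removed code above.
  await init({
    appId: 'talrtc-appid',              // TALRTC key forwarded to StartEngine
    userId: '10001',
    usersign: 'server-issued-sign',
    roomId: 'room-1',
    live_id: '20001',
    streamId: 'local-stream-id',
    teacherId: '10002',
    role: 'student',
    classMode: 1,                       // 0: group class, 1: small class, 2: new small class
    extensionVersion: '1.0.0',
    encodeCaptureFps: 15,
    previewResolutionWidth: 640, previewResolutionHeight: 360,
    encodedResolutionWidth: 640, encodedResolutionHeight: 360,
    encodeCaptureBitrate: 600,
    noiseSuppressMode: -1,              // -1: defer noise suppression to the native layer
    aecMode: -1,                        // -1: defer echo cancellation to the native layer
    devices: { camera: '', microphone: '', speaker: '' }
  });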
506
- /**
507
- * @function 初始化引擎
508
- * @param appid:String TalRTC 秘钥,必选
509
- * @param userid:String
510
- * @param usersign:String
511
- * @return Promise | void
512
- */
513
- const startEngine = (appid, userid, usersign, type, liveID) => {
514
- defaultApi.writeLog(`avsdk TALRTC::StartEngine appid: ${appid} userid: ${userid} usersign: ${usersign}, liveID: ${liveID}`);
515
- return callMethod('StartEngine', {
516
- appid,
517
- userid,
518
- usersign,
519
- type,
520
- liveID: '' + liveID
521
- });
522
- };
523
-
524
- //开启消除回声
525
- export const setEnableAEC = (enable) => {
526
- return callMethod('EnableAEC', {enable});
527
- };
528
-
529
- //设置回声消除模式 mode :激进模式 =0/中等模式 =1/轻度模式 =2
530
- export const setAECMode = (mode) => {
531
- return callMethod('SetAECMode', {mode});
532
- };
533
-
534
- //开启噪音消除
535
- export const setEnableNoiseSuppress = (enable) => {
536
- return callMethod('EnableNoiseSuppress', {enable});
537
- };
538
-
539
- //设置噪音消除模式 mode :0:低 1:中 2高
540
- export const setNoiseSuppressMode = (mode) => {
541
- return callMethod('SetNoiseSuppressMode', {mode});
542
- };
543
-
544
- //新增瞬时降噪接口 瞬时降噪 可以处理掉 敲键盘、敲桌子、敲门等瞬态噪声。非瞬态噪声,此接口无法处理,比如划桌子等摩擦噪音。
545
- export const enableTransientNoiseSuppress = (enable) => {
546
- return callMethod('EnableTransientNoiseSuppress',{enable});
547
- };
548
-
549
- /**
550
- * 加载采集插件的输入数据 //调用后,talrtc 可以向采集插件传送摄像头数据,用于拼接
551
- * @return {Promise}
552
- */
553
- export const loadCollectionInputEntry = () => {
554
- return callMethod('LoadCollectionInputEntry', {});
555
- };
556
-
557
- /**
558
- * 1、设置需要拼接的学生头像流id,按数组顺序拼接
559
- * 2、加载截屏插件,获取截屏插件设置拉流头像回调函数
560
- * @param {Array} stream_ids 拉流id数组,最大有效数量7个
561
- * @returns {Promise}
562
- */
563
- export const loadCollectionInputEntry2 = (stream_ids) => {
564
- return callMethod('LoadCollectionInputEntry2', {stream_ids});
565
- };
566
- //设置是否发送sei
567
- const setMediaSideFlags = (start = true, onlyAudioPublish = false, mediaInfoType = 2, seiSendType = 1, idx = 0) => {
568
- return callMethod('SetMediaSideFlags',{start,onlyAudioPublish, mediaInfoType, seiSendType, idx});
569
- };
570
-
571
- /**
572
- * @function 销毁引擎
573
- * @return Promise | void
574
- */
575
- const destroyEngine = () => {
576
- return callMethod('DestroyEngine', {}).then((...args) => {
577
- window.zbyAVSDK_init_sdk_type = 'rtc';
578
- return args;
579
- });
580
- };
581
-
582
- // 设置采集分辨率
583
- const setCameraCaptureResolution = (width, height, streamId = localStreamId) => {
584
- return callMethod('SetCameraCaptureResolution', { width, height, streamId });
585
- };
586
-
587
- // 设置编码分辨率
588
- const setCameraEncodeResolution = (width, height, streamId) => {
589
- return callMethod('SetCameraEncodeResolution', { width, height, streamId });
590
- };
591
-
592
- // 设置编码帧率
593
- const setCameraEncodeFps = (fps, streamId) => {
594
- return callMethod('SetCameraEncodeFps', { fps, streamId });
595
- };
596
-
597
- // 设置编码码率
598
- const setCameraEncodeBitrate = (bitrate, streamId) => {
599
- return callMethod('SetCameraEncodeBitrate', { bitrate, streamId });
600
- };
601
-
602
- // 获取指定摄像头的支持的分辨率
603
- const getCameraResolution = async (deviceId) => {
604
- const resp = await callMethod('GetCameraResolution', { deviceId });
605
- const cameraArr = JSON.parse(resp.msg || '{}').SupportedResolution;
606
- return {
607
- code: resp.code,
608
- msg: JSON.stringify(cameraArr)
609
- };
610
- };
611
-
612
- // 获取 rtc_sdk.dll 版本号
613
- const getSDKVersion = async () => {
614
- if (sdkVersion) {
615
- return sdkVersion;
616
- }
617
- return callMethod('GetSDKVersion', {}).then((e) => {
618
- sdkVersion = e;
619
- });
620
- };
621
-
622
- /**
623
- * @function 设置默认的硬件设备,包括摄像头、麦克风以及扬声器
624
- * @return Promise
625
- */
626
- export const setDefaultDevice = async (devices, operationType) => {
627
- // 设置默认的摄像头
628
- if (devices && devices.camera) {
629
- if (zbyAVSDK_device_checker_init.camera.list.length == 0) {
630
- await getCameraDeviceList();
631
- }
632
- await setCameraDevice({ deviceId: devices.camera, operationType: `${operationType}_1` });
633
- } else {
634
- const cameraData = await getCameraDeviceList();
635
- if (cameraData.length) {
636
- let hasSetCamera = false;
637
- for (let item of cameraData) {
638
- if (item.isDefault) {
639
- await setCameraDevice({ deviceId: item.deviceId, operationType: `${operationType}_2` });
640
- hasSetCamera = true;
641
- break;
642
- }
643
- }
644
- if (!hasSetCamera) {
645
- await setCameraDevice({ deviceId: cameraData[0].deviceId, operationType: `${operationType}_3` });
646
- }
647
- }
648
- }
649
- // 设置默认的麦克风
650
- if (devices && devices.microphone) {
651
- if (zbyAVSDK_device_checker_init.microphone.list.length == 0) {
652
- await getMicrophoneDeviceList();
653
- }
654
- console.log('pp2');
655
- await setMicrophoneDevice({ deviceId: devices.microphone, operationType: `${operationType}_1` });
656
- } else {
657
- console.log('pp3');
658
- const microPhoneData = await getMicrophoneDeviceList();
659
- try {
660
- if (microPhoneData.length) {
661
- let hasSetMicrophone = false;
662
- for (let item of microPhoneData) {
663
- if (item.isDefault) {
664
- await setMicrophoneDevice({ deviceId: item.deviceId, operationType: `${operationType}_2` });
665
- hasSetMicrophone = true;
666
- break;
667
- }
668
- }
669
- if (!hasSetMicrophone) {
670
- await setMicrophoneDevice({ deviceId: microPhoneData[0].deviceId, operationType: `${operationType}_3` });
671
- }
672
- }
673
- } catch (e) {
674
- console.log(e);
675
- }
676
-
677
- }
678
- // 设置默认的扬声器
679
- if (devices && devices.speaker) {
680
- await setSpeakerDevice({ deviceId: devices.speaker, operationType: `${operationType}_1` });
681
- } else {
682
- const speakerData = await getSpeakerDeviceListInternal();
683
- if (speakerData.length) {
684
- let hasSetSpeaker = false;
685
- for (let item of speakerData) {
686
- if (item.isDefault) {
687
- await setSpeakerDevice({ deviceId: item.deviceId, operationType: `${operationType}_2` });
688
- hasSetSpeaker = true;
689
- break;
690
- }
691
- }
692
- if (!hasSetSpeaker) {
693
- await setSpeakerDevice({ deviceId: speakerData[0].deviceId, operationType: `${operationType}_3` });
694
- }
695
- }
696
- }
697
- };
698
-
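A hypothetical call to the exported setDefaultDevice above; an empty string makes the helper enumerate the corresponding device list and fall back to the default (or first) entry:

  // Hypothetical ids: empty strings trigger the enumerate-and-pick-default path shown above.
  await setDefaultDevice({
    camera: '',                   // fall back to the default/first camera
    microphone: 'mic-device-id',  // use a specific microphone
    speaker: ''                   // fall back to the default/first speaker
  }, 'default');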
699
- // 麦克风相关
700
- /**
701
- * @function 开启麦克风
702
- * @param intervalMs: Number
703
- * @return Promise | void
704
- */
705
- const startMicrophone = async (intervalMs = 500) => {
706
- isNoticeMicVolume = true;
707
- await callMethod('MuteAudioCollect', { mute: false });
708
- return callMethod('StartMicrophone', { intervalMs });
709
- };
710
-
711
- /**
712
- * @function 关闭麦克风
713
- * @return Promise | void
714
- */
715
- const stopMicrophone = () => {
716
- isNoticeMicVolume = false;
717
- // 当小班课并且屏幕共享时,不要关闭麦克风,只要静音就可以,要不然底层会停止推所有音频流
718
- if (isScreenSharing) {
719
- return callMethod('MuteAudioCollect', { mute: true });
720
- } else {
721
- return callMethod('StopMicrophone', {});
722
- }
723
- };
724
-
725
- const openOrCloseMicrophone = async (operation) => {
726
- if (operation) {
727
- await startMicrophone();
728
- } else {
729
- await stopMicrophone();
730
- }
731
- };
732
-
733
- /**
734
- * @function 设置麦克风音量大小回调周期
735
- * @param intervalMs: Number
736
- * @return Promise | void
737
- */
738
- const setMicphoneVolumInterval = (intervalMs) => {
739
- return callMethod('SetMicphoneVolumInterval', { intervalMs });
740
- };
741
-
742
- /**
743
- * @function 获取麦克风设备列表
744
- * @return Promise | void
745
- */
746
- const getMicrophoneDeviceListInternal = () => {
747
- return callMethod('GetMicrophoneDeviceList', {});
748
- };
749
-
750
- /**
751
- * @function 获取麦克风列表
752
- * @return Promise | void
753
- */
754
- const getMicrophoneDeviceList = async () => {
755
- const resp = await getMicrophoneDeviceListInternal();
756
- let microphoneListArr = JSON.parse(JSON.parse(resp.msg).DeviceList) || [];
757
- console.log('getMicrophoneDeviceList from native', microphoneListArr);
758
- let microphoneList = [];
759
- for (let i = 0, len = microphoneListArr.length; i < len; i++) {
760
- let nameForChecking = microphoneListArr[i].name.toLowerCase();
761
- let checkIfIsDefaultFromName = (nameForChecking.indexOf('built-in') >= 0) ||
762
- (nameForChecking.indexOf('builtin') >= 0) ||
763
- (nameForChecking.indexOf('default') >= 0) ||
764
- (nameForChecking.indexOf('默认') >= 0) ||
765
- (nameForChecking.indexOf('默認') >= 0);
766
- microphoneList.push({
767
- deviceId: microphoneListArr[i].id,
768
- deviceName: microphoneListArr[i].name,
769
- isDefault: microphoneListArr[i].default || checkIfIsDefaultFromName
770
- });
771
- deviceListReport.micList[microphoneListArr[i].szDeviceId] = microphoneListArr[i].szDeviceName;
772
- }
773
- if (!window.zbyAVSDK_device_checker_init) {
774
- window.zbyAVSDK_device_checker_init = {};
775
- }
776
- if (!window.zbyAVSDK_device_checker_init.microphone) {
777
- window.zbyAVSDK_device_checker_init.microphone = {};
778
- }
779
- window.zbyAVSDK_device_checker_init.microphone.hasTest = true;
780
- window.zbyAVSDK_device_checker_init.microphone.list = microphoneList;
781
- console.log('getMicrophoneDeviceList', microphoneList);
782
- return microphoneList;
783
- };
784
-
785
- /**
786
- * @function 设置指定音频(当前麦克风)设备
787
- * @param deviceId: String 音频设备 id,必选
788
- * @return Promise | void
789
- */
790
- const setCurrentMicrophoneDevice = (deviceId) => {
791
- window.zbyAVSDK_device_checker_init.speaker.use = deviceId;
792
- return callMethod('SetCurrentMicrophoneDevice', {
793
- deviceId
794
- });
795
- };
796
-
797
- /**
798
- * @function 指定麦克风设备
799
- * @param deviceId: String 麦克风设备 id,必选
800
- * @param operationType: String 操作类型,可选
801
- * @return Promise | void
802
- */
803
- const setMicrophoneDevice = async (args) => {
804
- console.log('-> getMicrophoneDeviceList_microphoneListArr_tal_rtc_i');
805
- let { deviceId, operationType, deviceState, changedDeviceId } = args;
806
- let deviceName = '';
807
- let microPhoneData;
808
- if (!deviceId) {
809
- microPhoneData = await getMicrophoneDeviceList();
810
- if (microPhoneData.length) {
811
- let hasGetMicrophone = false;
812
- for (let item of microPhoneData) {
813
- if (item.isDefault) {
814
- deviceId = item.deviceId;
815
- deviceName = item.deviceName;
816
- hasGetMicrophone = true;
817
- break;
818
- }
819
- }
820
- if (!hasGetMicrophone) {
821
- deviceId = microPhoneData[0].deviceId;
822
- deviceName = microPhoneData[0].deviceName;
823
- }
824
- } else {
825
- deviceId = '';
826
- }
827
- if (deviceId === '') {
828
- NOTICE.noDevice({
829
- deviceType: 'microphone'
830
- });
831
- }
832
- }
833
- try {
834
- dataReport.setDevice({
835
- device_type: 2,
836
- device_id: deviceId,
837
- device_name: deviceListReport.micList[deviceId],
838
- operationType,
839
- fore_state: operationType == 'hotPlug' ? +deviceState + 1 : '-'
840
- });
841
- window.zbyAVSDK_device_checker_init.microphone.name = deviceListReport.micList[deviceId];
842
- } catch (e) {
843
- };
844
- await setCurrentMicrophoneDevice(deviceId);
845
- if (operationType == 'hotPlug' || operationType == 'deviceError') {
846
- if (!microPhoneData) {
847
- microPhoneData = await getMicrophoneDeviceList();
848
- deviceName = deviceListReport.micList[deviceId];
849
- }
850
- NOTICE[operationType]({
851
- deviceType: 'microphone',
852
- useDeviceId: deviceId,
853
- useDeviceName: deviceName,
854
- deviceList: microPhoneData,
855
- deviceState,
856
- changedDeviceId
857
- });
858
- }
859
- };
860
-
861
- /**
862
- * @function 获取当前麦克风的音量
863
- * @return Promise | void
864
- */
865
- const getCurrentMicrophoneVolume = () => {
866
- return callMethod('GetCurrentMicrophoneVolume', {}).then(ret => {
867
- let volume = 0;
868
- try {
869
- volume = Math.round(JSON.parse(ret.msg).microphoneVolume / 255 * 100);
870
- } catch (e) {
871
- console.error(`zby-live-sdk: getCurrentMicrophoneVolume ret: ${ret}. error: ${e}`);
872
- }
873
- return volume;
874
- });
875
- };
876
-
877
- /**
878
- * @function 设置当前麦克风的音量
879
- * @param volume: number 音量值
880
- * @return Promise | void
881
- */
882
- const setCurrentMicrophoneVolume = (volume) => {
883
- return callMethod('SetCurrentMicrophoneVolume', {
884
- volume: Math.round(volume / 100 * 255)
885
- });
886
- };
887
-
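The two helpers above convert between the native 0-255 volume scale and the 0-100 scale exposed to callers; a worked example of that arithmetic (no SDK calls involved):

  // Same scaling as GetCurrentMicrophoneVolume / SetCurrentMicrophoneVolume above.
  const toPercent = (v255) => Math.round(v255 / 255 * 100);  // native -> caller scale
  const toNative = (v100) => Math.round(v100 / 100 * 255);   // caller -> native scale
  toPercent(128);  // 50
  toNative(50);    // 128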
888
- /**
889
- * @function 获取当前麦克风ID
890
- * @return Promise | void
891
- */
892
- const getCurrentMicrophoneId = (volume) => {
893
- return callMethod('GetCurrentMicrophoneId', { volume });
894
- };
895
-
896
- // 摄像头相关
897
- /**
898
- * @function 打开摄像头
899
- * @param width: Number
900
- * @param height: Number
901
- * @return Promise | void
902
- */
903
- const startCamera = (width, height) => {
904
- return callMethod('StartCamera', { width, height, streamId: localStreamId });
905
- };
906
-
907
- /**
908
- * @function 关闭摄像头
909
- * @return Promise | void
910
- */
911
- const stopCamera = () => {
912
- return callMethod('StopCamera', { streamId: localStreamId });
913
- };
914
-
915
- const openOrCloseCamera = async (operation) => {
916
- console.log('openOrCloseCamera', operation);
917
- if (operation) {
918
- await startCamera();
919
- } else {
920
- await stopCamera();
921
- }
922
- };
923
-
924
- /**
925
- * @function 获取摄像头列表
926
- * @return Promise | void
927
- */
928
- const getCameraDeviceListInternal = () => {
929
- return callMethod('GetCameraDeviceList', {});
930
- };
931
-
932
- /**
933
- * @function 获取摄像头列表
934
- * @return Promise | void
935
- */
936
- export const getCameraDeviceList = async () => {
937
- const resp = await getCameraDeviceListInternal();
938
- let videoListArr = JSON.parse(JSON.parse(resp.msg).DeviceList) || [];
939
- console.log('getCameraDeviceList from native', videoListArr);
940
- let videoList = [];
941
- for (let i = 0, len = videoListArr.length; i < len; i++) {
942
- let nameForChecking = videoListArr[i].name.toLowerCase();
943
- let checkIfIsDefaultFromName = (nameForChecking.indexOf('built-in') >= 0) ||
944
- (nameForChecking.indexOf('builtin') >= 0) ||
945
- (nameForChecking.indexOf('default') >= 0) ||
946
- (nameForChecking.indexOf('默认') >= 0) ||
947
- (nameForChecking.indexOf('默認') >= 0);
948
- videoList.push({
949
- deviceId: videoListArr[i].id,
950
- deviceName: videoListArr[i].name,
951
- isDefault: videoListArr[i].default || checkIfIsDefaultFromName
952
- });
953
- deviceListReport.cameraList[videoListArr[i].szDeviceId] = videoListArr[i].szDeviceName;
954
- }
955
- if (!window.zbyAVSDK_device_checker_init) {
956
- window.zbyAVSDK_device_checker_init = {};
957
- }
958
- if (!window.zbyAVSDK_device_checker_init.camera) {
959
- window.zbyAVSDK_device_checker_init.camera = {};
960
- }
961
- window.zbyAVSDK_device_checker_init.camera.hasTest = true;
962
- window.zbyAVSDK_device_checker_init.camera.list = videoList;
963
- console.log('getCameraDeviceList', videoList);
964
- return videoList;
965
- };
966
-
967
- /**
968
- * @function 设置指定视频(当前摄像头)设备
969
- * @param deviceId: String 视频设备 id,必选
970
- * @return Promise | void
971
- */
972
- const setCurrentCameraDeviceInternal = (deviceId) => {
973
- window.zbyAVSDK_device_checker_init.speaker.use = deviceId;
974
- return callMethod('SetCurrentCameraDevice', {
975
- streamId: localStreamId,
976
- deviceId
977
- });
978
- };
979
-
980
- /**
981
- * @function 指定使用的摄像头
982
- * @param deviceId: String 摄像头 id,必选
983
- * @param operationType: String 操作类型,可选
984
- * 'device_error' -> 设备出错处理,'plug_and_unplug' -> 热插拔处理,
985
- * 不传即是普通的设置摄像头设备的行为
986
- * @return Promise | void
987
- */
988
- const setCameraDevice = async (args) => {
989
- defaultApi.writeLog(`setCameraDevice ${JSON.stringify(args)} enableLocalCapture: ${enableLocalCapture}`);
990
- if (!enableLocalCapture) {
991
- return;
992
- }
993
- let { deviceId, operationType, deviceState, code, changedDeviceId } = args;
994
- let deviceName = '';
995
- let cameraData;
996
- if (!deviceId) {
997
- cameraData = await getCameraDeviceList();
998
-
999
- if (cameraData.length) {
1000
- let hasGetCamare = false;
1001
- for (let item of cameraData) {
1002
- if (item.isDefault) {
1003
- deviceId = item.deviceId;
1004
- deviceName = item.deviceName;
1005
- hasGetCamare = true;
1006
- break;
1007
- }
1008
- }
1009
- if (!hasGetCamare) {
1010
- deviceId = cameraData[0].deviceId;
1011
- deviceName = cameraData[0].deviceName;
1012
- }
1013
- } else {
1014
- deviceId = '';
1015
- }
1016
-
1017
- if (deviceId === '') {
1018
- NOTICE.noDevice({
1019
- deviceType: 'camera'
1020
- });
1021
- }
1022
- }
1023
- // deviceId = formatDeviceId(deviceId);
1024
-
1025
- window.zbyAVSDK_device_checker_init.camera.use = deviceId;
1026
- window.zbyAVSDK_device_checker_init.camera.name = deviceListReport.cameraList[deviceId];
1027
- try {
1028
- dataReport.setDevice({
1029
- device_type: 1,
1030
- device_id: deviceId,
1031
- device_name: deviceListReport.cameraList[deviceId],
1032
- operationType,
1033
- fore_state: operationType == 'hotPlug' ? deviceState + 1 : '-'
1034
- });
1035
- } catch (e) {
1036
- };
1037
-
1038
- await setCurrentCameraDeviceInternal(deviceId);
1039
-
1040
- if (operationType == 'hotPlug' || operationType == 'deviceError') {
1041
- if (!cameraData) {
1042
- cameraData = await getCameraDeviceList();
1043
- deviceName = deviceListReport.cameraList[deviceId];
1044
- }
1045
- NOTICE[operationType]({
1046
- deviceType: 'camera',
1047
- useDeviceId: deviceId,
1048
- useDeviceName: deviceName,
1049
- deviceList: cameraData,
1050
- messge: code,
1051
- deviceState,
1052
- changedDeviceId
1053
- });
1054
- }
1055
- try {
1056
- NOTICE.useredCamera({
1057
- deviceId,
1058
- deviceName
1059
- });
1060
- } catch (e) {
1061
- };
1062
- };
1063
-
1064
- /**
1065
- * @function 获取当前摄像头 id
1066
- * @return Promise | void
1067
- */
1068
- const GetCurrentCameraId = () => {
1069
- return callMethod('GetCurrentCameraId', {});
1070
- };
1071
-
1072
- // 扬声器相关
1073
-
1074
- /**
1075
- * @function 获取扬声器列表
1076
- * @return Promise | void
1077
- */
1078
- const getAudioDeviceList = () => {
1079
- return callMethod('GetAudioDeviceList', {});
1080
- };
1081
-
1082
- /**
1083
- * @function 获取系统默认的音频设备
1084
- * @param deviceType: Number 音频设备类型,0 -> 麦克风,1 -> 扬声器,必选
1085
- * @return Promise | void
1086
- */
1087
- const getDefaultAudioDeviceId = (deviceType) => {
1088
- return callMethod('GetDefaultAudioDeviceId', {
1089
- deviceType,
1090
- // 以下两个参数仅仅是为了向下向后兼容,使用 hardcore 即可
1091
- deviceId: '',
1092
- deviceIdLength: 0
1093
- });
1094
- };
1095
-
1096
- /**
1097
- * @function 获取系统默认的扬声器设备 id
1098
- * @return Promise
1099
- */
1100
- const getDefaultSpeaker = () => {
1101
- return getDefaultAudioDeviceId(1);
1102
- };
1103
-
1104
- /**
1105
- * @function 获取扬声器设备列表
1106
- * @return Promise | void
1107
- */
1108
- const getSpeakerDeviceListInternal = () => {
1109
- return callMethod('GetSpeakerDeviceList', {});
1110
- };
1111
-
1112
- /**
1113
- * @function 获取扬声器列表
1114
- * @return Promise | void
1115
- */
1116
- const getSpeakerDeviceList = async () => {
1117
- const resp = await getSpeakerDeviceListInternal();
1118
- let speakerListArr = JSON.parse(JSON.parse(resp.msg).DeviceList) || [];
1119
- console.log('getSpeakerDeviceList from native', speakerListArr);
1120
- let speakerList = [];
1121
- for (let i = 0, len = speakerListArr.length; i < len; i++) {
1122
- let nameForChecking = speakerListArr[i].name.toLowerCase();
1123
- let checkIfIsDefaultFromName = (nameForChecking.indexOf('built-in') >= 0) ||
1124
- (nameForChecking.indexOf('builtin') >= 0) ||
1125
- (nameForChecking.indexOf('default') >= 0) ||
1126
- (nameForChecking.indexOf('默认') >= 0) ||
1127
- (nameForChecking.indexOf('默認') >= 0);
1128
- speakerList.push({
1129
- deviceId: speakerListArr[i].id,
1130
- deviceName: speakerListArr[i].name,
1131
- isDefault: speakerListArr[i].default || checkIfIsDefaultFromName
1132
- });
1133
- deviceListReport.speakerList[speakerListArr[i].szDeviceId] = speakerListArr[i].szDeviceName;
1134
- }
1135
- if (!window.zbyAVSDK_device_checker_init) {
1136
- window.zbyAVSDK_device_checker_init = {};
1137
- }
1138
- if (!window.zbyAVSDK_device_checker_init.speaker) {
1139
- window.zbyAVSDK_device_checker_init.speaker = {};
1140
- }
1141
- window.zbyAVSDK_device_checker_init.speaker.hasTest = true;
1142
- window.zbyAVSDK_device_checker_init.speaker.list = speakerList;
1143
- console.log('getSpeakerDeviceList', speakerList);
1144
- return speakerList;
1145
- };
1146
-
1147
- /**
1148
- * @function 设置指定音频(当前扬声器)设备
1149
- * @param deviceId: String 音频设备 id,必选
1150
- * @return Promise | void
1151
- */
1152
- const setCurrentSpeakerDevice = (deviceId) => {
1153
- window.zbyAVSDK_device_checker_init.speaker.use = deviceId;
1154
- return callMethod('SetCurrentSpeakerDevice', {
1155
- deviceId
1156
- });
1157
- };
1158
-
1159
- /**
1160
- * @function 指定扬声器
1161
- * @param deviceId: String 扬声器 id,必选
1162
- * @param operationType:String 操作类型,可选
1163
- * 'device_error' -> 设备出错处理,'plug_and_unplug' -> 热插拔处理,
1164
- * 不传即是普通的设置扬声器设备的行为
1165
- * @return Promise | void
1166
- */
1167
- const setSpeakerDevice = async (args) => {
1168
- defaultApi.writeLog(`${JSON.stringify(args)}----setSpeakerDevice-talrtc----`);
1169
- console.log('hsghsghsg999', args);
1170
- let { deviceId, operationType, deviceState, code, changedDeviceId } = args;
1171
- let deviceName = '';
1172
- let speakerData;
1173
- if (!deviceId) {
1174
- speakerData = await getSpeakerDeviceList();
1175
- console.log('hsghsghsg0', speakerData.length);
1176
- if (speakerData.length) {
1177
- let hasGetSpeaker = false;
1178
- console.log('hsghsghsg1', hasGetSpeaker);
1179
- for (let item of speakerData) {
1180
- if (item.isDefault) {
1181
- deviceId = item.deviceId;
1182
- deviceName = item.deviceName;
1183
- hasGetSpeaker = true;
1184
- break;
1185
- }
1186
- }
1187
- console.log('hsghsghsg2', hasGetSpeaker);
1188
- if (!hasGetSpeaker) {
1189
- deviceId = speakerData[0].deviceId;
1190
- deviceName = speakerData[0].deviceName;
1191
- console.log('hsghsghsg91', deviceId);
1192
- }
1193
- } else {
1194
- deviceId = '';
1195
- }
1196
- if (deviceId === '') {
1197
- NOTICE.noDevice({
1198
- deviceType: 'speaker'
1199
- });
1200
- }
1201
- }
1202
- console.log('hsghsghsg999', deviceId);
1203
- try {
1204
- dataReport.setDevice({
1205
- device_type: 3,
1206
- device_id: deviceId,
1207
- device_name: deviceListReport.speakerList[deviceId],
1208
- operationType,
1209
- fore_state: operationType == 'hotPlug' ? deviceState + 1 : '-'
1210
- });
1211
- window.zbyAVSDK_device_checker_init.speaker.name = deviceListReport.speakerList[deviceId];
1212
- } catch (e) {
1213
- console.log(e);
1214
- };
1215
- console.log('-> set deviceId:', deviceId);
1216
- usingAudioDeviceId.speaker = deviceId;
1217
- await setCurrentSpeakerDevice(deviceId);
1218
- if (operationType == 'hotPlug' || operationType == 'deviceError') {
1219
- if (!speakerData) {
1220
- speakerData = await getSpeakerDeviceList();
1221
- deviceName = deviceListReport.speakerList[deviceId];
1222
- }
1223
- console.log('hsghsghsg99911', deviceId);
1224
- NOTICE[operationType]({
1225
- deviceType: 'speaker',
1226
- useDeviceId: deviceId,
1227
- useDeviceName: deviceName,
1228
- deviceList: speakerData,
1229
- message: code,
1230
- deviceState,
1231
- changedDeviceId
1232
- });
1233
- }
1234
- console.log('hsghsghsg999112', deviceId);
1235
- };
1236
-
1237
- /**
1238
- * @function 获取当前扬声器的音量
1239
- * @returns {Promise<number>}
1240
- */
1241
- const getCurrentSpeakerVolume = () => {
1242
- return callMethod('GetCurrentSpeakerVolume', {}).then(ret => {
1243
- let volume = 0;
1244
- try {
1245
- volume = Math.round(JSON.parse(ret.msg).speakerVolume / 255 * 100);
1246
- } catch (e) {
1247
- console.error(`zby-live-sdk: getCurrentSpeakerVolume ret: ${ret}. error: ${e}`);
1248
- }
1249
- return volume;
1250
- });
1251
- };
1252
-
1253
- /**
1254
- * @function 获取当前扬声器ID
1255
- * @return Promise | void
1256
- */
1257
- const getCurrentSpeakerId = () => {
1258
- return callMethod('GetCurrentSpeakerId', {});
1259
- };
1260
-
1261
- /**
1262
- * @function 是否存在媒体流
1263
- * @param streamId
1264
- * @returns {boolean}
1265
- */
1266
- export const hasStream = streamId => {
1267
- return Object.keys(streamIdToPreviewId).includes(streamId);
1268
- };
1269
-
1270
-
1271
- /**
1272
- * @function 获取当前视频流的通道
1273
- * @param streamId id
1274
- * @return channelIndex
1275
- */
1276
- export const getChannelIndex = (streamId) => {
1277
- defaultApi.writeLog(`avsdk TALSDK::getChannelIndex streamId: ${streamId} ${JSON.stringify(streamIdToPreviewId)}`);
1278
- return streamIdToPreviewId[streamId];
1279
- };
1280
-
1281
- /**
1282
- * @function 获取当前视频流所有的通道
1283
- * @param streamId id
1284
- * @return channelIndex
1285
- */
1286
- export const getAllChannelIndex = () => {
1287
- defaultApi.writeLog(`avsdk TALSDK::getAllChannelIndex ${JSON.stringify(streamIdToPreviewId)}`);
1288
- return streamIdToPreviewId;
1289
- };
1290
-
1291
-
1292
- /**
1293
- * @function 设置当前扬声器音量
1294
- * @param volume: number 音量值
1295
- * @return Promise | void
1296
- */
1297
- const setCurrentSpeakerVolume = (volume) => {
1298
- return callMethod('SetCurrentSpeakerVolume', {
1299
- volume: Math.round(volume / 100 * 255)
1300
- });
1301
- };
1302
-
1303
- /**
1304
- * @function 设置扬声器静音
1305
- * @param mute: Boolean 必选
1306
- * @return Promise | void
1307
- */
1308
- const setSpeakerDeviceMute = (mute) => {
1309
- return callMethod('SetSpeakerDeviceMute', {
1310
- deviceId: usingAudioDeviceId.speaker,
1311
- mute
1312
- });
1313
- };
1314
-
1315
- /**
1316
- * @function 获取当前应用程序音量
1317
- * @return Promise | void
1318
- */
1319
- const getSpeakerSimpleVolume = () => {
1320
- return callMethod('GetSpeakerSimpleVolume', {
1321
- deviceId: usingAudioDeviceId.speaker
1322
- }).then(ret => {
1323
- let volume = 0;
1324
- try {
1325
- volume = Math.round(JSON.parse(ret.msg).speakerVolume / 255 * 100);
1326
- } catch (e) {
1327
- console.error(`zby-live-sdk: getSpeakerSimpleVolume ret: ${ret}. error: ${e}`);
1328
- }
1329
- return volume;
1330
- });
1331
- };
1332
-
1333
- /**
1334
- * @function 设置当前应用程序音量
1335
- * @param volume: Number 应用程序音量,必选
1336
- * @return Promise | void
1337
- */
1338
- const setSpeakerSimpleVolume = (volume) => {
1339
- return callMethod('SetSpeakerSimpleVolume', {
1340
- deviceId: usingAudioDeviceId.speaker,
1341
- volume: Math.round(volume / 100 * 255)
1342
- });
1343
- };
1344
-
1345
- /**
1346
- * @function 设置当前应用程序静音
1347
- * @param mute: Boolean 是否静音,必选
1348
- * @return Promise | void
1349
- */
1350
- const setSpeakerSimpleMute = (mute) => {
1351
- return callMethod('SetSpeakerSimpleMute', {
1352
- deviceId: usingAudioDeviceId.speaker,
1353
- mute
1354
- });
1355
- };
1356
-
1357
- const enableAudioSpeakerCapture = (capture) => {
1358
- setSystemAudioLoopback(capture);
1359
- };
1360
-
1361
- /**
1362
- * @function 打开系统声音采集
1363
- * @enable true|false
1364
- * @return Promise | void
1365
- */
1366
- const setSystemAudioLoopback = (enable) => {
1367
- isSpeakerCapturing = enable;
1368
- return callMethod('SetAudioSpeakerCapture', {
1369
- enable,
1370
- });
1371
- };
1372
-
1373
- /**
1374
- * @function 录制转推对外接口
1375
- * @param type: String start/stop 必选
1376
- * @param targetUrl: String 转推地址(支持 RTMP) 必选
1377
- * @return Promise | void
1378
- */
1379
- const controlCdnStreaming = async (type) => {
1380
- const targetUrl = 'rtmp://media-push-ali-test.livecloud.eaydu.com/testlive/210926-11?key=75c868672113df0063430e9e47fb2a60&appId=zt10001&bizId=zt10001AI&cdn=1';
1381
- defaultApi.writeLog(`avsdk TALRTC::controlCdnStreaming targetUrl: ${targetUrl} type: ${type}`);
1382
- if (type === 'start') {
1383
- return addPublishRtmpStreamUrl(targetUrl);
1384
- }
1385
- if (type === 'stop') {
1386
- return removePublishStreamUrl(targetUrl);
1387
- }
1388
- };
1389
-
1390
- /**
1391
- * @function SEI 消息
1392
- * @param data: String 数据
1393
- * @param dataSize: Number 数据长度
1394
- * @param repeatCount: Number 重试次数
1395
- * @return Promise | void
1396
- */
1397
- const sendSEIMsg = (data) => {
1398
- return callMethod('SendSEIMsg', { data: data, dataSize: data.length, repeatCount: 3 });
1399
- };
1400
-
1401
- /**
1402
- * @function 开始屏幕采集
1403
- * @return Promise | void
1404
- */
1405
- const startScreenCapture = () => {
1406
- return callMethod('StartScreenCapture', {});
1407
- };
1408
-
1409
- /**
1410
- * @function 停止屏幕采集
1411
- * @return Promise | void
1412
- */
1413
- const stopScreenCapture = () => {
1414
- return callMethod('StopScreenCapture', {});
1415
- };
1416
-
1417
-
1418
- // 推拉流相关
1419
- /**
1420
- * @function 开启本地或者远程的视频视图
1421
- * @param isLocal: Boolean 是否是本地的视频预览,必选
1422
- * @param streamId: String 要拉取的视频流的 id,可选,只有拉取远程的视频流的时候才是必选的
1423
- * @param domId: String <video> 标签的 id,可选
1424
- * 如果传了 domId,就把视频绑定到对应的 <video> 标签上
1425
- * @return Promise 可从 Promise 中获取 src,Promise.then((src) => {})
1426
- */
1427
- const startLocalOrRemotePreview = async (isLocal, streamId, domId, isTeacherNewWindowPreview) => {
1428
- defaultApi.writeLog(`avsdk TALSDK::startLocalOrRemotePreview streamId: ${streamId}, isLocal: ${isLocal}`);
1429
- let isUpdateChromeVersion = await util.getChromeVersion();
1430
- const externalConstraints = {
1431
- audio: false,
1432
- video: {
1433
- mandatory: {
1434
- chromeMediaSource: 'external',
1435
- chromeMediaSourceId: `ems://talrtc_ext/${isLocal || isTeacherNewWindowPreview ? -2 : streamId}`
1436
- }
1437
- }
1438
- };
1439
-
1440
- console.log('externalConstraints', externalConstraints);
1441
-
1442
- return new Promise((resolve, reject) => {
1443
- const handleExternalSuccess = (stream) => {
1444
- defaultApi.writeLog(`${stream}----stream-talrtc----`);
1445
- stream.oninactive = () => {
1446
- defaultApi.writeLog('Stream inactive');
1447
- };
1448
- const src = isUpdateChromeVersion ? stream : window.URL.createObjectURL(stream);
1449
- console.log('内核升级', isUpdateChromeVersion, domId, src);
1450
- if (domId && document.querySelector(domId)) {
1451
- if (isUpdateChromeVersion) {
1452
- document.querySelector(domId).srcObject = stream;
1453
- console.log('内核升级1', domId, document.querySelector(domId), document.querySelector(domId).srcObject, src);
1454
- } else {
1455
- console.log('内核升级2');
1456
- document.querySelector(domId).src = window.URL.createObjectURL(stream);
1457
- }
1458
- }
1459
- defaultApi.writeLog(`-> setVidoeSrc, streamId:${streamId}, domId: ${domId}, src: ${src}`);
1460
- resolve(src);
1461
- };
1462
-
1463
- const handleExternalError = (error) => {
1464
- if (error.name === 'ConstraintNotSatisfiedError') {
1465
- console.error('ConstraintNotSatisfiedError');
1466
- } else if (error.name === 'PermissionDeniedError') {
1467
- console.error(
1468
- 'Permissions have not been granted to use your camera and '
1469
- + 'microphone, you need to allow the page access to your devices in '
1470
- + 'order for the demo to work.'
1471
- );
1472
- }
1473
- console.error(`getUserMedia error: ${error.name}`, error);
1474
- if (domId) {
1475
- document.querySelector(domId).src = '';
1476
- }
1477
- reject('');
1478
- };
1479
-
1480
- if (navigator.webkitGetUserMedia) {
1481
- navigator.webkitGetUserMedia(
1482
- externalConstraints,
1483
- handleExternalSuccess,
1484
- handleExternalError
1485
- );
1486
- }
1487
- });
1488
- };
1489
-
1490
- /**
1491
- * @function 开始推流
1492
- * @param streamId: String 流 id
1493
- * @return Promise | void
1494
- */
1495
- const startPush = async (streamId) => {
1496
- if (!streamId) {
1497
- streamId = localStreamId;
1498
- }
1499
-
1500
- if (streamId === screenStreamId) {
1501
- isScreenSharing = true;
1502
- }
1503
- setAudioType(4, localStreamId); // 设置采集类型,不用await,避免接口挂起导致未推流
1504
-
1505
- defaultApi.writeLog(`avsdk TALSDK::startPush streamId: ${streamId}`);
1506
- try {
1507
- if (!isFirstHeartBeatReport) {
1508
- isFirstHeartBeatReport = true;
1509
- heartBeatDataReport('start');
1510
- }
1511
- } catch (error) {}
1512
- const muteAudio = localStreamMuteStatus.audio;
1513
- const muteVideo = localStreamMuteStatus.video;
1514
- return callMethod('StartPush', {
1515
- streamId,
1516
- muteAudio,
1517
- muteVideo,
1518
- muteDefaultAudio: classMode === 1 || classMode === 2,
1519
- muteDefaultVideo: classMode === 1 || classMode === 2
1520
- });
1521
- };
1522
-
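A hypothetical pair of calls around the helper above: with no argument it pushes the main camera stream (localStreamId), while passing the screen stream id built in init sets isScreenSharing, which in turn changes how stopMicrophone behaves:

  // Illustrative only: screenStreamId is the module-level id init builds for teachers in small-class mode.
  await startPush();                // push the local camera/mic stream
  await startPush(screenStreamId);  // push the screen-share stream; sets isScreenSharing = true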
1523
- /**
1524
- * @function 停止推流
1525
- * @param streamId: String 流 id
1526
- * @return Promise | void
1527
- */
1528
- const stopPush = (streamId) => {
1529
- if (!streamId) {
1530
- streamId = localStreamId;
1531
- }
1532
-
1533
- if (streamId === screenStreamId) {
1534
- isScreenSharing = false;
1535
- }
1536
- defaultApi.writeLog(`avsdk TALSDK::stopPush streamId: ${streamId}`);
1537
- return callMethod('StopPush', { streamId });
1538
- };
1539
-
1540
- /**
1541
- * @function 不推/推 音频
1542
- * @param mute: bool
1543
- * @return Promise | void
1544
- */
1545
- const muteLocalAudio = (mute) => {
1546
- defaultApi.writeLog(`avsdk TALSDK::muteLocalAudio mute: ${mute}`);
1547
- return callMethod('MuteLocalAudio', { mute }).then((...args) => {
1548
- localStreamMuteStatus.audio = mute;
1549
- return args;
1550
- });
1551
- };
1552
-
1553
- /**
1554
- * @function 不推/推 视频
1555
- * @param mute: bool
1556
- * @return Promise | void
1557
- */
1558
- const muteLocalVideo = (mute, streamId) => {
1559
- defaultApi.writeLog(`avsdk TALSDK::muteLocalVideo mute: ${mute}`);
1560
- return callMethod('MuteLocalVideo', { mute, streamId }).then((...args) => {
1561
- localStreamMuteStatus.video = mute;
1562
- return args;
1563
- });
1564
- };
1565
-
1566
- /**
1567
- * @function 开始转推本地摄像头 TALRTC 流至 RTMP。目前只支持转推 1 路
1568
- * @return Promise | void
1569
- */
1570
- const addPublishRtmpStreamUrl = (url) => {
1571
- defaultApi.writeLog(`avsdk TALSDK::addPublishRtmpStreamUrl url: ${url}`);
1572
- return callMethod('AddPublishRtmpStreamUrl', { url });
1573
- };
1574
-
1575
- /**
1576
- * @function 停止转推 RTMP
1577
- * @return Promise | void
1578
- */
1579
- const removePublishStreamUrl = (url) => {
1580
- defaultApi.writeLog(`avsdk TALSDK::removePublishStreamUrl url: ${url}`);
1581
- return callMethod('RemovePublishStreamUrl', { url });
1582
- };
1583
-
1584
- /**
1585
- * @function 开始拉流
1586
- * @param {string} streamId id
1587
- * @return {Promise} Promise | void
1588
- */
1589
- const startPlay = (streamId, muteAudio, muteVideo) => {
1590
- if (typeof muteAudio === 'undefined') {
1591
- muteAudio = getStreamMuteStatus(streamId, 'audio');
1592
- }
1593
-
1594
- if (typeof muteVideo === 'undefined') {
1595
- muteVideo = getStreamMuteStatus(streamId, 'video');
1596
- }
1597
- defaultApi.writeLog(`avsdk TALSDK::startPlay streamId: ${streamId}, muteAudio: ${muteAudio}, muteVideo: ${muteVideo}`);
1598
- return callMethod('StartPlay', {
1599
- myStreamId: localStreamId,
1600
- streamId,
1601
- muteAudio,
1602
- muteVideo,
1603
- muteDefaultAudio: classMode === 1 || classMode === 2,
1604
- muteDefaultVideo: classMode === 1 || classMode === 2
1605
- }).then((...args) => {
1606
- if (args[0].code !== 0 && streamIdToPreviewId[streamId]) {
1607
- console.log('retry startPlay----', streamId);
1608
- return startPlay(streamId, muteAudio, muteVideo);
1609
- }
1610
- setStreamMuteStatus(streamId, 'audio', muteAudio);
1611
- setStreamMuteStatus(streamId, 'video', muteVideo);
1612
- return args;
1613
- });
1614
- };
1615
-
1616
- /**
1617
- * @function 初始化拉流
1618
- * @param streamId:String 从传来的信道消息中获取,必选
1619
- * @param domId:String <video> 标签的 id,可选
1620
- * 如果传了就把视频绑定到对应的 <video> 标签上
1621
- * @param pInfo:String 多媒体流附加信息,可选,默认为 'none'
1622
- * @return src:String 视频预览地址
1623
- */
1624
- export const initPullFlow = async (streamId, domId, mute, pInfo, notAutoPlay, audioOnly) => {
1625
- defaultApi.writeLog(`TALRTC::initPullFlow_start , streamId :${streamId}, notAutoPlay: ${notAutoPlay}, audioOnly: ${audioOnly}`);
1626
- let playChannel;
1627
- resetStreamIdRtcPlayerInfo1(streamId);
1628
- resetStreamIdRtcPlayerInfo(streamId);
1629
- try {
1630
- if (!isFirstHeartBeatReport) {
1631
- isFirstHeartBeatReport = true;
1632
- heartBeatDataReport('start');
1633
- }
1634
- } catch (error) {}
1635
- if (streamIdToPreviewId[streamId] == undefined) {
1636
-
1637
- defaultApi.writeLog(`TALRTC:: The streamId queue does not contain this streamId ${uiChnIndexs}`);
1638
- playChannel = uiChnIndexs.pop();
1639
- streamIdToPreviewId[streamId] = playChannel;
1640
- // console.log('streamIdIsNoExited',playChannel,streamIdToPreviewId[streamId],uiChnIndexs);
1641
- } else {
1642
- defaultApi.writeLog(`TALRTC::The streamId queue contains this streamId: ${streamId}`);
1643
- playChannel = streamIdToPreviewId[streamId];
1644
- // console.log('streamIdIsExited',playChannel,streamIdToPreviewId[streamId],uiChnIndexs);
1645
- await stopPlay(streamId);
1646
- resetStreamIdRtcPlayerInfo(streamId);
1647
- }
1648
- // check whether creating/getting the play channel succeeded; if not, delete the mapping and retry
1649
- if (playChannel == undefined) {
1650
- delete streamIdToPreviewId[streamId];
1651
- defaultApi.writeLog('error', 'TALRTC:: uiChnIndex is not exist');
1652
- return initPullFlow(streamId, domId, mute, pInfo, notAutoPlay, audioOnly);
1653
- }
1654
- previewIdToStreamId[playChannel] = streamId;
1655
- /**
1656
- * Fold the muteRemote parameters into startPlay to reduce async calls and lower the chance of a black screen
1657
- */
1658
- // whether to pull audio
1659
- // await muteRemoteAudio(streamId, !!mute);
1660
- // // whether to pull video
1661
- // await muteRemoteVideo(streamId, !!audioOnly);
1662
- const videoSrc = await startLocalOrRemotePreview(false, streamId, domId);
1663
-
1664
- if (!notAutoPlay) {
1665
- await startPlay(streamId, !!mute, !!audioOnly);
1666
- }
1667
- return {
1668
- videoSrc,
1669
- playerId: playChannel
1670
- };
1671
- };
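A minimal sketch of how a caller might start pulling a remote stream with initPullFlow (the stream id and DOM id are placeholders); it resolves with the preview source and the play channel allocated from uiChnIndexs.

// Sketch only; identifiers are illustrative, not part of the SDK surface.
async function showRemoteStream() {
  const { videoSrc, playerId } = await initPullFlow(
    'inst_room_user_live',   // streamId from the channel message (hypothetical)
    'remote-video',          // id of a <video> tag to bind (hypothetical)
    false,                   // mute: also pull audio
    'none',                  // pInfo
    false,                   // notAutoPlay: start playing immediately
    false                    // audioOnly: pull video as well
  );
  console.log('preview src:', videoSrc, 'play channel:', playerId);
}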
1672
-
1673
-
1674
-
1675
- /**
1676
- * @function Start the mic-link (co-streaming) session
1677
- * @param mode: number capture mode, default 0 = standing, 1 = seated
1678
- * @return Promise | void
1679
- */
1680
- export const teacherStartLinkMic = async (mode = 0) => {
1681
- await startMicrophone();
1682
- if (mode === 0) {
1683
- await muteLocalVideo(true, localStreamId);
1684
- await setMicrophoneDevice({});
1685
- // await muteStreamAudio(false);
1686
- await muteLocalAudio(false);
1687
- } else {
1688
- // await muteLocalVideo(false, localStreamId);
1689
- await muteLocalAudio(false);
1690
- }
1691
- };
1692
-
1693
-
1694
- /**
1695
- * @function End the mic-link (co-streaming) session
1696
- * @param mode:number capture mode, default 0 = standing, 1 = seated
1697
- * @return Promise | void
1698
- */
1699
- export const teacherStopLinkMic = async (mode = 0) => {
1700
- await stopMicrophone();
1701
- if (mode === 0) {
1702
- await muteLocalAudio(true);
1703
- } else {
1704
- await setSystemAudioLoopback(false);
1705
- }
1706
- };
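A rough sketch of the intended call order for the teacher-side mic link, as read from the two helpers above (mode 0 is the standing/default mode).

// Sketch only: start the link in standing mode, later end it.
async function runLinkMicSession() {
  await teacherStartLinkMic(0);   // opens the microphone, mutes local video and unmutes local audio
  // ... interactive session ...
  await teacherStopLinkMic(0);    // closes the microphone and mutes local audio again
}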
1707
-
1708
- /**
1709
- * @function Stop pulling a stream
1710
- * @param streamId: String stream id
1711
- * @return Promise | void
1712
- */
1713
- const stopPlay = (streamId, recovery) => {
1714
- const id = streamIdToPreviewId[streamId];
1715
- delete streamIdRtcPlayerInfo[streamId];
1716
- if (recovery && id && !uiChnIndexs.includes(id)) {
1717
- uiChnIndexs.push(id);
1718
- defaultApi.writeLog(`avsdk TALRTC::stopPlayStream streamId: ${streamId} id: ${id} uiChnIndexs: ${uiChnIndexs}`);
1719
- delete streamIdToPreviewId[streamId];
1720
- }
1721
- return callMethod('StopPlay', { streamId });
1722
- };
1723
-
1724
-
1725
- /**
1726
- * @function Stop pulling all streams
1727
- * @return Promise | void
1728
- */
1729
- const stopPlayAll = () => {
1730
- return callMethod('StopPlayAll', {});
1731
- };
1732
-
1733
- /**
1734
- * @function Switch the stream being played
1735
- * @param streamId: String id of the stream to stop, required
1736
- * @param toStreamId: String id of the stream to pull instead, required
1737
- * @param toDomId: String id of the <video> tag, optional
1738
- * @return Promise | void
1739
- */
1740
- const changePullFlow = async (streamId, toStreamId, toDomId) => {
1741
- await stopPlay(streamId);
1742
- return initPullFlow(toStreamId, toDomId);
1743
- };
1744
-
1745
- /**
1746
- * @function Whether to pull the audio stream -- controlled via the player id
1747
- * @param mute: Boolean, required, true -> do not pull, false -> pull
1748
- * @param playerId: Number, required, player id
1749
- * @return Promise | void
1750
- */
1751
- const pullAudioFlow = (playerId, mute, streamid) => {
1752
- defaultApi.writeLog(`pullAudioFlow -- playerId ${playerId} operation ${mute} streamId ${streamid}`);
1753
- if(streamid){
1754
- if(streamIdToPreviewId[streamid] == undefined) {
1755
- NOTICE.pullAudioFlowError({streamid})
1756
- return
1757
- }else{
1758
- playerId = streamIdToPreviewId[streamid]
1759
- console.log('pullAudioFlow-playerId',playerId,streamIdToPreviewId)
1760
- }
1761
- }
1762
- let streamId = previewIdToStreamId[playerId];
1763
- try {
1764
- streamIdRtcPlayerInfo1[streamId].audio_type = !mute;
1765
- console.log('pullAudioFlow ::previewIdToStreamId', streamId, previewIdToStreamId);
1766
- console.log('pullAudioFlow ::streamIdRtcPlayerInfo1', streamId, streamIdRtcPlayerInfo1);
1767
- } catch (e) {
1768
- console.log('pullAudioFlow ::streamIdRtcPlayerInfo1--error', e);
1769
- }
1770
- try {
1771
- dataReport.setPullVoice({
1772
- code: +!mute,
1773
- pull_uid: util.getUidByStreamId(streamId),
1774
- pull_streamid: streamId,
1775
- playerId
1776
- // operator:'client'
1777
- });
1778
- } catch (e) { };
1779
- return muteRemoteAudio(streamId, mute);
1780
- };
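A short sketch of toggling remote audio through pullAudioFlow; passing a streamid lets the function look up the player id itself, and an unknown streamid triggers the NOTICE.pullAudioFlowError callback instead.

// Sketch only; the stream id is hypothetical.
const sid = 'inst_room_user_live';
pullAudioFlow(undefined, /* mute */ true, sid);   // stop pulling this stream's audio
pullAudioFlow(undefined, /* mute */ false, sid);  // resume pulling it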
1781
-
1782
- /**
1783
- * @function Set the mirror state (local preview)
1784
- * @param { boolean } type true = enabled, false = disabled
1785
- * @return: Promise
1786
- */
1787
- const setMirrorStatus = (isMirror) => {
1788
- return callMethod('SetMirrorStatus', { isMirror, streamId: localStreamId });
1789
- };
1790
-
1791
- /**
1792
- * @function Set the mirror state of a pulled stream
1793
- * @param { boolean } type true = enabled, false = disabled
1794
- * @return: Promise
1795
- */
1796
- const setPlayViewMirror = (isMirror, id) => {
1797
- return callMethod('SetPlayViewMirror', { isMirror, streamId: id });
1798
- };
1799
-
1800
- /**
1801
- * @function Start preview
1802
- * @param idx: Number publish channel, optional, defaults to 0
1803
- * @return Promise | void
1804
- */
1805
- const startPreview = (idx) => {
1806
- if (typeof idx === 'undefined') {
1807
- idx = 0;
1808
- }
1809
- return callMethod('StartPreview', {
1810
- idx
1811
- });
1812
- };
1813
-
1814
- /**
1815
- * @function Stop preview
1816
- * @param idx: Number publish channel, optional, defaults to 0
1817
- * @return Promise | void
1818
- */
1819
- const stopPreview = (idx) => {
1820
- if (typeof idx === 'undefined') {
1821
- idx = 0;
1822
- }
1823
- return callMethod('StopPreview', {
1824
- idx
1825
- });
1826
- };
1827
-
1828
- /**
1829
- * @function Stop/resume pulling remote audio
1830
- * @param streamId: string
1831
- * @param mute: bool
1832
- * @return Promise | void
1833
- */
1834
- const muteRemoteAudio = (streamId, mute) => {
1835
- return callMethod('MuteRemoteAudio', { streamId, mute }).then((...args) => {
1836
- setStreamMuteStatus(streamId, 'audio', mute);
1837
- return args;
1838
- });
1839
- };
1840
-
1841
- /**
1842
- * @function Stop/resume pulling remote video
1843
- * @param streamId: string
1844
- * @param mute: bool
1845
- * @return Promise | void
1846
- */
1847
- const muteRemoteVideo = (streamId, mute) => {
1848
- return callMethod('MuteRemoteVideo', { streamId, mute }).then((...args) => {
1849
- setStreamMuteStatus(streamId, 'video', mute);
1850
- return args;
1851
- });
1852
- };
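Taken together, muteRemoteAudio and muteRemoteVideo can switch an already-playing stream to audio-only and back; a sketch with a hypothetical stream id follows.

// Sketch only.
async function setAudioOnly(streamId, audioOnly) {
  await muteRemoteVideo(streamId, audioOnly);   // true -> stop pulling video
  await muteRemoteAudio(streamId, false);       // keep pulling audio
}
setAudioOnly('inst_room_user_live', true);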
1853
- /**
1854
- * @function Configure the external capture device module
1855
- * @param enable whether to enable the external capture device module
1856
- * @attention must be called before InitSDK; clearing it must happen after UninitSDK
1857
- * @return Promise | void
1858
- */
1859
- const enableExternalVideoSource = (enable = false, streamId = localStreamId) => {
1860
- defaultApi.writeLog('info', 'avsdk TALRTC::EnableExternalVideoSource');
1861
- return callMethod('EnableExternalVideoSource', {enable, streamId});
1862
- };
1863
-
1864
- //设置音频数据来源
1865
- const setAudioAuxSource = (source) => {
1866
- defaultApi.writeLog('info', 'avsdk TALRTC::setAudioAuxSource');
1867
- return callMethod('SetAudioAuxSource', {
1868
- source
1869
- });
1870
- };
1871
-
1872
- /**
1873
- * Audio type of the RTC stream
1874
- * @param {number} type audio type 0: mute / 1: microphone / 2: pulled-stream audio / 3: 1+2 / 4: microphone + speaker / 5: speaker
1875
- * @param {string} streamId
1876
- * @returns {Promise<void>}
1877
- */
1878
- const setAudioType = (type, streamId) => {
1879
- defaultApi.writeLog('info', 'avsdk TALRTC::SetAudioType');
1880
- if (!streamId) {
1881
- streamId = screenStreamId;
1882
- }
1883
- return callMethod('SetAudioType', {
1884
- streamId,
1885
- type
1886
- });
1887
- };
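A sketch of setAudioType based on the type table above; when no streamId is passed it falls back to the screen-share stream (screenStreamId). The calls below are illustrative only.

// Sketch only.
setAudioType(3);                  // 3 = microphone + pulled-stream audio, defaults to screenStreamId
setAudioType(1, localStreamId);   // 1 = microphone only, on the main stream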
1888
-
1889
- //开启音频外部采集 参数:channel 参数类型:int 通道(传1使用辅通道)
1890
- const startAudioExCapture = (channel) => {
1891
- defaultApi.writeLog('info', 'avsdk TALRTC::startAudioExCapture');
1892
- return callMethod('StartAudioExCapture', {
1893
- channel
1894
- });
1895
- };
1896
- /**
1897
- * @function Load the capture plugin's output data // screen capture with the linked students' avatars composited in
1898
- * @returns {Promise}
1899
- */
1900
- export const loadCollectionOutputEntry = () => {
1901
- return callMethod('LoadCollectionOutputEntry', {
1902
- streamId: localStreamId
1903
- });
1904
- };
1905
- /**
1906
- * @function Load the capture plugin's output data // screen capture only
1907
- * @returns {Promise}
1908
- */
1909
- const LoadCollectionOutputEntry2 = () => {
1910
- defaultApi.writeLog('info', 'avsdk TALRTC::LoadCollectionOutputEntry2');
1911
- return callMethod('LoadCollectionOutputEntry2', {
1912
- streamId: screenStreamId
1913
- });
1914
- };
1915
-
1916
- //开启同屏
1917
- const startMultiScreen = async () => {
1918
- defaultApi.writeLog('info', 'avsdk TALRTC::startMultiScreen');
1919
- // await enableExternalVideoSource(true,1);
1920
- await LoadCollectionOutputEntry2();
1921
- };
1922
-
1923
- /**
1924
- * talrtc 40-way pulled-stream audio mixing interface
1925
- * @param {number} mixMode // mixing mode, 0 = disable mixing, 1 = enable mixing
1926
- * @param {Array<string>} streamIdArr // channels whose audio should be emphasized
1927
- * @returns {Promise}
1928
- */
1929
- export const setAudioMixMode = async (mixMode, streamIdArr) => {
1930
- // let channelsArr = [];
1931
- // if(streamIdArr.length > 0){
1932
- // streamIdArr.forEach( item => {
1933
- // channelsArr.push(getChannelIndex(item));
1934
- // });
1935
- // }
1936
- if (!Array.isArray(streamIdArr)) return;
1937
-
1938
- return callMethod('SetAudioMixMode', {mixMode: mixMode, streamIdArr, num: streamIdArr.length});
1939
- };
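A sketch of enabling the 40-way mix while emphasizing two streams (ids hypothetical); passing a non-array returns early without calling the native layer.

// Sketch only; stream ids are illustrative.
setAudioMixMode(1, ['100_room1_userA_live7', '100_room1_userB_live7']); // enable mixing, emphasize two streams
setAudioMixMode(0, []);                                                 // turn mixing off again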
1940
-
1941
-
1942
- /**
1943
- * @function Leave the classroom
1944
- * @return Promise | void
1945
- */
1946
- const leaveRoom = async () => {
1947
- for (let key in streamIdToPreviewId) {
1948
- await stopPlay(key);//huishou
1949
- }
1950
-
1951
- if (screenStreamId) {
1952
- screenStreamId = null;
1953
- }
1954
-
1955
- await stopPush(localStreamId);
1956
-
1957
- if (isSpeakerCapturing) {
1958
- await setSystemAudioLoopback(false);
1959
- }
1960
- if(window.zby_sdk_init_params.role === 'teacher' && window.zby_sdk_init_params.mode === 1){
1961
- //坐立模式关闭外部采集器,站立关闭外部采集器
1962
- await enableExternalVideoSource(false);
1963
- }
1964
- localStreamId = null;
1965
- removerListener();
1966
- await destroyEngine();
1967
- heartBeatDataReport('stop');
1968
- // isFirstHeartBeatReport = false;
1969
- streamIdToPreviewId = {};
1970
- previewIdToStreamId = {};
1971
- streamIdRtcPlayerInfo = {};
1972
- streamIdRtcPlayerInfo1 = {};
1973
- uiChnIndexs.length = 100;
1974
- uiChnIndexs.fill(0).forEach((value, index, array) => {
1975
- array[index] = array.length - (index + 1);
1976
- });
1977
- };
1978
-
1979
-
1980
- /**
1981
- * @function Unload the Talrtc extension
1982
- * @return Promise | void
1983
- */
1984
- export const unloadTalrtc = () => {
1985
- // EM is hosted by the native client and does not exist in the browser; capability-check first to avoid errors
1986
- if (EM) {
1987
- return new Promise((resolve, reject) => {
1988
- EM.UnLoad(
1989
- extensionId,
1990
- (code, msg) => {
1991
- defaultApi.writeLog(`unloadTalrtc Code: ${code}\nMessage: ${msg}`);
1992
- resolve();
1993
- }
1994
- );
1995
- });
1996
- }
1997
- };
1998
-
1999
-
2000
- const heartBeatRealKeys = ['video_fps', 'video_bitrate', 'audio_fps', 'audio_bitrate'];
2001
-
2002
- const _heartBeatDataReport = () => {
2003
- console.log('上报吧2');
2004
- // let cpuRate = 0;
2005
- // let memRate = 0;
2006
- // let rateCount = 0;
2007
- // let appCpuRate = 0;
2008
- // let appMemUsed = 0;
2009
- // let rateTimer = setInterval(async () => {
2010
- // rateCount++;
2011
- // let {cpu_rate, mem_rate, gpus, app_cpu_rate, app_mem_used} = (await toolApi.getCurCpuMemInfo()).msg;
2012
- // cpu_rate = cpu_rate < 0 ? 0 : cpu_rate;
2013
- // cpuRate += parseFloat(cpu_rate);
2014
- // memRate += parseFloat(mem_rate);
2015
- // if (window.zbyAVSDK_init_params.zego.role === 'student') {
2016
- // appCpuRate += parseFloat(app_cpu_rate);
2017
- // appMemUsed += parseFloat(app_mem_used);
2018
- // }
2019
-
2020
- // if (rateCount >= 3) {
2021
- // heartBeatRealKeys.forEach(realKey => {
2022
- // if (heartBeatDataReportObj.hasOwnProperty(realKey) && heartBeatDataReportObj.count > 0) {
2023
- // heartBeatDataReportObj[realKey] = util.toFixed(heartBeatDataReportObj[realKey]/heartBeatDataReportObj.count);
2024
- // }
2025
- // });
2026
- const pullInfo = [];
2027
- console.log('拉流的类型1',streamIdRtcPlayerInfo);
2028
- Object.keys(streamIdRtcPlayerInfo).forEach(streamid => {
2029
- console.log('拉流的类型1.5');
2030
- heartBeatRealKeys.forEach(realKey => {
2031
- if (!streamIdRtcPlayerInfo[streamid].hasOwnProperty(realKey)) {
2032
- streamIdRtcPlayerInfo[streamid][realKey] = [];
2033
- }
2034
- // if (streamIdRtcPlayerInfo[streamid].count > 0) {
2035
- // streamIdRtcPlayerInfo[streamid][realKey] = util.toFixed(streamIdRtcPlayerInfo[streamid][realKey]/streamIdRtcPlayerInfo[streamid].count);
2036
- // }
2037
- });
2038
- console.log('拉流的类型2',streamIdRtcPlayerInfo1[streamid]);
2039
- //获取拉流类型,后期可写为函数提出去
2040
- if(streamIdRtcPlayerInfo1[streamid].audio_type && streamIdRtcPlayerInfo1[streamid].video_type) {
2041
- streamIdRtcPlayerInfo1[streamid].stream_type = 'both';
2042
- } else if(!streamIdRtcPlayerInfo1[streamid].audio_type && streamIdRtcPlayerInfo1[streamid].video_type) {
2043
- streamIdRtcPlayerInfo1[streamid].stream_type = 'video';
2044
- } else if(streamIdRtcPlayerInfo1[streamid].audio_type && !streamIdRtcPlayerInfo1[streamid].video_type) {
2045
- streamIdRtcPlayerInfo1[streamid].stream_type = 'audio';
2046
- } else {
2047
- streamIdRtcPlayerInfo1[streamid].stream_type = 'none';
2048
- }
2049
- // console.log('hsghsghsg_type_type', streamIdRtcPlayerInfo1[streamid].stream_type);
2050
-
2051
- pullInfo.push({
2052
- streamid,
2053
- // uid: util.getUidByStreamId(streamid),
2054
- ...streamIdRtcPlayerInfo[streamid],
2055
- pull_type: streamIdRtcPlayerInfo1[streamid].stream_type,
2056
- volume: streamIdRtcPlayerInfo[streamid].volume.slice(0,streamIdRtcPlayerInfo[streamid].volume.length-1)
2057
- });
2058
- resetStreamIdRtcPlayerInfo(streamid);
2059
- });
2060
- if (isFirstHeartBeatReport) {
2061
- try {
2062
- //静音推流时过滤掉音频帧率和码率,上报为0;
2063
- // if (!isNoticeMicVolumeZego) {
2064
- // heartBeatDataReportObj['audio_fps'] = [];
2065
- // heartBeatDataReportObj['audio_bitrate'] = [];
2066
- // }
2067
-
2068
- //获取推流类型,后期可写为函数提出去
2069
- if(zbysdk.deviceStatus.camera && zbysdk.deviceStatus.microphone) {
2070
- zbysdk.deviceStatus.stream_type = 'both';
2071
- } else if(!zbysdk.deviceStatus.camera && zbysdk.deviceStatus.microphone) {
2072
- zbysdk.deviceStatus.stream_type = 'audio';
2073
- } else if(zbysdk.deviceStatus.camera && !zbysdk.deviceStatus.microphone) {
2074
- zbysdk.deviceStatus.stream_type = 'video';
2075
- } else {
2076
- zbysdk.deviceStatus.stream_type = 'none';
2077
- };
2078
- // console.log('push_type222',zbysdk.deviceStatus,zbysdk.deviceStatus.stream_type);
2079
- defaultApi.writeLog(`push_type_talrtc,camera: ${zbysdk.deviceStatus.camera},microphone: ${zbysdk.deviceStatus.microphone},type: ${zbysdk.deviceStatus.stream_type},a_fps: ${dataTalrtcCapture.fps}, a_bit: ${dataTalrtcCapture.audioBitrate}, v_fps: ${dataTalrtcCapture.fps}, v_bit: ${dataTalrtcCapture.videoBitrate}`);
2080
- // if (window.zbyAVSDK_init_params.zego.role === 'teacher') {
2081
- dataReport.heartbeat({
2082
- ...{...heartBeatDataReportObj, push_type: zbysdk.deviceStatus.stream_type, volume: heartBeatDataReportObj.volume.slice(0,heartBeatDataReportObj.volume.length-1)},
2083
- // pull_info: JSON.stringify(pullInfo),
2084
- pull_info: pullInfo,
2085
- // cpu_rate: util.toFixed(cpuRate/rateCount),
2086
- // mem_rate: util.toFixed(memRate/rateCount),
2087
- });
2088
- // } else {
2089
- // dataReport.heartbeat({
2090
- // ...heartBeatDataReportObj,
2091
- // pull_info: JSON.stringify(pullInfo),
2092
- // cpu_rate: util.toFixed(cpuRate/rateCount),
2093
- // mem_rate: util.toFixed(memRate/rateCount),
2094
- // app_cpu: util.toFixed(appCpuRate/rateCount),
2095
- // app_mem: util.toFixed(appMemUsed/rateCount),
2096
- // video_mem: gpus
2097
- // });
2098
- // }
2099
- } catch (e) {
2100
- console.log(e);
2101
- }
2102
- }
2103
- resetHeartBeatDataReportObj();
2104
- // cpuRate = 0;
2105
- // memRate = 0;
2106
- // appCpuRate = 0;
2107
- // appMemUsed = 0;
2108
-
2109
- // clearInterval(rateTimer);
2110
- // }
2111
- // }, 10 * 1000);
2112
- };
2113
-
2114
- const heartBeatDataReport = (type) => {
2115
- try {
2116
- if (type === 'start' && !heartBeatDataReportTimer) {
2117
- console.log('start heart beat report');
2118
- _heartBeatDataReport();
2119
- heartBeatDataReportTimer = setInterval(() => {
2120
- _heartBeatDataReport();
2121
- }, 30 * 1000);
2122
- }
2123
- if (type === 'stop') {
2124
- clearInterval(heartBeatDataReportTimer);
2125
- heartBeatDataReportTimer = null;
2126
- }
2127
- } catch (error) {
2128
- console.log(error);
2129
- }
2130
- };
2131
- const lastStreamReportTimestamps = {};
2132
-
2133
- const heartBeatDataReportCalc = (name, _data) => {
2134
- console.log('hsgmzk111',name,_data);
2135
- let _d = JSON.parse(_data.stats);
2136
- console.log('hsgmzk222',name,_d);
2137
-
2138
- if (lastStreamReportTimestamps[_d.streamId] && Date.now() - lastStreamReportTimestamps[_d.streamId] < 4999) {
2139
- return;
2140
- } else {
2141
- lastStreamReportTimestamps[_d.streamId] = Date.now();
2142
- }
2143
- // 拉流
2144
- const pullKeys = ['fps', 'videoBitrate', 'afps', 'audioBitrate'];
2145
- if (name === 'onRemoteStatistics') {
2146
- console.log('hsgmzk222',streamIdRtcPlayerInfo,_d.streamId);
2147
- if (streamIdRtcPlayerInfo && streamIdRtcPlayerInfo.hasOwnProperty(_d.streamId)) {
2148
- console.log('hsgmzk22211');
2149
- let streamid = _d.streamId;
2150
- let isReport = true;
2151
- // streamIdRtcPlayerInfo[streamid].count++;
2152
- heartBeatRealKeys.forEach((realKey, index) => {
2153
- if (_d.hasOwnProperty(pullKeys[index])) {
2154
- if (streamIdRtcPlayerInfo[streamid][realKey] === undefined) {
2155
- streamIdRtcPlayerInfo[streamid][realKey] = [];
2156
- isReport = false;
2157
- }
2158
- // streamIdRtcPlayerInfo[streamid][realKey].push(parseFloat(parseInt(item[pullKeys[index]])));
2159
- }
2160
- });
2161
- console.log('hsgmzk333',_d);
2162
- if (isReport) {
2163
- let audio_fps_talrtc_pull = 0;
2164
- let audio_bitrate_talrtc_pull = 0;
2165
- let video_fps_talrtc_pull = 0;
2166
- let video_bitrate_talrtc_pull = 0;
2167
- console.log('hsgmzk444',_d);
2168
- if(streamIdRtcPlayerInfo1[streamid].audio_type && streamIdRtcPlayerInfo1[streamid].video_type) {
2169
- console.log('hsgmzk555',_d);
2170
- audio_fps_talrtc_pull = parseFloat(parseInt(_d.afps));
2171
- audio_bitrate_talrtc_pull = parseFloat(parseInt(_d.audioBitrate));
2172
- video_fps_talrtc_pull = parseFloat(parseInt(_d.fps));
2173
- video_bitrate_talrtc_pull = parseFloat(parseInt(_d.videoBitrate));
2174
- } else if(!streamIdRtcPlayerInfo1[streamid].audio_type && streamIdRtcPlayerInfo1[streamid].video_type) {
2175
- video_fps_talrtc_pull = parseFloat(parseInt(_d.fps));
2176
- video_bitrate_talrtc_pull = parseFloat(parseInt(_d.videoBitrate));
2177
- } else if(streamIdRtcPlayerInfo1[streamid].audio_type && !streamIdRtcPlayerInfo1[streamid].video_type) {
2178
- audio_fps_talrtc_pull = parseFloat(parseInt(_d.afps));
2179
- audio_bitrate_talrtc_pull = parseFloat(parseInt(_d.audioBitrate));
2180
- }
2181
- streamIdRtcPlayerInfo[streamid].audio_fps.push(audio_fps_talrtc_pull);
2182
- streamIdRtcPlayerInfo[streamid].audio_bitrate.push(audio_bitrate_talrtc_pull);
2183
- streamIdRtcPlayerInfo[streamid].video_fps.push(video_fps_talrtc_pull);
2184
- streamIdRtcPlayerInfo[streamid].video_bitrate.push(video_bitrate_talrtc_pull);
2185
- streamIdRtcPlayerInfo[streamid].pull_loss.push(_d.packetLoss);
2186
- streamIdRtcPlayerInfo[streamid].pull_delay.push(_d.rtt);
2187
-
2188
- streamIdRtcPlayerInfo[streamid].ctime.push(Math.round((new Date().getTime()+dataReport.timestamp)/1000));
2189
- streamIdRtcPlayerInfo[streamid].video_ifg += _d.videoBlockRate;
2190
- streamIdRtcPlayerInfo[streamid].audio_ifg += _d.audioBlockRate;
2191
- // console.log('hsg_tpull_keys',JSON.parse(JSON.stringify(streamIdRtcPlayerInfo[streamid])),_d.avTimestampDiff);
2192
- console.log('streamIdRtcPlayerInfo::: ', streamIdRtcPlayerInfo);
2193
- }
2194
- }
2195
- }
2196
- // 推流
2197
- const pushKeys = ['fps', 'videoBitrate', 'afps', 'audioBitrate'];
2198
- if (name === 'onLocalStatistics') {
2199
- // console.log('hsgmzk',_d);
2200
- // heartBeatDataReportObj.count++;
2201
- // heartBeatRealKeys.forEach((realKey, index) => {
2202
- // if (heartBeatDataReportObj.hasOwnProperty(realKey) && _d.hasOwnProperty(pushKeys[index])) {
2203
- // heartBeatDataReportObj[realKey].push(parseFloat(parseInt(_d[pushKeys[index]])));
2204
- // }
2205
- // });
2206
- let audio_fps_talrtc_push = 0;
2207
- let audio_bitrate_talrtc_push = 0;
2208
- let video_fps_talrtc_push = 0;
2209
- let video_bitrate_talrtc_push = 0;
2210
- dataTalrtcCapture =_d;
2211
-
2212
- if(zbysdk.deviceStatus.camera && zbysdk.deviceStatus.microphone) {
2213
- audio_fps_talrtc_push = parseFloat(parseInt(_d.afps));
2214
- audio_bitrate_talrtc_push = parseFloat(parseInt(_d.audioBitrate));
2215
- video_fps_talrtc_push = parseFloat(parseInt(_d.fps));
2216
- video_bitrate_talrtc_push= parseFloat(parseInt(_d.videoBitrate));
2217
- } else if(!zbysdk.deviceStatus.camera && zbysdk.deviceStatus.microphone) {
2218
- audio_fps_talrtc_push = parseFloat(parseInt(_d.afps));
2219
- audio_bitrate_talrtc_push = parseFloat(parseInt(_d.audioBitrate));
2220
- } else if(zbysdk.deviceStatus.camera && !zbysdk.deviceStatus.microphone) {
2221
- video_fps_talrtc_push = parseFloat(parseInt(_d.fps));
2222
- video_bitrate_talrtc_push = parseFloat(parseInt(_d.videoBitrate));
2223
- }
2224
- heartBeatDataReportObj.audio_fps.push(audio_fps_talrtc_push);
2225
- heartBeatDataReportObj.audio_bitrate.push(audio_bitrate_talrtc_push);
2226
- heartBeatDataReportObj.video_fps.push(video_fps_talrtc_push);
2227
- heartBeatDataReportObj.video_bitrate.push(video_bitrate_talrtc_push);
2228
- heartBeatDataReportObj.push_loss.push(_d.packetLoss);
2229
- heartBeatDataReportObj.push_delay.push(_d.rtt);
2230
- heartBeatDataReportObj.ctime.push(Math.round((new Date().getTime()+dataReport.timestamp)/1000));
2231
- }
2232
- };
2233
-
2234
- //推流字段
2235
- const resetHeartBeatDataReportObj = () => {
2236
- console.log('resetHeartBeatDataReportObj重置');
2237
- heartBeatDataReportObj = {
2238
- ctime: [],
2239
- push_type: 'none',
2240
- video_fps: [],
2241
- video_bitrate: [],
2242
- audio_fps: [],
2243
- audio_bitrate: [],
2244
- push_loss: [],
2245
- push_delay: [],
2246
- volume: ''
2247
- };
2248
- };
2249
-
2250
- resetHeartBeatDataReportObj();
2251
- //记录拉流类型,用作数据上报
2252
- const resetStreamIdRtcPlayerInfo1 = (streamId) => {
2253
- streamIdRtcPlayerInfo1[streamId] = {
2254
- audio_type: false,
2255
- video_type: false,
2256
- stream_type: 'none'
2257
- };
2258
- // console.log('hsgshgs_heartbeat',streamIdRtcPlayerInfo1);
2259
- };
2260
- //拉流字段
2261
- const resetStreamIdRtcPlayerInfo = (streamId) => {
2262
- console.log('reset stream info ----------', streamId);
2263
- streamIdRtcPlayerInfo[streamId] = {
2264
- pull_uid: util.getUidByStreamId(streamId),
2265
- streamid: streamId,
2266
- ctime: [],
2267
- pull_type: streamIdRtcPlayerInfo1[streamId].stream_type,
2268
- volume: '',
2269
- // 平均值
2270
- // count: 0,
2271
- // video_fps: 0,
2272
- // video_bitrate: 0,
2273
- // audio_fps: 0,
2274
- // audio_bitrate: 0,
2275
- video_fps: [],
2276
- video_bitrate: [],
2277
- audio_fps: [],
2278
- audio_bitrate: [],
2279
- pull_loss: [],
2280
- pull_delay: [],
2281
- //音画不同步字段
2282
- avtimestampdiff: [],
2283
- // 累加
2284
- audio_ifg: 0,
2285
- video_ifg: 0
2286
- };
2287
-
2288
- console.log('reset stream info ----------', streamId, streamIdRtcPlayerInfo[streamId]);
2289
- };
2290
-
2291
- export default {
2292
- init,
2293
- setCameraCaptureResolution,
2294
- setCameraEncodeResolution,
2295
- setCameraEncodeFps, // 未实现调用
2296
- setCameraEncodeBitrate,
2297
- getCameraResolution,
2298
- getMicrophoneDeviceList,
2299
- setMicrophoneDevice,
2300
- openOrCloseMicrophone,
2301
- getCurrentMicrophoneVolume,
2302
- setCurrentMicrophoneVolume,
2303
- getCameraDeviceList,
2304
- setCameraDevice,
2305
- openOrCloseCamera,
2306
- getSpeakerDeviceList,
2307
- setSpeakerDevice,
2308
- getCurrentSpeakerVolume,
2309
- setCurrentSpeakerVolume,
2310
- setSpeakerDeviceMute,
2311
- muteRemoteAudio,
2312
- muteRemoteVideo,
2313
- getSpeakerSimpleVolume,
2314
- setSpeakerSimpleVolume,
2315
- setSpeakerSimpleMute,
2316
- startLocalOrRemotePreview,
2317
- enableAudioSpeakerCapture,
2318
- startPush,
2319
- stopPush,
2320
- startPlay,
2321
- stopPlay,
2322
- initPullFlow,
2323
- stopPlayAll,
2324
- changePullFlow,
2325
- startPreview,
2326
- stopPreview,
2327
- sendSEIMsg,
2328
- loadCollectionInputEntry2,
2329
- controlCdnStreaming, // addPublishRtmpStreamUrl or removePublishStreamUrl
2330
- leaveRoom,
2331
- destroyEngine,
2332
- unloadTalrtc,
2333
- muteLocalVideo,
2334
- muteLocalAudio,
2335
- pullAudioFlow,
2336
- hasStream,
2337
- teacherStartLinkMic,
2338
- teacherStopLinkMic,
2339
- setMirrorStatus,
2340
- setPlayViewMirror,
2341
- setAudioAuxSource,
2342
- setAudioType,
2343
- startAudioExCapture,
2344
- startMultiScreen,
2345
- setAudioMixMode,
2346
- getChannelIndex,
2347
- getSDKVersion
2348
- };
1
+ import dataReport from '../network/dataReport.js';
2
+ import defaultApi from '../default';
3
+ import NOTICE from '../notice';
4
+ import zbysdk from '../zby-live-sdk.js';
5
+ import { deviceListReport } from './device.js';
6
+ import util from '../util/util';
7
+
8
+
9
+ // 扩展标识
10
+ const extensionId = 'talrtc_ext';
11
+ // 端提供的 API 入口
12
+ const EM = window.EM;
13
+ // 是否执行过 TALRTC 的 AddListener 的标志
14
+ let hasAddListener = false;
15
+ // 监听id
16
+ let EMListenerId = 0;
17
+ // rtc小班课=0;rtc大班课=1
18
+ const classType = 0;
19
+ // 0: 小组课 1: 小班课 2: 新小班课
20
+ let classMode = 0;
21
+ // 心跳
22
+ let heartBeatDataReportObj = {};
23
+ let isFirstHeartBeatReport = false;
24
+ // 维护的一份拉流的 streamId 与本地预览通道的映射表
25
+ let streamIdToPreviewId = {};
26
+ let previewIdToStreamId = {};
27
+ let heartBeatDataReportTimer = null;
28
+ let streamIdRtcPlayerInfo = {};
29
+ let streamIdRtcPlayerInfo1 = {};
30
+ // play channels for pulled streams are initialized from 0 up to 44 (45 streams in total); -1 is the local video preview of the published stream
31
+ const uiChnIndexs = new Array(100).fill(0).map((a, b) => b).reverse();
32
+ // 维护的一份当前正在使用的音频设备的 deviceId 的列表
33
+ let usingAudioDeviceId = {
34
+ speaker: '',
35
+ microphone: ''
36
+ };
37
+ let localStreamId = '';
38
+ let screenStreamId = '';
39
+ let isScreenSharing = false;
40
+ let isSpeakerCapturing = false;
41
+ let sdkVersion = null;
42
+ const streamMuteStatus = {};
43
+ const localStreamMuteStatus = {
44
+ video: false,
45
+ audio: false,
46
+ };
47
+ //记录底层推流回调的值
48
+ let dataTalrtcCapture = {};
49
+ let qualityLocalArr = [];
50
+ let qualityRemoteArr = [];
51
+ let localRoomId = null;
52
+ let userId = null;
53
+ let isNoticeMicVolume = false;
54
+ let enableLocalCapture = true; // 本地摄像头采集,默认允许
55
+ let teacherId = null;
56
+ // /**
57
+ // * 过滤直播云sdk中deviceId前缀
58
+ // * @param {string} deviceId
59
+ // * @returns {string}
60
+ // */
61
+ // const formatDeviceId = (deviceId) => {
62
+ // return deviceId.replace(/^@device:\w+:/, '');
63
+ // };
64
+ const getStreamMuteStatus = (streamId, type) => {
65
+ const streamStatus = streamMuteStatus[streamId] || {};
66
+ return streamStatus[type] || false;
67
+ };
68
+ const setStreamMuteStatus = (streamId, type, status) => {
69
+ const streamStatus = streamMuteStatus[streamId];
70
+ if (streamStatus) {
71
+ streamStatus[type] = status;
72
+ return;
73
+ }
74
+ switch (type) {
75
+ case 'video':
76
+ streamMuteStatus[streamId] = {
77
+ video: status,
78
+ audio: false,
79
+ };
80
+ break;
81
+ case 'audio':
82
+ streamMuteStatus[streamId] = {
83
+ video: false,
84
+ audio: status,
85
+ };
86
+ break;
87
+ default:break;
88
+ }
89
+ };
90
+ /**
91
+ * Determine whether a stream id is the main stream or the auxiliary stream
92
+ * @param {string} streamId
93
+ * @returns {boolean} true: main stream, false: auxiliary stream
94
+ */
95
+ const isMainStream = (streamId) => {
96
+ return streamId.split('_').length === 4;
97
+ };
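The check counts the underscore-separated segments of the stream id: a main stream id has four segments, while the auxiliary screen stream built later in init() appends a fifth (`_1`). A sketch with hypothetical ids:

// Sketch only; ids are illustrative.
isMainStream('100_room1_user9_live7');     // true  -> 4 segments, main camera/mic stream
isMainStream('100_room1_user9_live7_1');   // false -> 5 segments, auxiliary (screen) stream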
98
+
99
+ const callMethod = (name, args) => {
100
+ // EM is hosted by the native client and does not exist in the browser; capability-check first to avoid errors
101
+ if (EM) {
102
+ return new Promise((resolve, reject) => {
103
+ // let noneCamera = (name === 'SetCameraDevice' && !args.pszDeviceID);
104
+ // let noneMicrophone = (name === 'SetAudioDevice' && args.deviceType === 0 && !args.pszDeviceID);
105
+ // let noneSpeaker = (name === 'SetAudioDevice' && args.deviceType === 1 && !args.pszDeviceID);
106
+ // if (noneCamera || noneMicrophone || noneSpeaker) {
107
+ // return resolve();
108
+ // }
109
+ EM.CallMethod(
110
+ extensionId,
111
+ name,
112
+ JSON.stringify({ ...args, classType }),
113
+ (code, msg) => {
114
+ defaultApi.writeLog(`${name} Code: ${code}\nMessage: ${msg}\nParams: ${JSON.stringify({ ...args, classType })}`);
115
+ resolve({
116
+ code,
117
+ msg
118
+ });
119
+ }
120
+ );
121
+ });
122
+ }
123
+ };
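All native calls in this file go through this wrapper; it stringifies the arguments together with classType and resolves with the code/msg pair passed back by EM.CallMethod. A sketch of a direct call (the method name mirrors one used later in this file):

// Sketch only: only meaningful inside the hosting client where window.EM exists.
const call = callMethod('GetSDKVersion', {});   // undefined in a plain browser, where EM is absent
if (call) {
  call.then(ret => console.log(ret.code, ret.msg));
}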
124
+
125
+ const loadTalrtc = (extensionVersion) => {
126
+ // EM 是寄宿于端的,浏览器中并不存在,为防止报错需要先进行能力检测
127
+ if (EM) {
128
+ return new Promise((resolve, reject) => {
129
+ removerListener();
130
+ console.log('-> load tal rtc sdk', extensionId, extensionVersion);
131
+ EM.Load(
132
+ extensionId,
133
+ extensionVersion,
134
+ false,
135
+ (code, msg) => {
136
+ defaultApi.writeLog(`loadTalrtc Code: ${code}\nMessage: ${msg}`);
137
+ addListener();
138
+ resolve();
139
+ }
140
+ );
141
+ });
142
+ }
143
+ };
144
+
145
+ //卸载监听
146
+ const removerListener = () => {
147
+ hasAddListener = false;
148
+ defaultApi.writeLog(`TALRTC::action--removerListener EMListenerId:${EMListenerId}`);
149
+ EM.RemoverListener(extensionId, EMListenerId, (ec, content) => {
150
+ });
151
+ EMListenerId = 0;
152
+ };
153
+
154
+ //加载监听
155
+
156
+ /**
157
+ * @function Register the extension listener mechanism
158
+ * @param userId:Number user id, required
159
+ * @param userName:String user name, required
160
+ * @param roomId:String channel (room) id, required
161
+ * @param nNetType:Number network type, optional, defaults to 1
162
+ * @return void
163
+ */
164
+ const addListener = () => {
165
+ // EM 是寄宿于端的,浏览器中并不存在,为防止报错需要先进行能力检测
166
+ if (EM && !hasAddListener) {
167
+ hasAddListener = true;
168
+ EM.AddListener(extensionId, (event, data) => {
169
+ if (zbysdk.openListenerLog) {
170
+ console.log(`avsdk TALRTC::Listener:: start event: ${event}, data: ${JSON.stringify(data)}`);
171
+ }
172
+ if (data && data.indexOf(extensionId) > -1) {
173
+ try {
174
+ EMListenerId = JSON.parse(data)[extensionId];
175
+ defaultApi.writeLog(`TALRTC::addListener-- EMListenerId: ${EMListenerId}`);
176
+ } catch (error) {
177
+ }
178
+ }
179
+ if (!event || !data) {
180
+ return;
181
+ }
182
+ let _data = JSON.parse(data);
183
+ // defaultApi.writeLog(`TALRTC: event:: ${event} : ${data}`);
184
+ switch (event) {
185
+ // 推流器出现错误
186
+ case 'onLocalError':
187
+ defaultApi.writeLog(`TALRTC::addListener-- onLocalError: ${data}`);
188
+ NOTICE.pushStreamError({
189
+ errorStreamType:_data.streamType,
190
+ code: _data.code,
191
+ errorMsg: _data
192
+ });
193
+ dataReport.pushStreamError({
194
+ errorStreamType:_data.streamType,
195
+ code: _data.code
196
+ });
197
+ // defaultApi.writeLog(`TALRTC: event:: onLocalError: ${_data}`);
198
+ break;
199
+ // 直播推流器警告通知
200
+ case 'onLocalWarning':
201
+ defaultApi.writeLog(`TALRTC::addListener-- onLocalWarning: ${data}`);
202
+ NOTICE.pushStreamWarning({
203
+ warnStreamType:_data.streamType,
204
+ code: _data.code
205
+ });
206
+ dataReport.pushStreamWarning({
207
+ warnStreamType:_data.streamType,
208
+ code: _data.code
209
+ });
210
+ break;
211
+ // 首帧音频推送完成的回调通知
212
+ case 'onSendLocalFirstAudioFrame':
213
+ defaultApi.writeLog(`TALRTC: event:: onSendLocalFirstAudioFrame: ${data}`);
214
+ break;
215
+ // 采集首帧事件
216
+ case 'OnCaptureVideoFirstFrame':
217
+ defaultApi.writeLog(`TALRTC: event:: OnCaptureVideoFirstFrame: ${data}`);
218
+ NOTICE.pushFlowSuccess({code:0,publish_streamid: _data.pszStreamID});
219
+ dataReport.publishResult({
220
+ code: '0',
221
+ });
222
+ break;
223
+ // 本地音量
224
+ case 'onLocalMicrophoneVolumeUpdate':
225
+ // defaultApi.writeLog(`TALRTC: event:: onLocalMicrophoneVolumeUpdate: ${data}`);
226
+ if (isNoticeMicVolume) {
227
+ NOTICE.captureMicVolumeChanged({
228
+ volume: Math.round(_data.volume)
229
+ });
230
+ }
231
+ heartBeatDataReportObj.volume = heartBeatDataReportObj.volume + Math.round(_data.volume) + ',';
232
+ break;
233
+ // 推流器连接状态回调通知。推流器连接状态 1与服务器断开连接/2 正在连接服务器/3 连接服务器成功/4 重连服务器中 5
234
+ case 'onLocalConnectStatusUpdate':
235
+ defaultApi.writeLog(`TALRTC: event:: onLocalConnectStatusUpdate: ${data}`);
236
+ dataReport.localConnectStatus({
237
+ connectStatuStreamType:_data.streamType,
238
+ state: _data.state
239
+ });
240
+ if (_data.state === 2 || _data.state === 1) {
241
+ NOTICE.networkError();
242
+ } else if (_data.state === 3) {
243
+ NOTICE.networkRecovery();
244
+ }
245
+ break;
246
+ // 网络质量的实时统计回调
247
+ case 'onLocalNetworkQuality':
248
+ // defaultApi.writeLog(`TALRTC: event:: onLocalNetworkQuality: ${data}`);
249
+ if(_data.quality == 4 || _data.quality == 5 || _data.quality == 6) {
250
+ NOTICE.localNetworkQuality({code: _data.quality});
251
+ dataReport.localNetworkQuality({
252
+ code: _data.quality,
253
+ });
254
+ }
255
+ qualityLocalArr.push(_data.quality);
256
+ break;
257
+
258
+ // 远端流错误通知,拉流出现错误时,会回调该通知
259
+ case 'onRemoteError':
260
+ defaultApi.writeLog(`TALRTC: event:: onRemoteError: ${data}`);
261
+ NOTICE.pullStreamError({
262
+ strErrorStreamId: _data.streamId,
263
+ code:_data.code
264
+ });
265
+ break;
266
+ // 远端流警告通知,拉流出现警告时,会回调该通知
267
+ case 'onRemoteWarning':
268
+ defaultApi.writeLog(`TALRTC: event:: onRemoteWarning: ${data}`);
269
+ NOTICE.pullStreamWarning({
270
+ strWarnStreamId: _data.streamId,
271
+ code:_data.code
272
+ });
273
+ break;
274
+ // 卡顿事件,用于事件上报
275
+ case 'stuckevent':
276
+ defaultApi.writeLog(`TALRTC: event:: stuckevent: ${data}`);
277
+ break;
278
+ // 远端音频首帧
279
+ case 'onRecvRemoteAudioFirstFrame':
280
+ try{
281
+ NOTICE.firstAudioSize({
282
+ streamId: _data.streamId,
283
+ userId: util.getUidByStreamId(_data.streamId)
284
+ });
285
+ dataReport.firstAudioSize({
286
+ pull_streamid: _data.streamId,
287
+ pull_uid: util.getUidByStreamIdDr(_data.streamId),
288
+ code:'0'
289
+ });
290
+ } catch (e) { };
291
+ defaultApi.writeLog(`TALRTC: event:: OnRecvRemoteAudioFirstFrame: ${data}`);
292
+ break;
293
+ // 远端流音量大小
294
+ case 'onRemoteAudioVolume':
295
+ // defaultApi.writeLog(`TALRTC: event:: onRemoteAudioVolume: ${data}}`);
296
+ let cbData = {
297
+ streamid: _data.streamId,
298
+ streamId: _data.streamId,
299
+ userId: util.getUidByStreamId(_data.streamId),
300
+ volume: _data.volume
301
+ };
302
+ NOTICE.playerVolumeChanged(cbData);
303
+ NOTICE.volumeChange(cbData);
304
+ if(streamIdRtcPlayerInfo[_data.streamId]) {
305
+ streamIdRtcPlayerInfo[_data.streamId].volume = streamIdRtcPlayerInfo[_data.streamId].volume + Math.round(_data.volume)+ ',';
306
+ }
307
+ break;
308
+ case 'onUserEnableAudio':
309
+ try {
310
+ if (
311
+ teacherId &&
312
+ _data.streamId.match(teacherId) &&
313
+ isMainStream(_data.streamId) &&
314
+ _data.inuse
315
+ ) {
316
+ setChiefAudioStream(_data.streamId, util.getUidByStreamIdDr(_data.streamId));
317
+ }
318
+ } catch (e) {
319
+ defaultApi.writeLog(`TALRTC: event:: onUserEnableAudio: ${e}`);
320
+ }
321
+ defaultApi.writeLog(`TALRTC: event:: onUserEnableAudio: ${data}`);
322
+ break;
323
+ // 远端视频首帧
324
+ case 'onRenderRemoteVideoFirstFrame':
325
+ try{
326
+ // Test Code: 临时增加
327
+ setTimeout(() => {
328
+ NOTICE.pullFlowResult({code:0,pull_streamid: _data.streamId});
329
+ NOTICE.firstVideoSize({
330
+ streamId: _data.streamId,
331
+ userId: util.getUidByStreamId(_data.streamId)
332
+ });
333
+ dataReport.firstVideoSize({
334
+ pull_streamid: _data.streamId,
335
+ pull_uid: util.getUidByStreamIdDr(_data.streamId),
336
+ code:'0'
337
+ });
338
+ }, 100);
339
+ } catch (e) { };
340
+ defaultApi.writeLog(`TALRTC: event:: OnRenderRemoteVideoFirstFrame: ${data}`);
341
+ break;
342
+ // SEI消息
343
+ case 'onRecvSEIMsg':
344
+ defaultApi.writeLog(`TALRTC: event:: onRecvSEIMsg: ${data}`);
345
+ break;
346
+ // 网络质量的实时统计回调
347
+ case 'onRemoteNetworkQuality':
348
+ // defaultApi.writeLog(`TALRTC: event:: onRemoteNetworkQuality: ${data}`);
349
+ if(_data.quality == 4 || _data.quality == 5 || _data.quality == 6) {
350
+ NOTICE.remoteNetworkQuality({code: _data.quality});
351
+ dataReport.remoteNetworkQuality({
352
+ code: _data.quality,
353
+ });
354
+ }
355
+ qualityRemoteArr.push(_data.quality);
356
+ break;
357
+
358
+ // 转推 CDN 上发布音视频流的事件回调
359
+ case 'onRtmpStreamingStateChanged':
360
+ defaultApi.writeLog(`TALRTC: event:: onRtmpStreamingStateChanged: ${data}`);
361
+ break;
362
+ // 设备异常
363
+ case 'onDeviceError':
364
+ defaultApi.writeLog(`TALRTC: event:: OnDeviceError: ${data}`);
365
+ break;
366
+ // 拉流的结果通知
367
+ case 'onPlayStateUpdate':
368
+ defaultApi.writeLog(`TALRTC: event:: onPlayStateUpdate: ${data}`);
369
+ break;
370
+ // 推流的结果通知
371
+ case 'onPushStateUpdate':
372
+ defaultApi.writeLog(`TALRTC: event:: onPushStateUpdate: ${data}`);
373
+ if (_data.state === 3) {
374
+ NOTICE.pushFlowSuccess({code:0,publish_streamid: _data.streamId});
375
+ }
376
+ break;
377
+ // 推、拉流的统计数据。
378
+ case 'onLocalStatistics':
379
+ try {
380
+ NOTICE.playLossAndDelay({
381
+ userId,
382
+ delay:_data.rtt,
383
+ lostrate:_data.packetLoss
384
+ });
385
+ }catch(e){}
386
+ heartBeatDataReportCalc('onLocalStatistics', _data);
387
+ // defaultApi.writeLog(`TALRTC: event:: onStatisticsUpdate: ${_data}`);
388
+ break;
389
+ // 本地摄像头设备的通断状态发生变化
390
+ case 'onRemoteStatistics':
391
+ try {
392
+ NOTICE.playLossAndDelay({
393
+ userId: util.getUidByStreamId(_data.streamId),
394
+ delay:_data.rtt,
395
+ lostrate:_data.packetLoss
396
+ });
397
+ }catch(e){}
398
+ heartBeatDataReportCalc('onRemoteStatistics', _data);
399
+ // defaultApi.writeLog(`TALRTC: event:: onStatisticsUpdate: ${_data}`);
400
+ break;
401
+ // 本地摄像头设备的通断状态发生变化
402
+ case 'onVideoDeviceChanged':
403
+ defaultApi.writeLog(`TALRTC: event:: onVideoDeviceChanged: ${data}`);
404
+ setCameraDevice({deviceId: _data.state == 1 ? _data.deviceId : '', operationType: 'hotPlug', deviceState: _data.state == 1 ? 0 : 1, changedDeviceId: _data.deviceId});
405
+ break;
406
+ /* Connection state of the local microphone device changed
407
+ * state: 1: in use 2: disabled 4: never present? (unclear) 8: device removed
408
+ * deviceType: -1: unknown 0: playback device 1: microphone 2: video display device 3: camera
409
+ */
410
+ case 'onAudioDeviceChanged':
411
+ defaultApi.writeLog(`TALRTC: event:: onAudioDeviceChanged: ${data}`);
412
+ if (_data.deviceType === 0) {
413
+ setSpeakerDevice({deviceId: _data.state == 1 ? _data.deviceId : '', operationType: 'hotPlug', deviceState: _data.state == 1 ? 0 : 1, changedDeviceId: _data.deviceId});
414
+ }
415
+ if (_data.deviceType === 1) {
416
+ setMicrophoneDevice({deviceId: _data.state == 1 ? _data.deviceId : '', operationType: 'hotPlug', deviceState: _data.state == 1 ? 0 : 1, changedDeviceId: _data.deviceId});
417
+ }
418
+ break;
419
+
420
+ // mute 视频状态,true:远端开始推流,并拉流收到首帧;false:远端停止推流。
421
+ case 'onRemoteVideoStatus':
422
+ defaultApi.writeLog(`TALRTC: event:: onRemoteVideoStatus: ${data}`);
423
+ break;
424
+
425
+ // mute 音频状态,true:远端开始推流,并拉流收到首帧;false:远端停止推流。
426
+ case 'onRemoteAudioStatus':
427
+ defaultApi.writeLog(`TALRTC: event:: onRemoteAudioStatus: ${data}`);
428
+ break;
429
+ // 首帧视频推送完成的回调通知
430
+ case 'onSendLocalFirstVideoFrame':
431
+ defaultApi.writeLog(`TALRTC: event:: onSendLocalFirstVideoFrame: ${data}`);
432
+ break;
433
+ default:
434
+ console.warn('warning: uncaught listener:', event);
435
+ }
436
+ if (zbysdk.openListenerLog) {
437
+ console.log(`avsdk TALRTC::Listener:: event: ${event}, data: ${data}`);
438
+ }
439
+ });
440
+ }
441
+ };
442
+
443
+
444
+ // 初始化相关
445
+ const init = async (args) => {
446
+ defaultApi.writeLog(`avsdk TALRTC::init ${JSON.stringify(args)}`);
447
+ const usedDevices = window.zbyAVSDK_device_checker_init;
448
+ const { devices } = args;
449
+ localRoomId = args.roomId;
450
+ userId = args.userId;
451
+ localStreamId = args.streamId;
452
+ teacherId = args.teacherId;
453
+ const _devices = {
454
+ camera: (devices && devices.camera) || (usedDevices && usedDevices.camera && usedDevices.camera.use) || '',
455
+ microphone: (devices && devices.microphone) || (usedDevices && usedDevices.microphone && usedDevices.microphone.use) || '',
456
+ speaker: (devices && devices.speaker) || (usedDevices && usedDevices.speaker && usedDevices.speaker.use) || ''
457
+ };
458
+ console.log('-> load talrtc sdk. teacherId: ', teacherId);
459
+ await loadTalrtc(args.extensionVersion);
460
+ console.log('-> load talrtc sdk finished ');
461
+ const resp = await startEngine(args.appId, args.userId, args.usersign || '67890', 7, args.live_id, args.role === 'teacher' ? 1 : 0);
462
+ console.log('----> start engine ', resp);
463
+ console.log('-> start talrtc sdk success');
464
+
465
+ classMode = args.classMode >> 0;
466
+ // 0: 小组课 1: 小班课 2: 新小班课
467
+ if (classMode === 1 || classMode === 2){
468
+ if(args.role === 'teacher') {
469
+ //小班课 需要加载采集插件的输入数据,为了拼接talrtc头像
470
+ await loadCollectionInputEntry();
471
+
472
+ //屏幕流通道
473
+ screenStreamId = `${window.zby_sdk_init_params.institutionId}_${localRoomId}_${userId}_${args.live_id}_1`;
474
+ await enableExternalVideoSource(true, screenStreamId);
475
+ await setCameraEncodeFps(args.screenSameFps, screenStreamId);
476
+ await setCameraCaptureResolution(args.screenSameResolutionWidth, args.screenSameResolutionHeight, screenStreamId);
477
+ await setCameraEncodeResolution(args.screenSameResolutionWidth, args.screenSameResolutionHeight, screenStreamId);
478
+ await setCameraEncodeBitrate(args.screenSameBitrate, screenStreamId);
479
+
480
+ //rtc同屏以及高光时刻,都开启音频外部采集,默认传1
481
+ await setAudioAuxSource(1);
482
+ teacherId = undefined;
483
+ }
484
+
485
+ if(args.noiseSuppressMode == -2) {
486
+ console.log('noiseSuppressMode 关闭噪声消除',args.noiseSuppressMode);
487
+ await setEnableNoiseSuppress(false);
488
+ } else if(args.noiseSuppressMode == -1) {
489
+ console.log('noiseSuppressMode 由底层进行配置,js层不进行配置',args.noiseSuppressMode);
490
+ } else {
491
+ console.log('noiseSuppressMode 启动噪音消除,模式为:',args.noiseSuppressMode);
492
+ await setEnableNoiseSuppress(true);
493
+ await setNoiseSuppressMode(args.noiseSuppressMode >> 0);
494
+ await enableTransientNoiseSuppress(true);
495
+ };
496
+ //aecMode为-1是关闭回音消除,云控下传
497
+ if(args.aecMode == -2) {
498
+ console.log('aecMode 关闭回音消除',args.aecMode);
499
+ await setEnableAEC(false);
500
+ } else if(args.aecMode == -1) {
501
+ console.log('aecMode 由底层进行配置,js层不进行配置',args.aecMode);
502
+ } else {
503
+ console.log('aecMode 启动回音消除,模式为:',args.aecMode);
504
+ await setEnableAEC(true);
505
+ await setAECMode(args.aecMode >> 0);
506
+ };
507
+ };
508
+
509
+ if((args.role === 'teacher' || args.role === 'assistant') && classMode === 0){
510
+ // seated mode enables the external capturer, standing mode disables it; only called in small-group seated mode. This feeds rtmp data (avatars) into talrtc; with external capture enabled, talrtc no longer captures locally
511
+ // V2 does not distinguish seated/standing: both the teacher and the assistant enable the external video source
512
+ await enableExternalVideoSource(true, localStreamId);
513
+ enableLocalCapture = false; // 禁用本地摄像头采集,防止与其他进程抢占摄像头
514
+ } else {
515
+ await setDefaultDevice(_devices, 'default');
516
+ }
517
+ // 主通道
518
+ await setCameraEncodeFps(args.encodeCaptureFps, localStreamId);
519
+ await setCameraCaptureResolution(args.previewResolutionWidth, args.previewResolutionHeight, localStreamId);
520
+ await setCameraEncodeResolution(args.encodedResolutionWidth, args.encodedResolutionHeight, localStreamId);
521
+ await setCameraEncodeBitrate(args.encodeCaptureBitrate, localStreamId);
522
+
523
+ await setMicphoneVolumInterval(500);
524
+ await setMediaSideFlags();
525
+ window.current_sdk_type = 'talrtc';
526
+ defaultApi.writeLog('avsdk TALRTC::init finished current_sdk_type: talrtc');
527
+ };
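A condensed sketch of the argument object init() consumes, based only on the fields read above; every value shown is a placeholder, and talrtcSdk stands for this module's default export (the name is illustrative).

// Sketch only: placeholder values, not a working configuration.
talrtcSdk.init({
  appId: 'app-id',
  userId: 'user-id',
  usersign: 'sign',
  roomId: 'room-id',
  live_id: 'live-id',
  streamId: 'inst_room_user_live',
  role: 'teacher',            // 'teacher' | 'assistant' | student roles
  classMode: 0,               // 0: small group, 1: small class, 2: new small class
  extensionVersion: '1.0.0',
  encodeCaptureFps: 15,
  previewResolutionWidth: 640, previewResolutionHeight: 360,
  encodedResolutionWidth: 640, encodedResolutionHeight: 360,
  encodeCaptureBitrate: 500,
  noiseSuppressMode: -1,      // -2: off, -1: leave to the native layer, >= 0: mode value
  aecMode: -1,                // same convention as noiseSuppressMode
  devices: { camera: '', microphone: '', speaker: '' }
});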
528
+
529
+
530
+ /**
531
+ * @function Initialize the engine
532
+ * @param {string} appid TalRTC app key, required
533
+ * @param {string} userid
534
+ * @param {string} usersign
535
+ * @param {1 | 7} type underlying engine type of the live middle-platform SDK, 7: in-house engine
536
+ * @param {string} liveID
537
+ * @param {0 | 1 | 2} businessRole 0: student 1: teacher 2: tutor
538
+ * @return Promise | void
539
+ */
540
+ const startEngine = (appid, userid, usersign, type, liveID, businessRole) => {
541
+ defaultApi.writeLog(`avsdk TALRTC::StartEngine appid: ${appid} userid: ${userid} usersign: ${usersign}, liveID: ${liveID}, businessRole: ${businessRole}`);
542
+ return callMethod('StartEngine', {
543
+ appid,
544
+ userid,
545
+ usersign,
546
+ type,
547
+ liveID: '' + liveID,
548
+ businessRole,
549
+ });
550
+ };
551
+
552
+ // enable acoustic echo cancellation
553
+ export const setEnableAEC = (enable) => {
554
+ return callMethod('EnableAEC', {enable});
555
+ };
556
+
557
+ // set the echo cancellation mode: aggressive = 0 / moderate = 1 / mild = 2
558
+ export const setAECMode = (mode) => {
559
+ return callMethod('SetAECMode', {mode});
560
+ };
561
+
562
+ // enable noise suppression
563
+ export const setEnableNoiseSuppress = (enable) => {
564
+ return callMethod('EnableNoiseSuppress', {enable});
565
+ };
566
+
567
+ // set the noise suppression mode: 0: low 1: medium 2
568
+ export const setNoiseSuppressMode = (mode) => {
569
+ return callMethod('SetNoiseSuppressMode', {mode});
570
+ };
571
+
572
+ // new transient noise suppression interface; it removes transient noises such as keyboard typing, desk tapping and door knocking. It cannot handle non-transient noise, e.g. friction sounds like scraping a desk.
573
+ export const enableTransientNoiseSuppress = (enable) => {
574
+ return callMethod('EnableTransientNoiseSuppress',{enable});
575
+ };
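These three helpers are combined in init() above; a sketch of the same sequence for a cloud-configured suppression mode (the mode value is a placeholder):

// Sketch only, mirroring the init() logic for noiseSuppressMode >= 0.
async function applyNoiseSuppression(mode) {
  await setEnableNoiseSuppress(true);
  await setNoiseSuppressMode(mode);          // e.g. 0: low, 1: medium
  await enableTransientNoiseSuppress(true);  // also handle keyboard/door-knock style noise
}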
576
+
577
+ /**
578
+ * 加载采集插件的输入数据 //调用后,talrtc 可以向采集插件传送摄像头数据,用于拼接
579
+ * @return {Promise}
580
+ */
581
+ export const loadCollectionInputEntry = () => {
582
+ return callMethod('LoadCollectionInputEntry', {});
583
+ };
584
+
585
+ /**
586
+ * 1. Set the student avatar stream ids to composite, stitched in array order
587
+ * 2. Load the screenshot plugin and obtain its callback for setting pulled-stream avatars
588
+ * @param {Array} stream_ids array of pulled stream ids, at most 7 are effective
589
+ * @returns {Promise}
590
+ */
591
+ export const loadCollectionInputEntry2 = (stream_ids) => {
592
+ return callMethod('LoadCollectionInputEntry2', {stream_ids});
593
+ };
594
+ //设置是否发送sei
595
+ const setMediaSideFlags = (start = true, onlyAudioPublish = false, mediaInfoType = 2, seiSendType = 1, idx = 0) => {
596
+ return callMethod('SetMediaSideFlags',{start,onlyAudioPublish, mediaInfoType, seiSendType, idx});
597
+ };
598
+
599
+ /**
600
+ * @function 销毁引擎
601
+ * @return Promise | void
602
+ */
603
+ const destroyEngine = () => {
604
+ return callMethod('DestroyEngine', {}).then((...args) => {
605
+ window.zbyAVSDK_init_sdk_type = 'rtc';
606
+ return args;
607
+ });
608
+ };
609
+
610
+ // 设置采集分辨率
611
+ const setCameraCaptureResolution = (width, height, streamId = localStreamId) => {
612
+ return callMethod('SetCameraCaptureResolution', { width, height, streamId });
613
+ };
614
+
615
+ // 设置编码分辨率
616
+ const setCameraEncodeResolution = (width, height, streamId) => {
617
+ return callMethod('SetCameraEncodeResolution', { width, height, streamId });
618
+ };
619
+
620
+ // 设置编码帧率
621
+ const setCameraEncodeFps = (fps, streamId) => {
622
+ return callMethod('SetCameraEncodeFps', { fps, streamId });
623
+ };
624
+
625
+ // 设置编码码率
626
+ const setCameraEncodeBitrate = (bitrate, streamId) => {
627
+ return callMethod('SetCameraEncodeBitrate', { bitrate, streamId });
628
+ };
629
+
630
+ // 获取指定摄像头的支持的分辨率
631
+ const getCameraResolution = async (deviceId) => {
632
+ const resp = await callMethod('GetCameraResolution', { deviceId });
633
+ const cameraArr = JSON.parse(resp.msg || '{}').SupportedResolution;
634
+ return {
635
+ code: resp.code,
636
+ msg: JSON.stringify(cameraArr)
637
+ };
638
+ };
639
+
640
+ // 获取 rtc_sdk.dll 版本号
641
+ const getSDKVersion = async () => {
642
+ if (sdkVersion) {
643
+ return sdkVersion;
644
+ }
645
+ return callMethod('GetSDKVersion', {}).then((e) => {
646
+ sdkVersion = e;
647
+ });
648
+ };
649
+
650
+ /**
651
+ * @function Set the default hardware devices: camera, microphone and speaker
652
+ * @return Promise
653
+ */
654
+ export const setDefaultDevice = async (devices, operationType) => {
655
+ // 设置默认的摄像头
656
+ if (devices && devices.camera) {
657
+ if (zbyAVSDK_device_checker_init.camera.list.length == 0) {
658
+ await getCameraDeviceList();
659
+ }
660
+ await setCameraDevice({ deviceId: devices.camera, operationType: `${operationType}_1` });
661
+ } else {
662
+ const cameraData = await getCameraDeviceList();
663
+ if (cameraData.length) {
664
+ let hasSetCamera = false;
665
+ for (let item of cameraData) {
666
+ if (item.isDefault) {
667
+ await setCameraDevice({ deviceId: item.deviceId, operationType: `${operationType}_2` });
668
+ hasSetCamera = true;
669
+ break;
670
+ }
671
+ }
672
+ if (!hasSetCamera) {
673
+ await setCameraDevice({ deviceId: cameraData[0].deviceId, operationType: `${operationType}_3` });
674
+ }
675
+ }
676
+ }
677
+ // 设置默认的麦克风
678
+ if (devices && devices.microphone) {
679
+ if (zbyAVSDK_device_checker_init.microphone.list.length == 0) {
680
+ await getMicrophoneDeviceList();
681
+ }
682
+ console.log('pp2');
683
+ await setMicrophoneDevice({ deviceId: devices.microphone, operationType: `${operationType}_1` });
684
+ } else {
685
+ console.log('pp3');
686
+ const microPhoneData = await getMicrophoneDeviceList();
687
+ try {
688
+ if (microPhoneData.length) {
689
+ let hasSetMicrophone = false;
690
+ for (let item of microPhoneData) {
691
+ if (item.isDefault) {
692
+ await setMicrophoneDevice({ deviceId: item.deviceId, operationType: `${operationType}_2` });
693
+ hasSetMicrophone = true;
694
+ break;
695
+ }
696
+ }
697
+ if (!hasSetMicrophone) {
698
+ await setMicrophoneDevice({ deviceId: microPhoneData[0].deviceId, operationType: `${operationType}_3` });
699
+ }
700
+ }
701
+ } catch (e) {
702
+ console.log(e);
703
+ }
704
+
705
+ }
706
+ // 设置默认的扬声器
707
+ if (devices && devices.speaker) {
708
+ await setSpeakerDevice({ deviceId: devices.speaker, operationType: `${operationType}_1` });
709
+ } else {
710
+ const speakerData = await getSpeakerDeviceListInternal();
711
+ if (speakerData.length) {
712
+ let hasSetSpeaker = false;
713
+ for (let item of speakerData) {
714
+ if (item.isDefault) {
715
+ await setSpeakerDevice({ deviceId: item.deviceId, operationType: `${operationType}_2` });
716
+ hasSetSpeaker = true;
717
+ break;
718
+ }
719
+ }
720
+ if (!hasSetSpeaker) {
721
+ await setSpeakerDevice({ deviceId: speakerData[0].deviceId, operationType: `${operationType}_3` });
722
+ }
723
+ }
724
+ }
725
+ };
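The selection rule is the same for all three device types: prefer an explicitly passed device, otherwise the first list entry flagged isDefault, otherwise the first entry. A sketch of that rule in isolation (pickDevice is a hypothetical helper, not part of this file):

// Sketch only: pickDevice is illustrative.
function pickDevice(preferredId, deviceList) {
  if (preferredId) return preferredId;
  if (!deviceList.length) return '';
  const byDefaultFlag = deviceList.find(item => item.isDefault);
  return (byDefaultFlag || deviceList[0]).deviceId;
}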
726
+
727
+ // Microphone-related
728
+ /**
729
+ * @function Open the microphone
730
+ * @param intervalMs: Number
731
+ * @return Promise | void
732
+ */
733
+ const startMicrophone = async (intervalMs = 500) => {
734
+ isNoticeMicVolume = true;
735
+ await callMethod('MuteAudioCollect', { mute: false });
736
+ return callMethod('StartMicrophone', { intervalMs });
737
+ };
738
+
739
+ /**
740
+ * @function Close the microphone
741
+ * @return Promise | void
742
+ */
743
+ const stopMicrophone = () => {
744
+ isNoticeMicVolume = false;
745
+ // during small-class screen sharing, do not close the microphone, just mute it, otherwise the native layer stops publishing all audio streams
746
+ if (isScreenSharing) {
747
+ return callMethod('MuteAudioCollect', { mute: true });
748
+ } else {
749
+ return callMethod('StopMicrophone', {});
750
+ }
751
+ };
752
+
753
+ const openOrCloseMicrophone = async (operation) => {
754
+ if (operation) {
755
+ await startMicrophone();
756
+ } else {
757
+ await stopMicrophone();
758
+ }
759
+ };
760
+
761
+ /**
762
+ * @function 设置麦克风音量大小回调周期
763
+ * @param intervalMs: Number
764
+ * @return Promise | void
765
+ */
766
+ const setMicphoneVolumInterval = (intervalMs) => {
767
+ return callMethod('SetMicphoneVolumInterval', { intervalMs });
768
+ };
769
+
770
+ /**
771
+ * @function 获取麦克风设备列表
772
+ * @return Promise | void
773
+ */
774
+ const getMicrophoneDeviceListInternal = () => {
775
+ return callMethod('GetMicrophoneDeviceList', {});
776
+ };
777
+
778
+ /**
779
+ * @function 获取麦克风列表
780
+ * @return Promise | void
781
+ */
782
+ const getMicrophoneDeviceList = async () => {
783
+ const resp = await getMicrophoneDeviceListInternal();
784
+ let microphoneListArr = JSON.parse(JSON.parse(resp.msg).DeviceList) || [];
785
+ console.log('getMicrophoneDeviceList from native', microphoneListArr);
786
+ let microphoneList = [];
787
+ for (let i = 0, len = microphoneListArr.length; i < len; i++) {
788
+ let nameForChecking = microphoneListArr[i].name.toLowerCase();
789
+ let checkIfIsDefaultFromName = (nameForChecking.indexOf('built-in') >= 0) ||
790
+ (nameForChecking.indexOf('builtin') >= 0) ||
791
+ (nameForChecking.indexOf('default') >= 0) ||
792
+ (nameForChecking.indexOf('默认') >= 0) ||
793
+ (nameForChecking.indexOf('默認') >= 0);
794
+ microphoneList.push({
795
+ deviceId: microphoneListArr[i].id,
796
+ deviceName: microphoneListArr[i].name,
797
+ isDefault: microphoneListArr[i].default || checkIfIsDefaultFromName
798
+ });
799
+ deviceListReport.micList[microphoneListArr[i].szDeviceId] = microphoneListArr[i].szDeviceName;
800
+ }
801
+ if (!window.zbyAVSDK_device_checker_init) {
802
+ window.zbyAVSDK_device_checker_init = {};
803
+ }
804
+ if (!window.zbyAVSDK_device_checker_init.microphone) {
805
+ window.zbyAVSDK_device_checker_init.microphone = {};
806
+ }
807
+ window.zbyAVSDK_device_checker_init.microphone.hasTest = true;
808
+ window.zbyAVSDK_device_checker_init.microphone.list = microphoneList;
809
+ console.log('getMicrophoneDeviceList', microphoneList);
810
+ return microphoneList;
811
+ };
812
+
813
+ /**
814
+ * @function 设置指定音频(当前麦克风)设备
815
+ * @param deviceId: String 音频设备 id,必选
816
+ * @return Promise | void
817
+ */
818
+ const setCurrentMicrophoneDevice = (deviceId) => {
819
+ window.zbyAVSDK_device_checker_init.speaker.use = deviceId;
820
+ return callMethod('SetCurrentMicrophoneDevice', {
821
+ deviceId
822
+ });
823
+ };
824
+
825
+ /**
826
+ * @function Specify the microphone device
827
+ * @param deviceId: String microphone device id, required
828
+ * @param operationType: String operation type, optional
829
+ * @return Promise | void
830
+ */
831
+ const setMicrophoneDevice = async (args) => {
832
+ console.log('-> getMicrophoneDeviceList_microphoneListArr_tal_rtc_i');
833
+ let { deviceId, operationType, deviceState, changedDeviceId } = args;
834
+ let deviceName = '';
835
+ let microPhoneData;
836
+ if (!deviceId) {
837
+ microPhoneData = await getMicrophoneDeviceList();
838
+ if (microPhoneData.length) {
839
+ let hasGetMicrophone = false;
840
+ for (let item of microPhoneData) {
841
+ if (item.isDefault) {
842
+ deviceId = item.deviceId;
843
+ deviceName = item.deviceName;
844
+ hasGetMicrophone = true;
845
+ break;
846
+ }
847
+ }
848
+ if (!hasGetMicrophone) {
849
+ deviceId = microPhoneData[0].deviceId;
850
+ deviceName = microPhoneData[0].deviceName;
851
+ }
852
+ } else {
853
+ deviceId = '';
854
+ }
855
+ if (deviceId === '') {
856
+ NOTICE.noDevice({
857
+ deviceType: 'microphone'
858
+ });
859
+ }
860
+ }
861
+ try {
862
+ dataReport.setDevice({
863
+ device_type: 2,
864
+ device_id: deviceId,
865
+ device_name: deviceListReport.micList[deviceId],
866
+ operationType,
867
+ fore_state: operationType == 'hotPlug' ? +deviceState + 1 : '-'
868
+ });
869
+ window.zbyAVSDK_device_checker_init.microphone.name = deviceListReport.micList[deviceId];
870
+ } catch (e) {
871
+ };
872
+ await setCurrentMicrophoneDevice(deviceId);
873
+ if (operationType == 'hotPlug' || operationType == 'deviceError') {
874
+ if (!microPhoneData) {
875
+ microPhoneData = await getMicrophoneDeviceList();
876
+ deviceName = deviceListReport.micList[deviceId];
877
+ }
878
+ NOTICE[operationType]({
879
+ deviceType: 'microphone',
880
+ useDeviceId: deviceId,
881
+ useDeviceName: deviceName,
882
+ deviceList: microPhoneData,
883
+ deviceState,
884
+ changedDeviceId
885
+ });
886
+ }
887
+ };
888
+
889
+ /**
890
+ * @function 获取当前麦克风的音量
891
+ * @return Promise | void
892
+ */
893
+ const getCurrentMicrophoneVolume = () => {
894
+ return callMethod('GetCurrentMicrophoneVolume', {}).then(ret => {
895
+ let volume = 0;
896
+ try {
897
+ volume = Math.round(JSON.parse(ret.msg).microphoneVolume / 255 * 100);
898
+ } catch (e) {
899
+ console.error(`zby-live-sdk: getCurrentMicrophoneVolume ret: ${ret}. error: ${e}`);
900
+ }
901
+ return volume;
902
+ });
903
+ };
904
+
905
+ /**
906
+ * @function 设置当前麦克风的音量
907
+ * @param volume: number 音量值
908
+ * @return Promise | void
909
+ */
910
+ const setCurrentMicrophoneVolume = (volume) => {
911
+ return callMethod('SetCurrentMicrophoneVolume', {
912
+ volume: Math.round(volume / 100 * 255)
913
+ });
914
+ };
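Both helpers convert between the 0-100 scale exposed to callers and the 0-255 scale used by the native layer; a sketch of the round trip:

// Sketch only.
async function roundTripVolume() {
  await setCurrentMicrophoneVolume(80);   // sent to the native layer as round(80 / 100 * 255) = 204
  return getCurrentMicrophoneVolume();    // maps 204 back to round(204 / 255 * 100) = 80
}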
915
+
916
+ /**
917
+ * @function 获取当前麦克风ID
918
+ * @return Promise | void
919
+ */
920
+ const getCurrentMicrophoneId = (volume) => {
921
+ return callMethod('GetCurrentMicrophoneId', { volume });
922
+ };
923
+
924
+ // 摄像头相关
925
+ /**
926
+ * @function 打开摄像头
927
+ * @param width: Number
928
+ * @param height: Number
929
+ * @return Promise | void
930
+ */
931
+ const startCamera = (width, height) => {
932
+ return callMethod('StartCamera', { width, height, streamId: localStreamId });
933
+ };
934
+
935
+ /**
936
+ * @function 关闭摄像头
937
+ * @return Promise | void
938
+ */
939
+ const stopCamera = () => {
940
+ return callMethod('StopCamera', { streamId: localStreamId });
941
+ };
942
+
943
+ const openOrCloseCamera = async (operation) => {
944
+ console.log('openOrCloseCamera', operation);
945
+ if (operation) {
946
+ await startCamera();
947
+ } else {
948
+ await stopCamera();
949
+ }
950
+ };
951
+
952
+ /**
953
+ * @function 获取摄像头列表
954
+ * @return Promise | void
955
+ */
956
+ const getCameraDeviceListInternal = () => {
957
+ return callMethod('GetCameraDeviceList', {});
958
+ };
959
+
960
+ /**
961
+ * @function 获取摄像头列表
962
+ * @return Promise | void
963
+ */
964
+ export const getCameraDeviceList = async () => {
965
+ const resp = await getCameraDeviceListInternal();
966
+ let videoListArr = JSON.parse(JSON.parse(resp.msg).DeviceList) || [];
967
+ console.log('getCameraDeviceList from native', videoListArr);
968
+ let videoList = [];
969
+ for (let i = 0, len = videoListArr.length; i < len; i++) {
970
+ let nameForChecking = videoListArr[i].name.toLowerCase();
971
+ let checkIfIsDefaultFromName = (nameForChecking.indexOf('built-in') >= 0) ||
972
+ (nameForChecking.indexOf('builtin') >= 0) ||
973
+ (nameForChecking.indexOf('default') >= 0) ||
974
+ (nameForChecking.indexOf('默认') >= 0) ||
975
+ (nameForChecking.indexOf('默認') >= 0);
976
+ videoList.push({
977
+ deviceId: videoListArr[i].id,
978
+ deviceName: videoListArr[i].name,
979
+ isDefault: videoListArr[i].default || checkIfIsDefaultFromName
980
+ });
981
+ deviceListReport.cameraList[videoListArr[i].id] = videoListArr[i].name;
982
+ }
983
+ if (!window.zbyAVSDK_device_checker_init) {
984
+ window.zbyAVSDK_device_checker_init = {};
985
+ }
986
+ if (!window.zbyAVSDK_device_checker_init.camera) {
987
+ window.zbyAVSDK_device_checker_init.camera = {};
988
+ }
989
+ window.zbyAVSDK_device_checker_init.camera.hasTest = true;
990
+ window.zbyAVSDK_device_checker_init.camera.list = videoList;
991
+ console.log('getCameraDeviceList', videoList);
992
+ return videoList;
993
+ };
994
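+
+ // Editor's note (not in the original source): the resolved camera list has this shape;
+ // the ids and names below are illustrative only.
+ // [
+ //   { deviceId: '0x1111', deviceName: 'Integrated Camera (Built-in)', isDefault: true },
+ //   { deviceId: '0x2222', deviceName: 'USB Camera', isDefault: false }
+ // ]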
+
995
+ /**
996
+ * @function 设置指定视频(当前摄像头)设备
997
+ * @param deviceId: String 视频设备 id,必选
998
+ * @return Promise | void
999
+ */
1000
+ const setCurrentCameraDeviceInternal = (deviceId) => {
1001
+ window.zbyAVSDK_device_checker_init.camera.use = deviceId;
1002
+ return callMethod('SetCurrentCameraDevice', {
1003
+ streamId: localStreamId,
1004
+ deviceId
1005
+ });
1006
+ };
1007
+
1008
+ /**
1009
+ * @function 指定使用的摄像头
1010
+ * @param deviceId: String 摄像头 id,必选
1011
+ * @param operationType: String 操作类型,可选
1012
+ * 'device_error' -> 设备出错处理,'plug_and_unplug' -> 热插拔处理,
1013
+ * 不传即是普通的设置摄像头设备的行为
1014
+ * @return Promise | void
1015
+ */
1016
+ const setCameraDevice = async (args) => {
1017
+ defaultApi.writeLog(`setCameraDevice ${JSON.stringify(args)} enableLocalCapture: ${enableLocalCapture}`);
1018
+ if (!enableLocalCapture) {
1019
+ return;
1020
+ }
1021
+ let { deviceId, operationType, deviceState, code, changedDeviceId } = args;
1022
+ let deviceName = '';
1023
+ let cameraData;
1024
+ if (!deviceId) {
1025
+ cameraData = await getCameraDeviceList();
1026
+
1027
+ if (cameraData.length) {
1028
+ let hasGetCamera = false;
1029
+ for (let item of cameraData) {
1030
+ if (item.isDefault) {
1031
+ deviceId = item.deviceId;
1032
+ deviceName = item.deviceName;
1033
+ hasGetCamera = true;
1034
+ break;
1035
+ }
1036
+ }
1037
+ if (!hasGetCamera) {
1038
+ deviceId = cameraData[0].deviceId;
1039
+ deviceName = cameraData[0].deviceName;
1040
+ }
1041
+ } else {
1042
+ deviceId = '';
1043
+ }
1044
+
1045
+ if (deviceId === '') {
1046
+ NOTICE.noDevice({
1047
+ deviceType: 'camera'
1048
+ });
1049
+ }
1050
+ }
1051
+ // deviceId = formatDeviceId(deviceId);
1052
+
1053
+ window.zbyAVSDK_device_checker_init.camera.use = deviceId;
1054
+ window.zbyAVSDK_device_checker_init.camera.name = deviceListReport.cameraList[deviceId];
1055
+ try {
1056
+ dataReport.setDevice({
1057
+ device_type: 1,
1058
+ device_id: deviceId,
1059
+ device_name: deviceListReport.cameraList[deviceId],
1060
+ operationType,
1061
+ fore_state: operationType == 'hotPlug' ? deviceState + 1 : '-'
1062
+ });
1063
+ } catch (e) {
1064
+ };
1065
+
1066
+ await setCurrentCameraDeviceInternal(deviceId);
1067
+
1068
+ if (operationType == 'hotPlug' || operationType == 'deviceError') {
1069
+ if (!cameraData) {
1070
+ cameraData = await getCameraDeviceList();
1071
+ deviceName = deviceListReport.cameraList[deviceId];
1072
+ }
1073
+ NOTICE[operationType]({
1074
+ deviceType: 'camera',
1075
+ useDeviceId: deviceId,
1076
+ useDeviceName: deviceName,
1077
+ deviceList: cameraData,
1078
+ message: code,
1079
+ deviceState,
1080
+ changedDeviceId
1081
+ });
1082
+ }
1083
+ try {
1084
+ NOTICE.useredCamera({
1085
+ deviceId,
1086
+ deviceName
1087
+ });
1088
+ } catch (e) {
1089
+ };
1090
+ };
1091
+
1092
+ /**
1093
+ * @function 获取当前摄像头 id
1094
+ * @return Promise | void
1095
+ */
1096
+ const GetCurrentCameraId = () => {
1097
+ return callMethod('GetCurrentCameraId', {});
1098
+ };
1099
+
1100
+ // Speaker-related
1101
+
1102
+ /**
1103
+ * @function 获取扬声器列表
1104
+ * @return Promise | void
1105
+ */
1106
+ const getAudioDeviceList = () => {
1107
+ return callMethod('GetAudioDeviceList', {});
1108
+ };
1109
+
1110
+ /**
1111
+ * @function 获取系统默认的音频设备
1112
+ * @param deviceType: Number 音频设备类型,0 -> 麦克风,1 -> 扬声器,必选
1113
+ * @return Promise | void
1114
+ */
1115
+ const getDefaultAudioDeviceId = (deviceType) => {
1116
+ return callMethod('GetDefaultAudioDeviceId', {
1117
+ deviceType,
1118
+ // the following two params exist only for backward compatibility; hardcoded values are fine
1119
+ deviceId: '',
1120
+ deviceIdLength: 0
1121
+ });
1122
+ };
1123
+
1124
+ /**
1125
+ * @function 获取系统默认的扬声器设备 id
1126
+ * @return Promise
1127
+ */
1128
+ const getDefaultSpeaker = () => {
1129
+ return getDefaultAudioDeviceId(1);
1130
+ };
1131
+
1132
+ /**
1133
+ * @function 获取扬声器设备列表
1134
+ * @return Promise | void
1135
+ */
1136
+ const getSpeakerDeviceListInternal = () => {
1137
+ return callMethod('GetSpeakerDeviceList', {});
1138
+ };
1139
+
1140
+ /**
1141
+ * @function 获取扬声器列表
1142
+ * @return Promise | void
1143
+ */
1144
+ const getSpeakerDeviceList = async () => {
1145
+ const resp = await getSpeakerDeviceListInternal();
1146
+ let speakerListArr = JSON.parse(JSON.parse(resp.msg).DeviceList) || [];
1147
+ console.log('getSpeakerDeviceList from native', speakerListArr);
1148
+ let speakerList = [];
1149
+ for (let i = 0, len = speakerListArr.length; i < len; i++) {
1150
+ let nameForChecking = speakerListArr[i].name.toLowerCase();
1151
+ let checkIfIsDefaultFromName = (nameForChecking.indexOf('built-in') >= 0) ||
1152
+ (nameForChecking.indexOf('builtin') >= 0) ||
1153
+ (nameForChecking.indexOf('default') >= 0) ||
1154
+ (nameForChecking.indexOf('默认') >= 0) ||
1155
+ (nameForChecking.indexOf('默認') >= 0);
1156
+ speakerList.push({
1157
+ deviceId: speakerListArr[i].id,
1158
+ deviceName: speakerListArr[i].name,
1159
+ isDefault: speakerListArr[i].default || checkIfIsDefaultFromName
1160
+ });
1161
+ deviceListReport.speakerList[speakerListArr[i].id] = speakerListArr[i].name;
1162
+ }
1163
+ if (!window.zbyAVSDK_device_checker_init) {
1164
+ window.zbyAVSDK_device_checker_init = {};
1165
+ }
1166
+ if (!window.zbyAVSDK_device_checker_init.speaker) {
1167
+ window.zbyAVSDK_device_checker_init.speaker = {};
1168
+ }
1169
+ window.zbyAVSDK_device_checker_init.speaker.hasTest = true;
1170
+ window.zbyAVSDK_device_checker_init.speaker.list = speakerList;
1171
+ console.log('getSpeakerDeviceList', speakerList);
1172
+ return speakerList;
1173
+ };
1174
+
1175
+ /**
1176
+ * @function 设置指定音频(当前扬声器)设备
1177
+ * @param deviceId: String 音频设备 id,必选
1178
+ * @return Promise | void
1179
+ */
1180
+ const setCurrentSpeakerDevice = (deviceId) => {
1181
+ window.zbyAVSDK_device_checker_init.speaker.use = deviceId;
1182
+ return callMethod('SetCurrentSpeakerDevice', {
1183
+ deviceId
1184
+ });
1185
+ };
1186
+
1187
+ /**
1188
+ * @function 指定扬声器
1189
+ * @param deviceId: String 扬声器 id,必选
1190
+ * @param operationType:String 操作类型,可选
1191
+ * 'device_error' -> 设备出错处理,'plug_and_unplug' -> 热插拔处理,
1192
+ * 不传即是普通的设置扬声器设备的行为
1193
+ * @return Promise | void
1194
+ */
1195
+ const setSpeakerDevice = async (args) => {
1196
+ defaultApi.writeLog(`${JSON.stringify(args)}----setSpeakerDevice-talrtc----`);
1197
+ console.log('setSpeakerDevice args:', args);
1198
+ let { deviceId, operationType, deviceState, code, changedDeviceId } = args;
1199
+ let deviceName = '';
1200
+ let speakerData;
1201
+ if (!deviceId) {
1202
+ speakerData = await getSpeakerDeviceList();
1203
+ console.log('setSpeakerDevice speaker count:', speakerData.length);
1204
+ if (speakerData.length) {
1205
+ let hasGetSpeaker = false;
1206
+ console.log('setSpeakerDevice hasGetSpeaker before scan:', hasGetSpeaker);
1207
+ for (let item of speakerData) {
1208
+ if (item.isDefault) {
1209
+ deviceId = item.deviceId;
1210
+ deviceName = item.deviceName;
1211
+ hasGetSpeaker = true;
1212
+ break;
1213
+ }
1214
+ }
1215
+ console.log('setSpeakerDevice hasGetSpeaker after scan:', hasGetSpeaker);
1216
+ if (!hasGetSpeaker) {
1217
+ deviceId = speakerData[0].deviceId;
1218
+ deviceName = speakerData[0].deviceName;
1219
+ console.log('setSpeakerDevice fallback deviceId:', deviceId);
1220
+ }
1221
+ } else {
1222
+ deviceId = '';
1223
+ }
1224
+ if (deviceId === '') {
1225
+ NOTICE.noDevice({
1226
+ deviceType: 'speaker'
1227
+ });
1228
+ }
1229
+ }
1230
+ console.log('setSpeakerDevice resolved deviceId:', deviceId);
1231
+ try {
1232
+ dataReport.setDevice({
1233
+ device_type: 3,
1234
+ device_id: deviceId,
1235
+ device_name: deviceListReport.speakerList[deviceId],
1236
+ operationType,
1237
+ fore_state: operationType == 'hotPlug' ? deviceState + 1 : '-'
1238
+ });
1239
+ window.zbyAVSDK_device_checker_init.speaker.name = deviceListReport.speakerList[deviceId];
1240
+ } catch (e) {
1241
+ console.log(e);
1242
+ };
1243
+ console.log('-> set deviceId:', deviceId);
1244
+ usingAudioDeviceId.speaker = deviceId;
1245
+ await setCurrentSpeakerDevice(deviceId);
1246
+ if (operationType == 'hotPlug' || operationType == 'deviceError') {
1247
+ if (!speakerData) {
1248
+ speakerData = await getSpeakerDeviceList();
1249
+ deviceName = deviceListReport.speakerList[deviceId];
1250
+ }
1251
+ console.log('setSpeakerDevice notify deviceId:', deviceId);
1252
+ NOTICE[operationType]({
1253
+ deviceType: 'speaker',
1254
+ useDeviceId: deviceId,
1255
+ useDeviceName: deviceName,
1256
+ deviceList: speakerData,
1257
+ message: code,
1258
+ deviceState,
1259
+ changedDeviceId
1260
+ });
1261
+ }
1262
+ console.log('setSpeakerDevice done, deviceId:', deviceId);
1263
+ };
1264
+
1265
+ /**
1266
+ * @function 获取当前扬声器的音量
1267
+ * @returns {Promise<number>}
1268
+ */
1269
+ const getCurrentSpeakerVolume = () => {
1270
+ return callMethod('GetCurrentSpeakerVolume', {}).then(ret => {
1271
+ let volume = 0;
1272
+ try {
1273
+ volume = Math.round(JSON.parse(ret.msg).speakerVolume / 255 * 100);
1274
+ } catch (e) {
1275
+ console.error(`zby-live-sdk: getCurrentSpeakerVolume ret: ${ret}. error: ${e}`);
1276
+ }
1277
+ return volume;
1278
+ });
1279
+ };
1280
+
1281
+ /**
1282
+ * @function 获取当前扬声器ID
1283
+ * @return Promise | void
1284
+ */
1285
+ const getCurrentSpeakerId = () => {
1286
+ return callMethod('GetCurrentSpeakerId', {});
1287
+ };
1288
+
1289
+ /**
1290
+ * @function 是否存在媒体流
1291
+ * @param streamId
1292
+ * @returns {boolean}
1293
+ */
1294
+ export const hasStream = streamId => {
1295
+ return Object.keys(streamIdToPreviewId).includes(streamId);
1296
+ };
1297
+
1298
+
1299
+ /**
1300
+ * @function 获取当前视频流的通道
1301
+ * @param streamId 流 id
1302
+ * @return channelIndex
1303
+ */
1304
+ export const getChannelIndex = (streamId) => {
1305
+ defaultApi.writeLog(`avsdk TALSDK::getChannelIndex streamId: ${streamId} ${JSON.stringify(streamIdToPreviewId)}`);
1306
+ return streamIdToPreviewId[streamId];
1307
+ };
1308
+
1309
+ /**
1310
+ * @function 获取当前视频流所有的通道
1311
+ * @param streamId 流 id
1312
+ * @return channelIndex
1313
+ */
1314
+ export const getAllChannelIndex = () => {
1315
+ defaultApi.writeLog(`avsdk TALSDK::getAllChannelIndex ${JSON.stringify(streamIdToPreviewId)}`);
1316
+ return streamIdToPreviewId;
1317
+ };
1318
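+
+ // Editor's note (not in the original source): streamIdToPreviewId maps a pulled streamId
+ // to the preview channel taken from the uiChnIndexs pool, e.g.
+ // { 'teacher_stream_id': 0, 'student_stream_id': 1 } (ids here are placeholders);
+ // getChannelIndex and getAllChannelIndex only read this map.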
+
1319
+
1320
+ /**
1321
+ * @function 设置当前扬声器音量
1322
+ * @param volume: number 音量值
1323
+ * @return Promise | void
1324
+ */
1325
+ const setCurrentSpeakerVolume = (volume) => {
1326
+ return callMethod('SetCurrentSpeakerVolume', {
1327
+ volume: Math.round(volume / 100 * 255)
1328
+ });
1329
+ };
1330
+
1331
+ /**
1332
+ * @function 设置扬声器静音
1333
+ * @param mute: Boolean 必选
1334
+ * @return Promise | void
1335
+ */
1336
+ const setSpeakerDeviceMute = (mute) => {
1337
+ return callMethod('SetSpeakerDeviceMute', {
1338
+ deviceId: usingAudioDeviceId.speaker,
1339
+ mute
1340
+ });
1341
+ };
1342
+
1343
+ /**
1344
+ * @function 获取当前应用程序音量
1345
+ * @return Promise | void
1346
+ */
1347
+ const getSpeakerSimpleVolume = () => {
1348
+ return callMethod('GetSpeakerSimpleVolume', {
1349
+ deviceId: usingAudioDeviceId.speaker
1350
+ }).then(ret => {
1351
+ let volume = 0;
1352
+ try {
1353
+ volume = Math.round(JSON.parse(ret.msg).speakerVolume / 255 * 100);
1354
+ } catch (e) {
1355
+ console.error(`zby-live-sdk: getSpeakerSimpleVolume ret: ${ret}. error: ${e}`);
1356
+ }
1357
+ return volume;
1358
+ });
1359
+ };
1360
+
1361
+ /**
1362
+ * @function 设置当前应用程序音量
1363
+ * @param volume: Number 应用程序音量,必选
1364
+ * @return Promise | void
1365
+ */
1366
+ const setSpeakerSimpleVolume = (volume) => {
1367
+ return callMethod('SetSpeakerSimpleVolume', {
1368
+ deviceId: usingAudioDeviceId.speaker,
1369
+ volume: Math.round(volume / 100 * 255)
1370
+ });
1371
+ };
1372
+
1373
+ /**
1374
+ * @function 设置当前应用程序静音
1375
+ * @param mute: Boolean 是否静音,必选
1376
+ * @return Promise | void
1377
+ */
1378
+ const setSpeakerSimpleMute = (mute) => {
1379
+ return callMethod('SetSpeakerSimpleMute', {
1380
+ deviceId: usingAudioDeviceId.speaker,
1381
+ mute
1382
+ });
1383
+ };
1384
+
1385
+ const enableAudioSpeakerCapture = (capture) => {
1386
+ setSystemAudioLoopback(capture);
1387
+ };
1388
+
1389
+ /**
1390
+ * @function 打开系统声音采集
1391
+ * @enable true|false
1392
+ * @return Promise | void
1393
+ */
1394
+ const setSystemAudioLoopback = (enable) => {
1395
+ isSpeakerCapturing = enable;
1396
+ return callMethod('SetAudioSpeakerCapture', {
1397
+ enable,
1398
+ });
1399
+ };
1400
+
1401
+ /**
1402
+ * @function External entry for CDN restreaming (record and forward)
1403
+ * @param type: String start/stop, required
1404
+ * @param targetUrl: String restream URL (RTMP supported); currently hardcoded to a test URL inside the function
1405
+ * @return Promise | void
1406
+ */
1407
+ const controlCdnStreaming = async (type) => {
1408
+ const targetUrl = 'rtmp://media-push-ali-test.livecloud.eaydu.com/testlive/210926-11?key=75c868672113df0063430e9e47fb2a60&appId=zt10001&bizId=zt10001AI&cdn=1';
1409
+ defaultApi.writeLog(`avsdk TALRTC::controlCdnStreaming targetUrl: ${targetUrl} type: ${type}`);
1410
+ if (type === 'start') {
1411
+ return addPublishRtmpStreamUrl(targetUrl);
1412
+ }
1413
+ if (type === 'stop') {
1414
+ return removePublishStreamUrl(targetUrl);
1415
+ }
1416
+ };
1417
+
1418
+ /**
1419
+ * @function SEI 消息
1420
+ * @param data: String 数据
1421
+ * @param dataSize: Number 数据长度
1422
+ * @param repeatCount: Number 重试次数
1423
+ * @return Promise | void
1424
+ */
1425
+ const sendSEIMsg = (data) => {
1426
+ return callMethod('SendSEIMsg', { data: data, dataSize: data.length, repeatCount: 3 });
1427
+ };
1428
+
1429
+ /**
1430
+ * @function 开始屏幕采集
1431
+ * @return Promise | void
1432
+ */
1433
+ const startScreenCapture = () => {
1434
+ return callMethod('StartScreenCapture', {});
1435
+ };
1436
+
1437
+ /**
1438
+ * @function 停止屏幕采集
1439
+ * @return Promise | void
1440
+ */
1441
+ const stopScreenCapture = () => {
1442
+ return callMethod('StopScreenCapture', {});
1443
+ };
1444
+
1445
+
1446
+ // Push/pull stream related
1447
+ /**
1448
+ * @function 开启本地或者远程的视频视图
1449
+ * @param isLocal: Boolean 是否是本地的视频预览,必选
1450
+ * @param streamId: String 要拉取的视频流的 id,可选,只有拉取远程的视频流的时候才是必选的
1451
+ * @param domId: String <video> 标签的 id,可选
1452
+ * 如果传了 domId,就把视频绑定到对应的 <video> 标签上
1453
+ * @return Promise 可从 Promise 中获取 src,Promise.then((src) => {})
1454
+ */
1455
+ const startLocalOrRemotePreview = async (isLocal, streamId, domId, isTeacherNewWindowPreview) => {
1456
+ defaultApi.writeLog(`avsdk TALSDK::startLocalOrRemotePreview streamId: ${streamId}, isLocal: ${isLocal}`);
1457
+ let isUpdateChromeVersion = await util.getChromeVersion();
1458
+ const externalConstraints = {
1459
+ audio: false,
1460
+ video: {
1461
+ mandatory: {
1462
+ chromeMediaSource: 'external',
1463
+ chromeMediaSourceId: `ems://talrtc_ext/${isLocal || isTeacherNewWindowPreview ? -2 : streamId}`
1464
+ }
1465
+ }
1466
+ };
1467
+
1468
+ console.log('externalConstraints', externalConstraints);
1469
+
1470
+ return new Promise((resolve, reject) => {
1471
+ const handleExternalSuccess = (stream) => {
1472
+ defaultApi.writeLog(`${stream}----stream-talrtc----`);
1473
+ stream.oninactive = () => {
1474
+ defaultApi.writeLog('Stream inactive');
1475
+ };
1476
+ const src = isUpdateChromeVersion ? stream : window.URL.createObjectURL(stream);
1477
+ console.log('Chromium kernel upgraded:', isUpdateChromeVersion, domId, src);
1478
+ if (domId && document.querySelector(domId)) {
1479
+ if (isUpdateChromeVersion) {
1480
+ document.querySelector(domId).srcObject = stream;
1481
+ console.log('upgraded kernel, bound srcObject:', domId, document.querySelector(domId), document.querySelector(domId).srcObject, src);
1482
+ } else {
1483
+ console.log('legacy kernel, falling back to createObjectURL');
1484
+ document.querySelector(domId).src = window.URL.createObjectURL(stream);
1485
+ }
1486
+ }
1487
+ defaultApi.writeLog(`-> setVidoeSrc, streamId:${streamId}, domId: ${domId}, src: ${src}`);
1488
+ resolve(src);
1489
+ };
1490
+
1491
+ const handleExternalError = (error) => {
1492
+ if (error.name === 'ConstraintNotSatisfiedError') {
1493
+ console.error('ConstraintNotSatisfiedError');
1494
+ } else if (error.name === 'PermissionDeniedError') {
1495
+ console.error(
1496
+ 'Permissions have not been granted to use your camera and '
1497
+ + 'microphone, you need to allow the page access to your devices in '
1498
+ + 'order for the demo to work.'
1499
+ );
1500
+ }
1501
+ console.error(`getUserMedia error: ${error.name}`, error);
1502
+ if (domId) {
1503
+ document.querySelector(domId).src = '';
1504
+ }
1505
+ reject('');
1506
+ };
1507
+
1508
+ if (navigator.webkitGetUserMedia) {
1509
+ navigator.webkitGetUserMedia(
1510
+ externalConstraints,
1511
+ handleExternalSuccess,
1512
+ handleExternalError
1513
+ );
1514
+ }
1515
+ });
1516
+ };
1517
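+
+ // Editor's sketch (not part of the original source): previews come from a CEF-specific
+ // getUserMedia constraint (chromeMediaSource: 'external' with an ems://talrtc_ext/<channel>
+ // source id, -2 meaning the local camera). A hypothetical caller binding a remote stream
+ // to a <video id="remote-video"> element:
+ const __examplePreviewRemoteStream = async (remoteStreamId) => {
+ const src = await startLocalOrRemotePreview(false, remoteStreamId, '#remote-video');
+ return src; // a MediaStream on upgraded kernels, a blob: URL otherwise
+ };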
+
1518
+ /**
1519
+ * @function 开始推流
1520
+ * @param streamId: String 流 id
1521
+ * @return Promise | void
1522
+ */
1523
+ const startPush = async (streamId) => {
1524
+ if (!streamId) {
1525
+ streamId = localStreamId;
1526
+ }
1527
+
1528
+ if (streamId === screenStreamId) {
1529
+ isScreenSharing = true;
1530
+ }
1531
+ setAudioType(4, localStreamId); // set the capture audio type; not awaited on purpose so a hung native call cannot block the push
1532
+
1533
+ defaultApi.writeLog(`avsdk TALSDK::startPush streamId: ${streamId}`);
1534
+ try {
1535
+ if (!isFirstHeartBeatReport) {
1536
+ isFirstHeartBeatReport = true;
1537
+ heartBeatDataReport('start');
1538
+ }
1539
+ } catch (error) {}
1540
+ const muteAudio = localStreamMuteStatus.audio;
1541
+ const muteVideo = localStreamMuteStatus.video;
1542
+ return callMethod('StartPush', {
1543
+ streamId,
1544
+ muteAudio,
1545
+ muteVideo,
1546
+ muteDefaultAudio: classMode === 1 || classMode === 2,
1547
+ muteDefaultVideo: classMode === 1 || classMode === 2
1548
+ });
1549
+ };
1550
+
1551
+ /**
1552
+ * @function 停止推流
1553
+ * @param streamId: String 流 id
1554
+ * @return Promise | void
1555
+ */
1556
+ const stopPush = (streamId) => {
1557
+ if (!streamId) {
1558
+ streamId = localStreamId;
1559
+ }
1560
+
1561
+ if (streamId === screenStreamId) {
1562
+ isScreenSharing = false;
1563
+ }
1564
+ defaultApi.writeLog(`avsdk TALSDK::stopPush streamId: ${streamId}`);
1565
+ return callMethod('StopPush', { streamId });
1566
+ };
1567
+
1568
+ /**
1569
+ * @function 不推/推 音频
1570
+ * @param mute: bool
1571
+ * @return Promise | void
1572
+ */
1573
+ const muteLocalAudio = (mute) => {
1574
+ defaultApi.writeLog(`avsdk TALSDK::muteLocalAudio mute: ${mute}`);
1575
+ return callMethod('MuteLocalAudio', { mute }).then((...args) => {
1576
+ localStreamMuteStatus.audio = mute;
1577
+ return args;
1578
+ });
1579
+ };
1580
+
1581
+ /**
1582
+ * @function 不推/推 视频
1583
+ * @param mute: bool
1584
+ * @return Promise | void
1585
+ */
1586
+ const muteLocalVideo = (mute, streamId) => {
1587
+ defaultApi.writeLog(`avsdk TALSDK::muteLocalVideo mute: ${mute}`);
1588
+ return callMethod('MuteLocalVideo', { mute, streamId }).then((...args) => {
1589
+ localStreamMuteStatus.video = mute;
1590
+ return args;
1591
+ });
1592
+ };
1593
+
1594
+ /**
1595
+ * @function 开始转推本地摄像头 TALRTC 流至 RTMP。目前只支持转推 1 路
1596
+ * @return Promise | void
1597
+ */
1598
+ const addPublishRtmpStreamUrl = (url) => {
1599
+ defaultApi.writeLog(`avsdk TALSDK::addPublishRtmpStreamUrl url: ${url}`);
1600
+ return callMethod('AddPublishRtmpStreamUrl', { url });
1601
+ };
1602
+
1603
+ /**
1604
+ * @function 停止转推 RTMP
1605
+ * @return Promise | void
1606
+ */
1607
+ const removePublishStreamUrl = (url) => {
1608
+ defaultApi.writeLog(`avsdk TALSDK::removePublishStreamUrl url: ${url}`);
1609
+ return callMethod('RemovePublishStreamUrl', { url });
1610
+ };
1611
+
1612
+ /**
1613
+ * @function 开始拉流
1614
+ * @param {string} streamId 流 id
1615
+ * @return {Promise} Promise | void
1616
+ */
1617
+ const startPlay = (streamId, muteAudio, muteVideo) => {
1618
+ if (typeof muteAudio === 'undefined') {
1619
+ muteAudio = getStreamMuteStatus(streamId, 'audio');
1620
+ }
1621
+
1622
+ if (typeof muteVideo === 'undefined') {
1623
+ muteVideo = getStreamMuteStatus(streamId, 'video');
1624
+ }
1625
+ defaultApi.writeLog(`avsdk TALSDK::startPlay streamId: ${streamId}, muteAudio: ${muteAudio}, muteVideo: ${muteVideo}`);
1626
+ return callMethod('StartPlay', {
1627
+ myStreamId: localStreamId,
1628
+ streamId,
1629
+ muteAudio,
1630
+ muteVideo,
1631
+ muteDefaultAudio: classMode === 1 || classMode === 2,
1632
+ muteDefaultVideo: classMode === 1 || classMode === 2
1633
+ }).then((...args) => {
1634
+ if (args[0].code !== 0 && streamIdToPreviewId[streamId]) {
1635
+ console.log('retry startPlay----', streamId);
1636
+ return startPlay(streamId, muteAudio, muteVideo);
1637
+ }
1638
+ setStreamMuteStatus(streamId, 'audio', muteAudio);
1639
+ setStreamMuteStatus(streamId, 'video', muteVideo);
1640
+ return args;
1641
+ });
1642
+ };
1643
+
1644
+ /**
1645
+ * @function 初始化拉流
1646
+ * @param streamId:String 从传来的信道消息中获取,必选
1647
+ * @param domId:String <video> 标签的 id,可选
1648
+ * 如果传了就把视频绑定到对应的 <video> 标签上
1649
+ * @param pInfo:String 多媒体流附加信息,可选,默认为 'none'
1650
+ * @return src:String 视频预览地址
1651
+ */
1652
+ export const initPullFlow = async (streamId, domId, mute, pInfo, notAutoPlay, audioOnly) => {
1653
+ defaultApi.writeLog(`TALRTC::initPullFlow_start , streamId :${streamId}, notAutoPlay: ${notAutoPlay}, audioOnly: ${audioOnly}`);
1654
+ let playChannel;
1655
+ resetStreamIdRtcPlayerInfo1(streamId);
1656
+ resetStreamIdRtcPlayerInfo(streamId);
1657
+ try {
1658
+ if (!isFirstHeartBeatReport) {
1659
+ isFirstHeartBeatReport = true;
1660
+ heartBeatDataReport('start');
1661
+ }
1662
+ } catch (error) {}
1663
+ if (streamIdToPreviewId[streamId] == undefined) {
1664
+
1665
+ defaultApi.writeLog(`TALRTC:: The streamId queue does not contain this streamId; free channels: ${uiChnIndexs}`);
1666
+ playChannel = uiChnIndexs.pop();
1667
+ streamIdToPreviewId[streamId] = playChannel;
1668
+ // console.log('streamIdIsNoExited',playChannel,streamIdToPreviewId[streamId],uiChnIndexs);
1669
+ } else {
1670
+ defaultApi.writeLog(`TALRTC::The streamId queue contains this streamId: ${streamId}`);
1671
+ playChannel = streamIdToPreviewId[streamId];
1672
+ // console.log('streamIdIsExited',playChannel,streamIdToPreviewId[streamId],uiChnIndexs);
1673
+ await stopPlay(streamId);
1674
+ resetStreamIdRtcPlayerInfo(streamId);
1675
+ }
1676
+ // check whether a play channel was created or reused; if not, drop the mapping and retry
1677
+ if (playChannel == undefined) {
1678
+ delete streamIdToPreviewId[streamId];
1679
+ defaultApi.writeLog('error', 'TALRTC:: no free uiChnIndex available');
1680
+ return initPullFlow(streamId, domId, mute, pInfo, notAutoPlay, audioOnly);
1681
+ }
1682
+ previewIdToStreamId[playChannel] = streamId;
1683
+ /**
1684
+ * Fold the muteRemote flags into startPlay to cut extra async calls and reduce the chance of a black screen
1685
+ */
1686
+ // whether to pull audio
1687
+ // await muteRemoteAudio(streamId, !!mute);
1688
+ // // whether to pull video
1689
+ // await muteRemoteVideo(streamId, !!audioOnly);
1690
+ const videoSrc = await startLocalOrRemotePreview(false, streamId, domId);
1691
+
1692
+ if (!notAutoPlay) {
1693
+ await startPlay(streamId, !!mute, !!audioOnly);
1694
+ }
1695
+ return {
1696
+ videoSrc,
1697
+ playerId: playChannel
1698
+ };
1699
+ };
1700
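+
+ // Editor's sketch (not part of the original source): a typical pull flow, assuming a
+ // <video id="remote-video"> element exists; the stream id is a placeholder.
+ const __examplePullRemoteStream = async (remoteStreamId) => {
+ // allocate a preview channel, bind the <video> element and start playing
+ const { videoSrc, playerId } = await initPullFlow(remoteStreamId, '#remote-video', false);
+ console.log('pulling', remoteStreamId, 'on channel', playerId, videoSrc);
+ // later: stop playing and return the channel to the pool
+ await stopPlay(remoteStreamId, true);
+ };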
+
1701
+
1702
+
1703
+ /**
1704
+ * @function 开始连麦
1705
+ * @param mode: number 采集器默认, 默认0=站立 1=坐立
1706
+ * @return Promise | void
1707
+ */
1708
+ export const teacherStartLinkMic = async (mode = 0) => {
1709
+ await startMicrophone();
1710
+ if (mode === 0) {
1711
+ await muteLocalVideo(true, localStreamId);
1712
+ await setMicrophoneDevice({});
1713
+ // await muteStreamAudio(false);
1714
+ await muteLocalAudio(false);
1715
+ } else {
1716
+ // await muteLocalVideo(false, localStreamId);
1717
+ await muteLocalAudio(false);
1718
+ }
1719
+ };
1720
+
1721
+
1722
+ /**
1723
+ * @function 结束连麦
1724
+ * @param mode:number 采集器默认, 默认0=站立 1=坐立
1725
+ * @return Promise | void
1726
+ */
1727
+ export const teacherStopLinkMic = async (mode = 0) => {
1728
+ await stopMicrophone();
1729
+ if (mode === 0) {
1730
+ await muteLocalAudio(true);
1731
+ } else {
1732
+ await setSystemAudioLoopback(false);
1733
+ }
1734
+ };
1735
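+
+ // Editor's sketch (not part of the original source): the teacher-side link-mic flow as
+ // exposed above. Mode 0 (standing) keeps local video muted and only opens the microphone;
+ // mode 1 (seated) additionally turns off system audio loopback when the session ends.
+ const __exampleLinkMicSession = async () => {
+ await teacherStartLinkMic(0); // start link mic in standing mode
+ // ... interact with the student ...
+ await teacherStopLinkMic(0); // stop link mic and mute local audio again
+ };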
+
1736
+ /**
1737
+ * @function 停止拉流
1738
+ * @param streamId: String 流 id
1739
+ * @return Promise | void
1740
+ */
1741
+ const stopPlay = (streamId, recovery) => {
1742
+ const id = streamIdToPreviewId[streamId];
1743
+ delete streamIdRtcPlayerInfo[streamId];
1744
+ if (recovery && id && !uiChnIndexs.includes(id)) {
1745
+ uiChnIndexs.push(id);
1746
+ defaultApi.writeLog(`avsdk TALRTC::stopPlayStream streamId: ${streamId} id: ${id} uiChnIndexs: ${uiChnIndexs}`);
1747
+ delete streamIdToPreviewId[streamId];
1748
+ }
1749
+ return callMethod('StopPlay', { streamId });
1750
+ };
1751
+
1752
+
1753
+ /**
1754
+ * @function 停止拉所有的流
1755
+ * @return Promise | void
1756
+ */
1757
+ const stopPlayAll = () => {
1758
+ return callMethod('StopPlayAll', {});
1759
+ };
1760
+
1761
+ /**
1762
+ * @function 切换播放流
1763
+ * @param streamId: String 需要停止流的 id,必选
1764
+ * @param toStreamId: String 被拉取的流的 id,必选
1765
+ * @param toDomId: String <video> 标签的 id,可选
1766
+ * @return Promise | void
1767
+ */
1768
+ const changePullFlow = async (streamId, toStreamId, toDomId) => {
1769
+ await stopPlay(streamId);
1770
+ return initPullFlow(toStreamId, toDomId);
1771
+ };
1772
+
1773
+ /**
1774
+ * @function 是否拉取音频流--通过播放器id控制
1775
+ * @param mute: Boolean,必选,true -> 不拉取,false -> 拉取,必选
1776
+ * @param playerId: Number,必选,播放器 id
1777
+ * @return Promise | void
1778
+ */
1779
+ const pullAudioFlow = (playerId, mute, streamid) => {
1780
+ defaultApi.writeLog(`pullAudioFlow -- playerId ${playerId} operation ${mute} streamId ${streamid}`);
1781
+ if (streamid) {
1782
+ if (streamIdToPreviewId[streamid] == undefined) {
1783
+ NOTICE.pullAudioFlowError({ streamid });
1784
+ return;
1785
+ } else {
1786
+ playerId = streamIdToPreviewId[streamid];
1787
+ console.log('pullAudioFlow-playerId', playerId, streamIdToPreviewId);
1788
+ }
1789
+ }
1790
+ let streamId = previewIdToStreamId[playerId];
1791
+ try {
1792
+ streamIdRtcPlayerInfo1[streamId].audio_type = !mute;
1793
+ console.log('pullAudioFlow ::previewIdToStreamId', streamId, previewIdToStreamId);
1794
+ console.log('pullAudioFlow ::streamIdRtcPlayerInfo1', streamId, streamIdRtcPlayerInfo1);
1795
+ } catch (e) {
1796
+ console.log('pullAudioFlow ::streamIdRtcPlayerInfo1--error', e);
1797
+ }
1798
+ try {
1799
+ dataReport.setPullVoice({
1800
+ code: +!mute,
1801
+ pull_uid: util.getUidByStreamId(streamId),
1802
+ pull_streamid: streamId,
1803
+ playerId
1804
+ // operator:'client'
1805
+ });
1806
+ } catch (e) { };
1807
+ return muteRemoteAudio(streamId, mute);
1808
+ };
1809
+
1810
+ /**
1811
+ * @function 设置镜像状态(预览)
1812
+ * @param { boolean } type true=开启 false=关闭
1813
+ * @return: Promise
1814
+ */
1815
+ const setMirrorStatus = (isMirror) => {
1816
+ return callMethod('SetMirrorStatus', { isMirror, streamId: localStreamId });
1817
+ };
1818
+
1819
+ /**
1820
+ * @function 设置拉流镜像状态
1821
+ * @param { boolean } type true=开启 false=关闭
1822
+ * @return: Promise
1823
+ */
1824
+ const setPlayViewMirror = (isMirror, id) => {
1825
+ return callMethod('SetPlayViewMirror', { isMirror, streamId: id });
1826
+ };
1827
+
1828
+ /**
1829
+ * @function 开启预览
1830
+ * @param idx: Number 推流通道,可选,默认为 0
1831
+ * @return Promise | void
1832
+ */
1833
+ const startPreview = (idx) => {
1834
+ if (typeof idx === 'undefined') {
1835
+ idx = 0;
1836
+ }
1837
+ return callMethod('StartPreview', {
1838
+ idx
1839
+ });
1840
+ };
1841
+
1842
+ /**
1843
+ * @function 停止预览
1844
+ * @param idx: Number 推流通道,可选,默认为 0
1845
+ * @return Promise | void
1846
+ */
1847
+ const stopPreview = (idx) => {
1848
+ if (typeof idx === 'undefined') {
1849
+ idx = 0;
1850
+ }
1851
+ return callMethod('StopPreview', {
1852
+ idx
1853
+ });
1854
+ };
1855
+
1856
+ /**
1857
+ * @function 不拉/拉 音频
1858
+ * @param streamId: string
1859
+ * @param mute: bool
1860
+ * @return Promise | void
1861
+ */
1862
+ const muteRemoteAudio = (streamId, mute) => {
1863
+ return callMethod('MuteRemoteAudio', { streamId, mute }).then((...args) => {
1864
+ setStreamMuteStatus(streamId, 'audio', mute);
1865
+ return args;
1866
+ });
1867
+ };
1868
+
1869
+ /**
1870
+ * @function 不拉/拉 视频
1871
+ * @param streamId: string
1872
+ * @param mute: bool
1873
+ * @return Promise | void
1874
+ */
1875
+ const muteRemoteVideo = (streamId, mute) => {
1876
+ return callMethod('MuteRemoteVideo', { streamId, mute }).then((...args) => {
1877
+ setStreamMuteStatus(streamId, 'video', mute);
1878
+ return args;
1879
+ });
1880
+ };
1881
+ /**
1882
+ * @function 设置外部采集设备模块
1883
+ * @param enable 是否开启外部采集设备模块
1884
+ * @attention 必须在 InitSDK 前调用,置空必须在UninitSDK之后
1885
+ * @return Promise | void
1886
+ */
1887
+ const enableExternalVideoSource = (enable = false, streamId = localStreamId) => {
1888
+ defaultApi.writeLog('info', 'avsdk TALRTC::EnableExternalVideoSource');
1889
+ return callMethod('EnableExternalVideoSource', {enable, streamId});
1890
+ };
1891
+
1892
+ // set the auxiliary audio data source
1893
+ const setAudioAuxSource = (source) => {
1894
+ defaultApi.writeLog('info', 'avsdk TALRTC::setAudioAuxSource');
1895
+ return callMethod('SetAudioAuxSource', {
1896
+ source
1897
+ });
1898
+ };
1899
+
1900
+ /**
1901
+ * RTC流音频类型
1902
+ * @param {number} type 音频类型 0:静音 / 1:麦克风 / 2:拉流的声音 / 3:1+2 / 4:麦克风+扬声器 5: 扬声器
1903
+ * @param {string} streamId 流
1904
+ * @returns {Promise<void>}
1905
+ */
1906
+ const setAudioType = (type, streamId) => {
1907
+ defaultApi.writeLog('info', 'avsdk TALRTC::SetAudioType');
1908
+ if (!streamId) {
1909
+ streamId = screenStreamId;
1910
+ }
1911
+ return callMethod('SetAudioType', {
1912
+ streamId,
1913
+ type
1914
+ });
1915
+ };
1916
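+
+ // Editor's sketch (not part of the original source): the audio type codes documented above
+ // are 0 mute, 1 microphone, 2 pulled audio, 3 = 1 + 2, 4 microphone + speaker, 5 speaker.
+ // startPush already sets type 4 on the local stream; a hypothetical switch back to
+ // microphone-only capture would be:
+ const __exampleCaptureMicOnly = () => setAudioType(1, localStreamId);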
+
1917
+ // start external audio capture; param channel (int): pass 1 to use the auxiliary channel
1918
+ const startAudioExCapture = (channel) => {
1919
+ defaultApi.writeLog('info', 'avsdk TALRTC::startAudioExCapture');
1920
+ return callMethod('StartAudioExCapture', {
1921
+ channel
1922
+ });
1923
+ };
1924
+ /**
1925
+ * @function Load the capture plugin output // screen capture composited with the on-mic student's video
1926
+ * @returns {Promise}
1927
+ */
1928
+ export const loadCollectionOutputEntry = () => {
1929
+ return callMethod('LoadCollectionOutputEntry', {
1930
+ streamId: localStreamId
1931
+ });
1932
+ };
1933
+ /**
1934
+ * @function Load the capture plugin output // screen capture only
1935
+ * @returns {Promise}
1936
+ */
1937
+ const LoadCollectionOutputEntry2 = () => {
1938
+ defaultApi.writeLog('info', 'avsdk TALRTC::LoadCollectionOutputEntry2');
1939
+ return callMethod('LoadCollectionOutputEntry2', {
1940
+ streamId: screenStreamId
1941
+ });
1942
+ };
1943
+
1944
+ // start screen mirroring
1945
+ const startMultiScreen = async () => {
1946
+ defaultApi.writeLog('info', 'avsdk TALRTC::startMultiScreen');
1947
+ // await enableExternalVideoSource(true,1);
1948
+ await LoadCollectionOutputEntry2();
1949
+ };
1950
+
1951
+ /**
1952
+ * 音频增强调到最高优先级
1953
+ * @param {string} streamId
1954
+ * @param {string} uid
1955
+ * @param {boolean} add
1956
+ * @returns
1957
+ */
1958
+ const setChiefAudioStream = async (streamId, uid, add = true) => {
1959
+ defaultApi.writeLog('info', 'avsdk TALRTC::SetChiefAudioStream');
1960
+ return callMethod('SetChiefAudioStream', {
1961
+ streamId,
1962
+ uid,
1963
+ add
1964
+ });
1965
+ };
1966
+
1967
+ /**
1968
+ * talrtc40路拉流混音接口
1969
+ * @param {number} mixMode //混流模式 0=关闭混流 1=开启混流
1970
+ * @param {Array<string>} streamIdArr //表示要突出声音的channel
1971
+ * @returns {Promise}
1972
+ */
1973
+ export const setAudioMixMode = async (mixMode, streamIdArr) => {
1974
+ // let channelsArr = [];
1975
+ // if(streamIdArr.length > 0){
1976
+ // streamIdArr.forEach( item => {
1977
+ // channelsArr.push(getChannelIndex(item));
1978
+ // });
1979
+ // }
1980
+ if (!Array.isArray(streamIdArr)) return;
1981
+
1982
+ return callMethod('SetAudioMixMode', {mixMode: mixMode, streamIdArr, num: streamIdArr.length});
1983
+ };
1984
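+
+ // Editor's sketch (not part of the original source): enabling the 40-way pull mix and
+ // highlighting two speakers; both stream ids are placeholders.
+ const __exampleEnableAudioMix = () => {
+ return setAudioMixMode(1, ['placeholder_teacher_stream', 'placeholder_student_stream']);
+ };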
+
1985
+
1986
+ /**
1987
+ * @function 离开教室
1988
+ * @return Promise | void
1989
+ */
1990
+ const leaveRoom = async () => {
1991
+ for (let key in streamIdToPreviewId) {
1992
+ await stopPlay(key); // recycle: the channel pool is rebuilt below
1993
+ }
1994
+
1995
+ if (screenStreamId) {
1996
+ screenStreamId = null;
1997
+ }
1998
+
1999
+ await stopPush(localStreamId);
2000
+
2001
+ if (isSpeakerCapturing) {
2002
+ await setSystemAudioLoopback(false);
2003
+ }
2004
+ if(window.zby_sdk_init_params.role === 'teacher' && window.zby_sdk_init_params.mode === 1){
2005
+ // disable the external video capturer for both seated and standing teacher modes
2006
+ await enableExternalVideoSource(false);
2007
+ }
2008
+ localStreamId = null;
2009
+ removerListener();
2010
+ await destroyEngine();
2011
+ heartBeatDataReport('stop');
2012
+ // isFirstHeartBeatReport = false;
2013
+ streamIdToPreviewId = {};
2014
+ previewIdToStreamId = {};
2015
+ streamIdRtcPlayerInfo = {};
2016
+ streamIdRtcPlayerInfo1 = {};
2017
+ uiChnIndexs.length = 100;
2018
+ uiChnIndexs.fill(0).forEach((value, index, array) => {
2019
+ array[index] = array.length - (index + 1);
2020
+ });
2021
+ };
2022
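+
+ // Editor's note (not in the original source): the last block above rebuilds the uiChnIndexs
+ // preview-channel pool as [99, 98, ..., 1, 0]; initPullFlow hands channels out with
+ // uiChnIndexs.pop(), so channel 0 is allocated first after a leaveRoom.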
+
2023
+
2024
+ /**
2025
+ * @function 卸载 Talrtc 扩展
2026
+ * @return Promise | void
2027
+ */
2028
+ export const unloadTalrtc = () => {
2029
+ // EM 是寄宿于端的,浏览器中并不存在,为防止报错需要先进行能力检测
2030
+ if (EM) {
2031
+ return new Promise((resolve, reject) => {
2032
+ EM.UnLoad(
2033
+ extensionId,
2034
+ (code, msg) => {
2035
+ defaultApi.writeLog(`unloadTalrtc Code: ${code}\nMessage: ${msg}`);
2036
+ resolve();
2037
+ }
2038
+ );
2039
+ });
2040
+ }
2041
+ };
2042
+
2043
+
2044
+ const heartBeatRealKeys = ['video_fps', 'video_bitrate', 'audio_fps', 'audio_bitrate'];
2045
+
2046
+ const _heartBeatDataReport = () => {
2047
+ console.log('heartbeat report tick');
2048
+ // let cpuRate = 0;
2049
+ // let memRate = 0;
2050
+ // let rateCount = 0;
2051
+ // let appCpuRate = 0;
2052
+ // let appMemUsed = 0;
2053
+ // let rateTimer = setInterval(async () => {
2054
+ // rateCount++;
2055
+ // let {cpu_rate, mem_rate, gpus, app_cpu_rate, app_mem_used} = (await toolApi.getCurCpuMemInfo()).msg;
2056
+ // cpu_rate = cpu_rate < 0 ? 0 : cpu_rate;
2057
+ // cpuRate += parseFloat(cpu_rate);
2058
+ // memRate += parseFloat(mem_rate);
2059
+ // if (window.zbyAVSDK_init_params.zego.role === 'student') {
2060
+ // appCpuRate += parseFloat(app_cpu_rate);
2061
+ // appMemUsed += parseFloat(app_mem_used);
2062
+ // }
2063
+
2064
+ // if (rateCount >= 3) {
2065
+ // heartBeatRealKeys.forEach(realKey => {
2066
+ // if (heartBeatDataReportObj.hasOwnProperty(realKey) && heartBeatDataReportObj.count > 0) {
2067
+ // heartBeatDataReportObj[realKey] = util.toFixed(heartBeatDataReportObj[realKey]/heartBeatDataReportObj.count);
2068
+ // }
2069
+ // });
2070
+ const pullInfo = [];
2071
+ console.log('pull stream info map:', streamIdRtcPlayerInfo);
2072
+ Object.keys(streamIdRtcPlayerInfo).forEach(streamid => {
2073
+ console.log('collecting pull stream metrics');
2074
+ heartBeatRealKeys.forEach(realKey => {
2075
+ if (!streamIdRtcPlayerInfo[streamid].hasOwnProperty(realKey)) {
2076
+ streamIdRtcPlayerInfo[streamid][realKey] = [];
2077
+ }
2078
+ // if (streamIdRtcPlayerInfo[streamid].count > 0) {
2079
+ // streamIdRtcPlayerInfo[streamid][realKey] = util.toFixed(streamIdRtcPlayerInfo[streamid][realKey]/streamIdRtcPlayerInfo[streamid].count);
2080
+ // }
2081
+ });
2082
+ console.log('pull stream type flags:', streamIdRtcPlayerInfo1[streamid]);
2083
+ // derive the pull stream type; could be extracted into a helper later
2084
+ if(streamIdRtcPlayerInfo1[streamid].audio_type && streamIdRtcPlayerInfo1[streamid].video_type) {
2085
+ streamIdRtcPlayerInfo1[streamid].stream_type = 'both';
2086
+ } else if(!streamIdRtcPlayerInfo1[streamid].audio_type && streamIdRtcPlayerInfo1[streamid].video_type) {
2087
+ streamIdRtcPlayerInfo1[streamid].stream_type = 'video';
2088
+ } else if(streamIdRtcPlayerInfo1[streamid].audio_type && !streamIdRtcPlayerInfo1[streamid].video_type) {
2089
+ streamIdRtcPlayerInfo1[streamid].stream_type = 'audio';
2090
+ } else {
2091
+ streamIdRtcPlayerInfo1[streamid].stream_type = 'none';
2092
+ }
2093
+ // console.log('hsghsghsg_type_type', streamIdRtcPlayerInfo1[streamid].stream_type);
2094
+
2095
+ pullInfo.push({
2096
+ streamid,
2097
+ // uid: util.getUidByStreamId(streamid),
2098
+ ...streamIdRtcPlayerInfo[streamid],
2099
+ pull_type: streamIdRtcPlayerInfo1[streamid].stream_type,
2100
+ volume: streamIdRtcPlayerInfo[streamid].volume.slice(0,streamIdRtcPlayerInfo[streamid].volume.length-1)
2101
+ });
2102
+ resetStreamIdRtcPlayerInfo(streamid);
2103
+ });
2104
+ if (isFirstHeartBeatReport) {
2105
+ try {
2106
+ // when pushing with the mic muted, filter out audio fps/bitrate and report 0
2107
+ // if (!isNoticeMicVolumeZego) {
2108
+ // heartBeatDataReportObj['audio_fps'] = [];
2109
+ // heartBeatDataReportObj['audio_bitrate'] = [];
2110
+ // }
2111
+
2112
+ // derive the push stream type; could be extracted into a helper later
2113
+ if(zbysdk.deviceStatus.camera && zbysdk.deviceStatus.microphone) {
2114
+ zbysdk.deviceStatus.stream_type = 'both';
2115
+ } else if(!zbysdk.deviceStatus.camera && zbysdk.deviceStatus.microphone) {
2116
+ zbysdk.deviceStatus.stream_type = 'audio';
2117
+ } else if(zbysdk.deviceStatus.camera && !zbysdk.deviceStatus.microphone) {
2118
+ zbysdk.deviceStatus.stream_type = 'video';
2119
+ } else {
2120
+ zbysdk.deviceStatus.stream_type = 'none';
2121
+ };
2122
+ // console.log('push_type222',zbysdk.deviceStatus,zbysdk.deviceStatus.stream_type);
2123
+ defaultApi.writeLog(`push_type_talrtc,camera: ${zbysdk.deviceStatus.camera},microphone: ${zbysdk.deviceStatus.microphone},type: ${zbysdk.deviceStatus.stream_type},a_fps: ${dataTalrtcCapture.afps}, a_bit: ${dataTalrtcCapture.audioBitrate}, v_fps: ${dataTalrtcCapture.fps}, v_bit: ${dataTalrtcCapture.videoBitrate}`);
2124
+ // if (window.zbyAVSDK_init_params.zego.role === 'teacher') {
2125
+ dataReport.heartbeat({
2126
+ ...{...heartBeatDataReportObj, push_type: zbysdk.deviceStatus.stream_type, volume: heartBeatDataReportObj.volume.slice(0,heartBeatDataReportObj.volume.length-1)},
2127
+ // pull_info: JSON.stringify(pullInfo),
2128
+ pull_info: pullInfo,
2129
+ // cpu_rate: util.toFixed(cpuRate/rateCount),
2130
+ // mem_rate: util.toFixed(memRate/rateCount),
2131
+ });
2132
+ // } else {
2133
+ // dataReport.heartbeat({
2134
+ // ...heartBeatDataReportObj,
2135
+ // pull_info: JSON.stringify(pullInfo),
2136
+ // cpu_rate: util.toFixed(cpuRate/rateCount),
2137
+ // mem_rate: util.toFixed(memRate/rateCount),
2138
+ // app_cpu: util.toFixed(appCpuRate/rateCount),
2139
+ // app_mem: util.toFixed(appMemUsed/rateCount),
2140
+ // video_mem: gpus
2141
+ // });
2142
+ // }
2143
+ } catch (e) {
2144
+ console.log(e);
2145
+ }
2146
+ }
2147
+ resetHeartBeatDataReportObj();
2148
+ // cpuRate = 0;
2149
+ // memRate = 0;
2150
+ // appCpuRate = 0;
2151
+ // appMemUsed = 0;
2152
+
2153
+ // clearInterval(rateTimer);
2154
+ // }
2155
+ // }, 10 * 1000);
2156
+ };
2157
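+
+ // Editor's sketch (not part of the original source): the repeated audio/video flag to
+ // stream_type derivations above (pull and push) could share one hypothetical helper with
+ // identical behaviour to the inline if/else chains.
+ const __deriveStreamType = (hasAudio, hasVideo) => {
+ if (hasAudio && hasVideo) return 'both';
+ if (hasVideo) return 'video';
+ if (hasAudio) return 'audio';
+ return 'none';
+ };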
+
2158
+ const heartBeatDataReport = (type) => {
2159
+ try {
2160
+ if (type === 'start' && !heartBeatDataReportTimer) {
2161
+ console.log('start heart beat report');
2162
+ _heartBeatDataReport();
2163
+ heartBeatDataReportTimer = setInterval(() => {
2164
+ _heartBeatDataReport();
2165
+ }, 30 * 1000);
2166
+ }
2167
+ if (type === 'stop') {
2168
+ clearInterval(heartBeatDataReportTimer);
2169
+ heartBeatDataReportTimer = null;
2170
+ }
2171
+ } catch (error) {
2172
+ console.log(error);
2173
+ }
2174
+ };
2175
+ const lastStreamReportTimestamps = {};
2176
+
2177
+ const heartBeatDataReportCalc = (name, _data) => {
2178
+ console.log('heartBeatDataReportCalc raw:', name, _data);
2179
+ let _d = JSON.parse(_data.stats);
2180
+ console.log('heartBeatDataReportCalc parsed:', name, _d);
2181
+
2182
+ if (lastStreamReportTimestamps[_d.streamId] && Date.now() - lastStreamReportTimestamps[_d.streamId] < 4999) {
2183
+ return;
2184
+ } else {
2185
+ lastStreamReportTimestamps[_d.streamId] = Date.now();
2186
+ }
2187
+ // pull stream stats
2188
+ const pullKeys = ['fps', 'videoBitrate', 'afps', 'audioBitrate'];
2189
+ if (name === 'onRemoteStatistics') {
2190
+ console.log('heartBeatDataReportCalc pull info map:', streamIdRtcPlayerInfo, _d.streamId);
2191
+ if (streamIdRtcPlayerInfo && streamIdRtcPlayerInfo.hasOwnProperty(_d.streamId)) {
2192
+ console.log('heartBeatDataReportCalc known pull stream');
2193
+ let streamid = _d.streamId;
2194
+ let isReport = true;
2195
+ // streamIdRtcPlayerInfo[streamid].count++;
2196
+ heartBeatRealKeys.forEach((realKey, index) => {
2197
+ if (_d.hasOwnProperty(pullKeys[index])) {
2198
+ if (streamIdRtcPlayerInfo[streamid][realKey] === undefined) {
2199
+ streamIdRtcPlayerInfo[streamid][realKey] = [];
2200
+ isReport = false;
2201
+ }
2202
+ // streamIdRtcPlayerInfo[streamid][realKey].push(parseFloat(parseInt(item[pullKeys[index]])));
2203
+ }
2204
+ });
2205
+ console.log('heartBeatDataReportCalc remote stats:', _d);
2206
+ if (isReport) {
2207
+ let audio_fps_talrtc_pull = 0;
2208
+ let audio_bitrate_talrtc_pull = 0;
2209
+ let video_fps_talrtc_pull = 0;
2210
+ let video_bitrate_talrtc_pull = 0;
2211
+ console.log('heartBeatDataReportCalc pull type check:', _d);
2212
+ if(streamIdRtcPlayerInfo1[streamid].audio_type && streamIdRtcPlayerInfo1[streamid].video_type) {
2213
+ console.log('heartBeatDataReportCalc pulling audio and video:', _d);
2214
+ audio_fps_talrtc_pull = parseFloat(parseInt(_d.afps));
2215
+ audio_bitrate_talrtc_pull = parseFloat(parseInt(_d.audioBitrate));
2216
+ video_fps_talrtc_pull = parseFloat(parseInt(_d.fps));
2217
+ video_bitrate_talrtc_pull = parseFloat(parseInt(_d.videoBitrate));
2218
+ } else if(!streamIdRtcPlayerInfo1[streamid].audio_type && streamIdRtcPlayerInfo1[streamid].video_type) {
2219
+ video_fps_talrtc_pull = parseFloat(parseInt(_d.fps));
2220
+ video_bitrate_talrtc_pull = parseFloat(parseInt(_d.videoBitrate));
2221
+ } else if(streamIdRtcPlayerInfo1[streamid].audio_type && !streamIdRtcPlayerInfo1[streamid].video_type) {
2222
+ audio_fps_talrtc_pull = parseFloat(parseInt(_d.afps));
2223
+ audio_bitrate_talrtc_pull = parseFloat(parseInt(_d.audioBitrate));
2224
+ }
2225
+ streamIdRtcPlayerInfo[streamid].audio_fps.push(audio_fps_talrtc_pull);
2226
+ streamIdRtcPlayerInfo[streamid].audio_bitrate.push(audio_bitrate_talrtc_pull);
2227
+ streamIdRtcPlayerInfo[streamid].video_fps.push(video_fps_talrtc_pull);
2228
+ streamIdRtcPlayerInfo[streamid].video_bitrate.push(video_bitrate_talrtc_pull);
2229
+ streamIdRtcPlayerInfo[streamid].pull_loss.push(_d.packetLoss);
2230
+ streamIdRtcPlayerInfo[streamid].pull_delay.push(_d.rtt);
2231
+
2232
+ streamIdRtcPlayerInfo[streamid].ctime.push(Math.round((new Date().getTime()+dataReport.timestamp)/1000));
2233
+ streamIdRtcPlayerInfo[streamid].video_ifg += _d.videoBlockRate;
2234
+ streamIdRtcPlayerInfo[streamid].audio_ifg += _d.audioBlockRate;
2235
+ // console.log('hsg_tpull_keys',JSON.parse(JSON.stringify(streamIdRtcPlayerInfo[streamid])),_d.avTimestampDiff);
2236
+ console.log('streamIdRtcPlayerInfo::: ', streamIdRtcPlayerInfo);
2237
+ }
2238
+ }
2239
+ }
2240
+ // push stream stats
2241
+ const pushKeys = ['fps', 'videoBitrate', 'afps', 'audioBitrate'];
2242
+ if (name === 'onLocalStatistics') {
2243
+ // console.log('hsgmzk',_d);
2244
+ // heartBeatDataReportObj.count++;
2245
+ // heartBeatRealKeys.forEach((realKey, index) => {
2246
+ // if (heartBeatDataReportObj.hasOwnProperty(realKey) && _d.hasOwnProperty(pushKeys[index])) {
2247
+ // heartBeatDataReportObj[realKey].push(parseFloat(parseInt(_d[pushKeys[index]])));
2248
+ // }
2249
+ // });
2250
+ let audio_fps_talrtc_push = 0;
2251
+ let audio_bitrate_talrtc_push = 0;
2252
+ let video_fps_talrtc_push = 0;
2253
+ let video_bitrate_talrtc_push = 0;
2254
+ dataTalrtcCapture =_d;
2255
+
2256
+ if(zbysdk.deviceStatus.camera && zbysdk.deviceStatus.microphone) {
2257
+ audio_fps_talrtc_push = parseFloat(parseInt(_d.afps));
2258
+ audio_bitrate_talrtc_push = parseFloat(parseInt(_d.audioBitrate));
2259
+ video_fps_talrtc_push = parseFloat(parseInt(_d.fps));
2260
+ video_bitrate_talrtc_push= parseFloat(parseInt(_d.videoBitrate));
2261
+ } else if(!zbysdk.deviceStatus.camera && zbysdk.deviceStatus.microphone) {
2262
+ audio_fps_talrtc_push = parseFloat(parseInt(_d.afps));
2263
+ audio_bitrate_talrtc_push = parseFloat(parseInt(_d.audioBitrate));
2264
+ } else if(zbysdk.deviceStatus.camera && !zbysdk.deviceStatus.microphone) {
2265
+ video_fps_talrtc_push = parseFloat(parseInt(_d.fps));
2266
+ video_bitrate_talrtc_push = parseFloat(parseInt(_d.videoBitrate));
2267
+ }
2268
+ heartBeatDataReportObj.audio_fps.push(audio_fps_talrtc_push);
2269
+ heartBeatDataReportObj.audio_bitrate.push(audio_bitrate_talrtc_push);
2270
+ heartBeatDataReportObj.video_fps.push(video_fps_talrtc_push);
2271
+ heartBeatDataReportObj.video_bitrate.push(video_bitrate_talrtc_push);
2272
+ heartBeatDataReportObj.push_loss.push(_d.packetLoss);
2273
+ heartBeatDataReportObj.push_delay.push(_d.rtt);
2274
+ heartBeatDataReportObj.ctime.push(Math.round((new Date().getTime()+dataReport.timestamp)/1000));
2275
+ }
2276
+ };
2277
+
2278
+ // push-side heartbeat fields
2279
+ const resetHeartBeatDataReportObj = () => {
2280
+ console.log('resetHeartBeatDataReportObj: reset push metrics');
2281
+ heartBeatDataReportObj = {
2282
+ ctime: [],
2283
+ push_type: 'none',
2284
+ video_fps: [],
2285
+ video_bitrate: [],
2286
+ audio_fps: [],
2287
+ audio_bitrate: [],
2288
+ push_loss: [],
2289
+ push_delay: [],
2290
+ volume: ''
2291
+ };
2292
+ };
2293
+
2294
+ resetHeartBeatDataReportObj();
2295
+ // track the pull stream type of each stream for data reporting
2296
+ const resetStreamIdRtcPlayerInfo1 = (streamId) => {
2297
+ streamIdRtcPlayerInfo1[streamId] = {
2298
+ audio_type: false,
2299
+ video_type: false,
2300
+ stream_type: 'none'
2301
+ };
2302
+ // console.log('hsgshgs_heartbeat',streamIdRtcPlayerInfo1);
2303
+ };
2304
+ // pull-side heartbeat fields
2305
+ const resetStreamIdRtcPlayerInfo = (streamId) => {
2306
+ console.log('reset stream info ----------', streamId);
2307
+ streamIdRtcPlayerInfo[streamId] = {
2308
+ pull_uid: util.getUidByStreamId(streamId),
2309
+ streamid: streamId,
2310
+ ctime: [],
2311
+ pull_type: streamIdRtcPlayerInfo1[streamId].stream_type,
2312
+ volume: '',
2313
+ // averaged values (legacy)
2314
+ // count: 0,
2315
+ // video_fps: 0,
2316
+ // video_bitrate: 0,
2317
+ // audio_fps: 0,
2318
+ // audio_bitrate: 0,
2319
+ video_fps: [],
2320
+ video_bitrate: [],
2321
+ audio_fps: [],
2322
+ audio_bitrate: [],
2323
+ pull_loss: [],
2324
+ pull_delay: [],
2325
+ // A/V sync drift field
2326
+ avtimestampdiff: [],
2327
+ // accumulated counters
2328
+ audio_ifg: 0,
2329
+ video_ifg: 0
2330
+ };
2331
+
2332
+ console.log('reset stream info ----------', streamId, streamIdRtcPlayerInfo[streamId]);
2333
+ };
2334
+
2335
+ export default {
2336
+ init,
2337
+ setCameraCaptureResolution,
2338
+ setCameraEncodeResolution,
2339
+ setCameraEncodeFps, // call not implemented yet
2340
+ setCameraEncodeBitrate,
2341
+ getCameraResolution,
2342
+ getMicrophoneDeviceList,
2343
+ setMicrophoneDevice,
2344
+ openOrCloseMicrophone,
2345
+ getCurrentMicrophoneVolume,
2346
+ setCurrentMicrophoneVolume,
2347
+ getCameraDeviceList,
2348
+ setCameraDevice,
2349
+ openOrCloseCamera,
2350
+ getSpeakerDeviceList,
2351
+ setSpeakerDevice,
2352
+ getCurrentSpeakerVolume,
2353
+ setCurrentSpeakerVolume,
2354
+ setSpeakerDeviceMute,
2355
+ muteRemoteAudio,
2356
+ muteRemoteVideo,
2357
+ getSpeakerSimpleVolume,
2358
+ setSpeakerSimpleVolume,
2359
+ setSpeakerSimpleMute,
2360
+ startLocalOrRemotePreview,
2361
+ enableAudioSpeakerCapture,
2362
+ startPush,
2363
+ stopPush,
2364
+ startPlay,
2365
+ stopPlay,
2366
+ initPullFlow,
2367
+ stopPlayAll,
2368
+ changePullFlow,
2369
+ startPreview,
2370
+ stopPreview,
2371
+ sendSEIMsg,
2372
+ loadCollectionInputEntry2,
2373
+ controlCdnStreaming, // addPublishRtmpStreamUrl or removePublishStreamUrl
2374
+ leaveRoom,
2375
+ destroyEngine,
2376
+ unloadTalrtc,
2377
+ muteLocalVideo,
2378
+ muteLocalAudio,
2379
+ pullAudioFlow,
2380
+ hasStream,
2381
+ teacherStartLinkMic,
2382
+ teacherStopLinkMic,
2383
+ setMirrorStatus,
2384
+ setPlayViewMirror,
2385
+ setAudioAuxSource,
2386
+ setAudioType,
2387
+ startAudioExCapture,
2388
+ startMultiScreen,
2389
+ setAudioMixMode,
2390
+ getChannelIndex,
2391
+ getSDKVersion
2392
+ };