@coze/realtime-api 1.1.1 → 1.2.0-beta.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,875 @@
1
+ "use strict";
2
+ // The require scope
3
+ var __webpack_require__ = {};
4
+ /************************************************************************/ // webpack/runtime/compat_get_default_export
5
+ (()=>{
6
+ // getDefaultExport function for compatibility with non-ESM modules
7
+ __webpack_require__.n = function(module) {
8
+ var getter = module && module.__esModule ? function() {
9
+ return module['default'];
10
+ } : function() {
11
+ return module;
12
+ };
13
+ __webpack_require__.d(getter, {
14
+ a: getter
15
+ });
16
+ return getter;
17
+ };
18
+ })();
19
+ // webpack/runtime/define_property_getters
20
+ (()=>{
21
+ __webpack_require__.d = function(exports1, definition) {
22
+ for(var key in definition)if (__webpack_require__.o(definition, key) && !__webpack_require__.o(exports1, key)) Object.defineProperty(exports1, key, {
23
+ enumerable: true,
24
+ get: definition[key]
25
+ });
26
+ };
27
+ })();
28
+ // webpack/runtime/has_own_property
29
+ (()=>{
30
+ __webpack_require__.o = function(obj, prop) {
31
+ return Object.prototype.hasOwnProperty.call(obj, prop);
32
+ };
33
+ })();
34
+ // webpack/runtime/make_namespace_object
35
+ (()=>{
36
+ // define __esModule on exports
37
+ __webpack_require__.r = function(exports1) {
38
+ if ('undefined' != typeof Symbol && Symbol.toStringTag) Object.defineProperty(exports1, Symbol.toStringTag, {
39
+ value: 'Module'
40
+ });
41
+ Object.defineProperty(exports1, '__esModule', {
42
+ value: true
43
+ });
44
+ };
45
+ })();
46
+ /************************************************************************/ var __webpack_exports__ = {};
47
+ // ESM COMPAT FLAG
48
+ __webpack_require__.r(__webpack_exports__);
49
+ // EXPORTS
50
+ __webpack_require__.d(__webpack_exports__, {
51
+ RealtimeAPIError: ()=>/* reexport */ RealtimeAPIError,
52
+ RealtimeUtils: ()=>/* reexport */ utils_namespaceObject,
53
+ RealtimeError: ()=>/* reexport */ error_RealtimeError,
54
+ EventNames: ()=>/* reexport */ event_names,
55
+ RealtimeClient: ()=>/* binding */ RealtimeClient
56
+ });
57
+ // NAMESPACE OBJECT: ./src/utils.ts
58
+ var utils_namespaceObject = {};
59
+ __webpack_require__.r(utils_namespaceObject);
60
+ __webpack_require__.d(utils_namespaceObject, {
61
+ checkDevicePermission: ()=>checkDevicePermission,
62
+ checkPermission: ()=>checkPermission,
63
+ getAudioDevices: ()=>getAudioDevices,
64
+ isMobileVideoDevice: ()=>isMobileVideoDevice,
65
+ isScreenShareDevice: ()=>isScreenShareDevice,
66
+ isScreenShareSupported: ()=>isScreenShareSupported,
67
+ sleep: ()=>sleep
68
+ });
69
+ const api_namespaceObject = require("@coze/api");
70
+ const rtc_namespaceObject = require("@volcengine/rtc");
71
+ var rtc_default = /*#__PURE__*/ __webpack_require__.n(rtc_namespaceObject);
72
+ /**
73
+  * Delays execution for the specified duration
74
+  * @param milliseconds The time to sleep in milliseconds
75
+  * @throws {Error} If milliseconds is negative
76
+  * @returns Promise that resolves after the specified duration
77
+  */ const sleep = (milliseconds)=>{
78
+ if (milliseconds < 0) throw new Error('Sleep duration must be non-negative');
79
+ return new Promise((resolve)=>setTimeout(resolve, milliseconds));
80
+ };
81
+ /**
82
+ * @deprecated use checkDevicePermission instead
83
+  * Check microphone permission, returns a boolean
84
+ */ const checkPermission = async function() {
85
+ let { audio = true, video = false } = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : {};
86
+ try {
87
+ const result = await rtc_default().enableDevices({
88
+ audio,
89
+ video
90
+ });
91
+ return result.audio;
92
+ } catch (error) {
93
+ console.error('Failed to check device permissions:', error);
94
+ return false;
95
+ }
96
+ };
97
+ /**
98
+ * Checks device permissions for audio and video
99
+ * @param checkVideo Whether to check video permissions (default: false)
100
+ * @returns Promise that resolves with the device permission status
101
+ */ const checkDevicePermission = async function() {
102
+ let checkVideo = arguments.length > 0 && void 0 !== arguments[0] && arguments[0];
103
+ return await rtc_default().enableDevices({
104
+ audio: true,
105
+ video: checkVideo
106
+ });
107
+ };
108
+ /**
109
+ * Get audio devices
110
+  * @returns Promise<AudioDevices> Object containing arrays of audio input, audio output, and video input devices
111
+ */ const getAudioDevices = async function() {
112
+ let { video = false } = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : {};
113
+ let devices = [];
114
+ if (video) {
115
+ devices = await rtc_default().enumerateDevices();
116
+ if (isScreenShareSupported()) // @ts-expect-error - add screenShare device to devices
117
+ devices.push({
118
+ deviceId: 'screenShare',
119
+ kind: 'videoinput',
120
+ label: 'Screen Share',
121
+ groupId: 'screenShare'
122
+ });
123
+ } else devices = await [
124
+ ...await rtc_default().enumerateAudioCaptureDevices(),
125
+ ...await rtc_default().enumerateAudioPlaybackDevices()
126
+ ];
127
+ if (!(null == devices ? void 0 : devices.length)) return {
128
+ audioInputs: [],
129
+ audioOutputs: [],
130
+ videoInputs: []
131
+ };
132
+ return {
133
+ audioInputs: devices.filter((i)=>i.deviceId && 'audioinput' === i.kind),
134
+ audioOutputs: devices.filter((i)=>i.deviceId && 'audiooutput' === i.kind),
135
+ videoInputs: devices.filter((i)=>i.deviceId && 'videoinput' === i.kind)
136
+ };
137
+ };
138
+ const isScreenShareDevice = (deviceId)=>'screenShare' === deviceId;
139
+ /**
140
+  * Determines whether the deviceId is a mobile front- or rear-facing camera ('user' or 'environment')
141
+ * @param deviceId
142
+ * @returns
143
+ */ const isMobileVideoDevice = (deviceId)=>'user' === deviceId || 'environment' === deviceId;
144
+ /**
145
+ * Check if browser supports screen sharing
146
+ * 检查浏览器是否支持屏幕共享
147
+ */ function isScreenShareSupported() {
148
+ var _navigator_mediaDevices, _navigator;
149
+ return !!(null === (_navigator = navigator) || void 0 === _navigator ? void 0 : null === (_navigator_mediaDevices = _navigator.mediaDevices) || void 0 === _navigator_mediaDevices ? void 0 : _navigator_mediaDevices.getDisplayMedia);
150
+ }
151
+ var event_names_EventNames = /*#__PURE__*/ function(EventNames) {
152
+ /**
153
+ * en: All events
154
+ * zh: 所有事件
155
+ */ EventNames["ALL"] = "realtime.event";
156
+ /**
157
+ * en: All client events
158
+ * zh: 所有客户端事件
159
+ */ EventNames["ALL_CLIENT"] = "client.*";
160
+ /**
161
+ * en: All server events
162
+ * zh: 所有服务端事件
163
+ */ EventNames["ALL_SERVER"] = "server.*";
164
+ /**
165
+ * en: Client connected
166
+ * zh: 客户端连接
167
+ */ EventNames["CONNECTED"] = "client.connected";
168
+ /**
169
+ * en: Client connecting
170
+ * zh: 客户端连接中
171
+ */ EventNames["CONNECTING"] = "client.connecting";
172
+ /**
173
+ * en: Client interrupted
174
+ * zh: 客户端中断
175
+ */ EventNames["INTERRUPTED"] = "client.interrupted";
176
+ /**
177
+ * en: Client disconnected
178
+ * zh: 客户端断开
179
+ */ EventNames["DISCONNECTED"] = "client.disconnected";
180
+ /**
181
+ * en: Client audio unmuted
182
+ * zh: 客户端音频未静音
183
+ */ EventNames["AUDIO_UNMUTED"] = "client.audio.unmuted";
184
+ /**
185
+ * en: Client audio muted
186
+ * zh: 客户端音频静音
187
+ */ EventNames["AUDIO_MUTED"] = "client.audio.muted";
188
+ /**
189
+ * en: Client video on
190
+ * zh: 客户端视频开启
191
+ */ EventNames["VIDEO_ON"] = "client.video.on";
192
+ /**
193
+ * en: Client video off
194
+ * zh: 客户端视频关闭
195
+ */ EventNames["VIDEO_OFF"] = "client.video.off";
196
+ /**
197
+ * en: Client video event
198
+ * zh: 客户端视频事件
199
+ */ EventNames["PLAYER_EVENT"] = "client.video.event";
200
+ /**
201
+ * en: Client error
202
+ * zh: 客户端错误
203
+ */ EventNames["ERROR"] = "client.error";
204
+ /**
205
+  * en: Suppress stationary noise
206
+ * zh: 抑制平稳噪声
207
+ */ EventNames["SUPPRESS_STATIONARY_NOISE"] = "client.suppress.stationary.noise";
208
+ /**
209
+ * en: Suppress non-stationary noise
210
+ * zh: 抑制非平稳噪声
211
+ */ EventNames["SUPPRESS_NON_STATIONARY_NOISE"] = "client.suppress.non.stationary.noise";
212
+ /**
213
+ * en: Audio input device changed
214
+ * zh: 音频输入设备改变
215
+ */ EventNames["AUDIO_INPUT_DEVICE_CHANGED"] = "client.input.device.changed";
216
+ /**
217
+ * en: Audio output device changed
218
+ * zh: 音频输出设备改变
219
+ */ EventNames["AUDIO_OUTPUT_DEVICE_CHANGED"] = "client.output.device.changed";
220
+ /**
221
+ * en: Video input device changed
222
+ * zh: 视频输入设备改变
223
+ */ EventNames["VIDEO_INPUT_DEVICE_CHANGED"] = "client.video.input.device.changed";
224
+ /**
225
+ * en: Network quality changed
226
+ * zh: 网络质量改变
227
+ */ EventNames["NETWORK_QUALITY"] = "client.network.quality";
228
+ /**
229
+ * en: Bot joined
230
+ * zh: Bot 加入
231
+ */ EventNames["BOT_JOIN"] = "server.bot.join";
232
+ /**
233
+ * en: Bot left
234
+ * zh: Bot 离开
235
+ */ EventNames["BOT_LEAVE"] = "server.bot.leave";
236
+ /**
237
+  * en: Agent speech started
238
+ * zh: 开始说话
239
+ */ EventNames["AUDIO_AGENT_SPEECH_STARTED"] = "server.audio.agent.speech_started";
240
+ /**
241
+  * en: Agent speech stopped
242
+ * zh: 停止说话
243
+ */ EventNames["AUDIO_AGENT_SPEECH_STOPPED"] = "server.audio.agent.speech_stopped";
244
+ /**
245
+ * en: Server error
246
+ * zh: 服务端错误
247
+ */ EventNames["SERVER_ERROR"] = "server.error";
248
+ /**
249
+ * en: User speech started
250
+ * zh: 用户开始说话
251
+ */ EventNames["AUDIO_USER_SPEECH_STARTED"] = "server.audio.user.speech_started";
252
+ /**
253
+ * en: User speech stopped
254
+ * zh: 用户停止说话
255
+ */ EventNames["AUDIO_USER_SPEECH_STOPPED"] = "server.audio.user.speech_stopped";
256
+ /**
257
+ * en: User successfully enters the room
258
+ * zh: 用户成功进入房间后,会收到该事件
259
+ */ EventNames["SESSION_CREATED"] = "server.session.created";
260
+ /**
261
+ * en: Session updated
262
+ * zh: 会话更新
263
+ */ EventNames["SESSION_UPDATE"] = "server.session.update";
264
+ return EventNames;
265
+ }(event_names_EventNames || {});
266
+ /* ESM default export */ const event_names = event_names_EventNames;
267
+ var error_RealtimeError = /*#__PURE__*/ function(RealtimeError) {
268
+ RealtimeError["DEVICE_ACCESS_ERROR"] = "DEVICE_ACCESS_ERROR";
269
+ RealtimeError["STREAM_CREATION_ERROR"] = "STREAM_CREATION_ERROR";
270
+ RealtimeError["CONNECTION_ERROR"] = "CONNECTION_ERROR";
271
+ RealtimeError["DISCONNECTION_ERROR"] = "DISCONNECTION_ERROR";
272
+ RealtimeError["INTERRUPT_ERROR"] = "INTERRUPT_ERROR";
273
+ RealtimeError["EVENT_HANDLER_ERROR"] = "EVENT_HANDLER_ERROR";
274
+ RealtimeError["PERMISSION_DENIED"] = "PERMISSION_DENIED";
275
+ RealtimeError["NETWORK_ERROR"] = "NETWORK_ERROR";
276
+ RealtimeError["INVALID_STATE"] = "INVALID_STATE";
277
+ RealtimeError["CREATE_ROOM_ERROR"] = "CREATE_ROOM_ERROR";
278
+ RealtimeError["PARSE_MESSAGE_ERROR"] = "PARSE_MESSAGE_ERROR";
279
+ RealtimeError["HANDLER_MESSAGE_ERROR"] = "HANDLER_MESSAGE_ERROR";
280
+ return RealtimeError;
281
+ }({});
282
+ class RealtimeAPIError extends Error {
283
+ /**
284
+ * @param code - Error code
285
+ * @param message - Error message
286
+ * @param error - Error object
287
+ */ constructor(code, message, error){
288
+ super(`[${code}] ${message}`);
289
+ this.name = 'RealtimeAPIError';
290
+ this.code = code;
291
+ this.error = error;
292
+ }
293
+ }
294
+ class RealtimeEventHandler {
295
+ clearEventHandlers() {
296
+ this.eventHandlers = {};
297
+ }
298
+ on(eventName, callback) {
299
+ this._log(`on ${eventName} event`);
300
+ this.eventHandlers[eventName] = this.eventHandlers[eventName] || [];
301
+ this.eventHandlers[eventName].push(callback);
302
+ return callback;
303
+ }
304
+ off(eventName, callback) {
305
+ this._log(`off ${eventName} event`);
306
+ const handlers = this.eventHandlers[eventName] || [];
307
+ if (callback) {
308
+ const index = handlers.indexOf(callback);
309
+ if (-1 === index) {
310
+ console.warn(`Could not turn off specified event listener for "${eventName}": not found as a listener`);
311
+ return;
312
+ }
313
+ handlers.splice(index, 1);
314
+ } else delete this.eventHandlers[eventName];
315
+ }
316
+ // eslint-disable-next-line max-params
317
+ _dispatchToHandlers(eventName, event, handlers, prefix) {
318
+ for (const handler of handlers)if (!prefix || eventName.startsWith(prefix)) try {
319
+ handler(eventName, event);
320
+ } catch (e) {
321
+ throw new RealtimeAPIError(error_RealtimeError.HANDLER_MESSAGE_ERROR, `Failed to handle message: ${eventName}`);
322
+ }
323
+ }
324
+ dispatch(eventName, event) {
325
+ let consoleLog = !(arguments.length > 2) || void 0 === arguments[2] || arguments[2];
326
+ if (consoleLog) this._log(`dispatch ${eventName} event`, event);
327
+ const handlers = (this.eventHandlers[eventName] || []).slice();
328
+ this._dispatchToHandlers(eventName, event, handlers);
329
+ const allHandlers = (this.eventHandlers[event_names.ALL] || []).slice();
330
+ this._dispatchToHandlers(eventName, event, allHandlers);
331
+ const allClientHandlers = (this.eventHandlers[event_names.ALL_CLIENT] || []).slice();
332
+ this._dispatchToHandlers(eventName, event, allClientHandlers, 'client.');
333
+ const allServerHandlers = (this.eventHandlers[event_names.ALL_SERVER] || []).slice();
334
+ this._dispatchToHandlers(eventName, event, allServerHandlers, 'server.');
335
+ }
336
+ _log(message, event) {
337
+ if (this._debug) console.log(`[RealtimeClient] ${message}`, event);
338
+ }
339
+ constructor(debug = false){
340
+ this.eventHandlers = {};
341
+ this._debug = debug;
342
+ }
343
+ }
344
+ const extension_ainr_namespaceObject = require("@volcengine/rtc/extension-ainr");
345
+ var extension_ainr_default = /*#__PURE__*/ __webpack_require__.n(extension_ainr_namespaceObject);
346
+ class EngineClient extends RealtimeEventHandler {
347
+ bindEngineEvents() {
348
+ this.engine.on(rtc_default().events.onUserMessageReceived, this.handleMessage);
349
+ this.engine.on(rtc_default().events.onUserJoined, this.handleUserJoin);
350
+ this.engine.on(rtc_default().events.onUserLeave, this.handleUserLeave);
351
+ this.engine.on(rtc_default().events.onError, this.handleEventError);
352
+ this.engine.on(rtc_default().events.onNetworkQuality, this.handleNetworkQuality);
353
+ if (this._isSupportVideo) this.engine.on(rtc_default().events.onPlayerEvent, this.handlePlayerEvent);
354
+ if (this._debug) {
355
+ this.engine.on(rtc_default().events.onLocalAudioPropertiesReport, this.handleLocalAudioPropertiesReport);
356
+ this.engine.on(rtc_default().events.onRemoteAudioPropertiesReport, this.handleRemoteAudioPropertiesReport);
357
+ }
358
+ }
359
+ removeEventListener() {
360
+ this.engine.off(rtc_default().events.onUserMessageReceived, this.handleMessage);
361
+ this.engine.off(rtc_default().events.onUserJoined, this.handleUserJoin);
362
+ this.engine.off(rtc_default().events.onUserLeave, this.handleUserLeave);
363
+ this.engine.off(rtc_default().events.onError, this.handleEventError);
364
+ this.engine.off(rtc_default().events.onNetworkQuality, this.handleNetworkQuality);
365
+ if (this._isSupportVideo) this.engine.off(rtc_default().events.onPlayerEvent, this.handlePlayerEvent);
366
+ if (this._debug) {
367
+ this.engine.off(rtc_default().events.onLocalAudioPropertiesReport, this.handleLocalAudioPropertiesReport);
368
+ this.engine.off(rtc_default().events.onRemoteAudioPropertiesReport, this.handleRemoteAudioPropertiesReport);
369
+ }
370
+ }
371
+ _parseMessage(event) {
372
+ try {
373
+ return JSON.parse(event.message);
374
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
375
+ } catch (e) {
376
+ throw new RealtimeAPIError(error_RealtimeError.PARSE_MESSAGE_ERROR, (null == e ? void 0 : e.message) || 'Unknown error');
377
+ }
378
+ }
379
+ handleMessage(event) {
380
+ try {
381
+ const message = this._parseMessage(event);
382
+ this.dispatch(`server.${message.event_type}`, message);
383
+ } catch (e) {
384
+ if (e instanceof RealtimeAPIError) {
385
+ if (e.code === error_RealtimeError.PARSE_MESSAGE_ERROR) this.dispatch(event_names.ERROR, {
386
+ message: `Failed to parse message: ${event.message}`,
387
+ error: e
388
+ });
389
+ else if (e.code === error_RealtimeError.HANDLER_MESSAGE_ERROR) this.dispatch(event_names.ERROR, {
390
+ message: `Failed to handle message: ${event.message}`,
391
+ error: e
392
+ });
393
+ } else this.dispatch(event_names.ERROR, e);
394
+ }
395
+ }
396
+ handleEventError(e) {
397
+ this.dispatch(event_names.ERROR, e);
398
+ }
399
+ handleUserJoin(event) {
400
+ this.joinUserId = event.userInfo.userId;
401
+ this.dispatch(event_names.BOT_JOIN, event);
402
+ }
403
+ handleUserLeave(event) {
404
+ this.dispatch(event_names.BOT_LEAVE, event);
405
+ }
406
+ handlePlayerEvent(event) {
407
+ this.dispatch(event_names.PLAYER_EVENT, event);
408
+ }
409
+ handleNetworkQuality(uplinkNetworkQuality, downlinkNetworkQuality) {
410
+ this.dispatch(event_names.NETWORK_QUALITY, {
411
+ uplinkNetworkQuality,
412
+ downlinkNetworkQuality
413
+ });
414
+ }
415
+ async joinRoom(options) {
416
+ const { token, roomId, uid, audioMutedDefault, videoOnDefault, isAutoSubscribeAudio } = options;
417
+ try {
418
+ await this.engine.joinRoom(token, roomId, {
419
+ userId: uid
420
+ }, {
421
+ isAutoPublish: !audioMutedDefault,
422
+ isAutoSubscribeAudio,
423
+ isAutoSubscribeVideo: this._isSupportVideo && videoOnDefault
424
+ });
425
+ } catch (e) {
426
+ if (e instanceof Error) throw new RealtimeAPIError(error_RealtimeError.CONNECTION_ERROR, e.message);
427
+ }
428
+ }
429
+ async setAudioInputDevice(deviceId) {
430
+ const devices = await getAudioDevices();
431
+ if (-1 === devices.audioInputs.findIndex((i)=>i.deviceId === deviceId)) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, `Audio input device not found: ${deviceId}`);
432
+ this.engine.stopAudioCapture();
433
+ await this.engine.startAudioCapture(deviceId);
434
+ }
435
+ async setAudioOutputDevice(deviceId) {
436
+ const devices = await getAudioDevices({
437
+ video: false
438
+ });
439
+ if (-1 === devices.audioOutputs.findIndex((i)=>i.deviceId === deviceId)) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, `Audio output device not found: ${deviceId}`);
440
+ await this.engine.setAudioPlaybackDevice(deviceId);
441
+ }
442
+ async setVideoInputDevice(deviceId) {
443
+ let isAutoCapture = !(arguments.length > 1) || void 0 === arguments[1] || arguments[1];
444
+ var _this__videoConfig;
445
+ const devices = await getAudioDevices({
446
+ video: true
447
+ });
448
+ if (!isMobileVideoDevice(deviceId) && -1 === devices.videoInputs.findIndex((i)=>i.deviceId === deviceId)) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, `Video input device not found: ${deviceId}`);
449
+ await this.changeVideoState(false);
450
+ if (isScreenShareDevice(deviceId)) {
451
+ if (this._streamIndex === rtc_namespaceObject.StreamIndex.STREAM_INDEX_MAIN) this.engine.setLocalVideoPlayer(rtc_namespaceObject.StreamIndex.STREAM_INDEX_MAIN);
452
+ if (isAutoCapture) {
453
+ var _this__videoConfig1;
454
+ this.engine.setVideoSourceType(rtc_namespaceObject.StreamIndex.STREAM_INDEX_SCREEN, rtc_namespaceObject.VideoSourceType.VIDEO_SOURCE_TYPE_INTERNAL);
455
+ await this.engine.startScreenCapture(null === (_this__videoConfig1 = this._videoConfig) || void 0 === _this__videoConfig1 ? void 0 : _this__videoConfig1.screenConfig);
456
+ await this.engine.publishScreen(rtc_namespaceObject.MediaType.VIDEO);
457
+ }
458
+ this._streamIndex = rtc_namespaceObject.StreamIndex.STREAM_INDEX_SCREEN;
459
+ } else {
460
+ if (this._streamIndex === rtc_namespaceObject.StreamIndex.STREAM_INDEX_SCREEN) this.engine.setLocalVideoPlayer(rtc_namespaceObject.StreamIndex.STREAM_INDEX_SCREEN);
461
+ if (isAutoCapture) await this.engine.startVideoCapture(deviceId);
462
+ this._streamIndex = rtc_namespaceObject.StreamIndex.STREAM_INDEX_MAIN;
463
+ }
464
+ this.engine.setLocalVideoPlayer(this._streamIndex, {
465
+ renderDom: (null === (_this__videoConfig = this._videoConfig) || void 0 === _this__videoConfig ? void 0 : _this__videoConfig.renderDom) || 'local-player',
466
+ userId: this._roomUserId
467
+ });
468
+ }
469
+ async createLocalStream(userId, videoConfig) {
470
+ this._roomUserId = userId;
471
+ const devices = await getAudioDevices({
472
+ video: this._isSupportVideo
473
+ });
474
+ if (!devices.audioInputs.length) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, 'Failed to get audio devices');
475
+ if (this._isSupportVideo && !devices.videoInputs.length) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, 'Failed to get video devices');
476
+ await this.engine.startAudioCapture(devices.audioInputs[0].deviceId);
477
+ if (this._isSupportVideo) this.setVideoInputDevice((null == videoConfig ? void 0 : videoConfig.videoInputDeviceId) || devices.videoInputs[0].deviceId, null == videoConfig ? void 0 : videoConfig.videoOnDefault);
478
+ }
479
+ async disconnect() {
480
+ try {
481
+ await this.engine.leaveRoom();
482
+ this.removeEventListener();
483
+ this.clearEventHandlers();
484
+ rtc_default().destroyEngine(this.engine);
485
+ } catch (e) {
486
+ this.dispatch(event_names.ERROR, e);
487
+ throw e;
488
+ }
489
+ }
490
+ async changeAudioState(isMicOn) {
491
+ try {
492
+ if (isMicOn) await this.engine.publishStream(rtc_namespaceObject.MediaType.AUDIO);
493
+ else await this.engine.unpublishStream(rtc_namespaceObject.MediaType.AUDIO);
494
+ } catch (e) {
495
+ this.dispatch(event_names.ERROR, e);
496
+ throw e;
497
+ }
498
+ }
499
+ async changeVideoState(isVideoOn) {
500
+ try {
501
+ if (isVideoOn) {
502
+ if (this._streamIndex === rtc_namespaceObject.StreamIndex.STREAM_INDEX_MAIN) await this.engine.startVideoCapture();
503
+ else {
504
+ var _this__videoConfig;
505
+ this.engine.setVideoSourceType(rtc_namespaceObject.StreamIndex.STREAM_INDEX_SCREEN, rtc_namespaceObject.VideoSourceType.VIDEO_SOURCE_TYPE_INTERNAL);
506
+ await this.engine.startScreenCapture(null === (_this__videoConfig = this._videoConfig) || void 0 === _this__videoConfig ? void 0 : _this__videoConfig.screenConfig);
507
+ await this.engine.publishScreen(rtc_namespaceObject.MediaType.VIDEO);
508
+ }
509
+ } else if (this._streamIndex === rtc_namespaceObject.StreamIndex.STREAM_INDEX_MAIN) await this.engine.stopVideoCapture();
510
+ else {
511
+ await this.engine.stopScreenCapture();
512
+ await this.engine.unpublishScreen(rtc_namespaceObject.MediaType.VIDEO);
513
+ }
514
+ } catch (e) {
515
+ this.dispatch(event_names.ERROR, e);
516
+ throw e;
517
+ }
518
+ }
519
+ async stop() {
520
+ try {
521
+ const result = await this.engine.sendUserMessage(this.joinUserId, JSON.stringify({
522
+ id: 'event_1',
523
+ event_type: 'conversation.chat.cancel',
524
+ data: {}
525
+ }));
526
+ this._log(`interrupt ${this.joinUserId} ${result}`);
527
+ } catch (e) {
528
+ this.dispatch(event_names.ERROR, e);
529
+ throw e;
530
+ }
531
+ }
532
+ async sendMessage(message) {
533
+ try {
534
+ const result = await this.engine.sendUserMessage(this.joinUserId, JSON.stringify(message));
535
+ this._log(`sendMessage ${this.joinUserId} ${JSON.stringify(message)} ${result}`);
536
+ } catch (e) {
537
+ this.dispatch(event_names.ERROR, e);
538
+ throw e;
539
+ }
540
+ }
541
+ enableAudioPropertiesReport(config) {
542
+ this.engine.enableAudioPropertiesReport(config);
543
+ }
544
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
545
+ handleLocalAudioPropertiesReport(event) {
546
+ var _event__audioPropertiesInfo, _event_;
547
+ if (this._debug && (null === (_event_ = event[0]) || void 0 === _event_ ? void 0 : null === (_event__audioPropertiesInfo = _event_.audioPropertiesInfo) || void 0 === _event__audioPropertiesInfo ? void 0 : _event__audioPropertiesInfo.linearVolume) > 0) console.log('handleLocalAudioPropertiesReport', event);
548
+ }
549
+ handleRemoteAudioPropertiesReport(event) {
550
+ if (this._debug) console.log('handleRemoteAudioPropertiesReport', event);
551
+ }
552
+ async enableAudioNoiseReduction() {
553
+ var _this_engine;
554
+ await (null === (_this_engine = this.engine) || void 0 === _this_engine ? void 0 : _this_engine.setAudioCaptureConfig({
555
+ noiseSuppression: true,
556
+ echoCancellation: true,
557
+ autoGainControl: true
558
+ }));
559
+ }
560
+ async initAIAnsExtension() {
561
+ const AIAnsExtension = new (extension_ainr_default())();
562
+ await this.engine.registerExtension(AIAnsExtension);
563
+ this._AIAnsExtension = AIAnsExtension;
564
+ }
565
+ changeAIAnsExtension(enable) {
566
+ if (enable) {
567
+ var _this__AIAnsExtension;
568
+ null === (_this__AIAnsExtension = this._AIAnsExtension) || void 0 === _this__AIAnsExtension || _this__AIAnsExtension.enable();
569
+ } else {
570
+ var _this__AIAnsExtension1;
571
+ null === (_this__AIAnsExtension1 = this._AIAnsExtension) || void 0 === _this__AIAnsExtension1 || _this__AIAnsExtension1.disable();
572
+ }
573
+ }
574
+ async startAudioPlaybackDeviceTest() {
575
+ try {
576
+ await this.engine.startAudioPlaybackDeviceTest('audio-test.wav', 200);
577
+ } catch (e) {
578
+ this.dispatch(event_names.ERROR, e);
579
+ throw e;
580
+ }
581
+ }
582
+ stopAudioPlaybackDeviceTest() {
583
+ try {
584
+ this.engine.stopAudioPlaybackDeviceTest();
585
+ } catch (e) {
586
+ this.dispatch(event_names.ERROR, e);
587
+ throw e;
588
+ }
589
+ }
590
+ getRtcEngine() {
591
+ return this.engine;
592
+ }
593
+ // eslint-disable-next-line max-params
594
+ constructor(appId, debug = false, isTestEnv = false, isSupportVideo = false, videoConfig){
595
+ super(debug), this.joinUserId = '', this._AIAnsExtension = null, this._isSupportVideo = false;
596
+ if (isTestEnv) rtc_default().setParameter('ICE_CONFIG_REQUEST_URLS', [
597
+ 'rtc-test.bytedance.com'
598
+ ]);
599
+ this.engine = rtc_default().createEngine(appId);
600
+ this.handleMessage = this.handleMessage.bind(this);
601
+ this.handleUserJoin = this.handleUserJoin.bind(this);
602
+ this.handleUserLeave = this.handleUserLeave.bind(this);
603
+ this.handleEventError = this.handleEventError.bind(this);
604
+ this.handlePlayerEvent = this.handlePlayerEvent.bind(this);
605
+ this.handleNetworkQuality = this.handleNetworkQuality.bind(this);
606
+ // Debug only
607
+ this.handleLocalAudioPropertiesReport = this.handleLocalAudioPropertiesReport.bind(this);
608
+ this.handleRemoteAudioPropertiesReport = this.handleRemoteAudioPropertiesReport.bind(this);
609
+ this._isSupportVideo = isSupportVideo;
610
+ this._videoConfig = videoConfig;
611
+ }
612
+ }
613
+ // Only used for tests
614
+ const TEST_APP_ID = '6705332c79516e015e3e5f0c';
615
+ class RealtimeClient extends RealtimeEventHandler {
616
+ /**
617
+ * en: Establish a connection to the Coze API and join the room
618
+ *
619
+ * zh: 建立与 Coze API 的连接并加入房间
620
+ */ async connect() {
621
+ var _this__config_videoConfig;
622
+ const { botId, conversationId, voiceId, getRoomInfo } = this._config;
623
+ this.dispatch(event_names.CONNECTING, {});
624
+ let roomInfo;
625
+ try {
626
+ // Step1 get token
627
+ if (getRoomInfo) roomInfo = await getRoomInfo();
628
+ else {
629
+ const config = {};
630
+ if (this._config.prologueContent) config.prologue_content = this._config.prologueContent;
631
+ if (this._config.videoConfig) {
632
+ if (isScreenShareDevice(this._config.videoConfig.videoInputDeviceId)) config.video_config = {
633
+ stream_video_type: 'screen'
634
+ };
635
+ else config.video_config = {
636
+ stream_video_type: 'main'
637
+ };
638
+ }
639
+ roomInfo = await this._api.audio.rooms.create({
640
+ bot_id: botId,
641
+ conversation_id: conversationId || void 0,
642
+ voice_id: voiceId && voiceId.length > 0 ? voiceId : void 0,
643
+ connector_id: this._config.connectorId,
644
+ uid: this._config.userId || void 0,
645
+ workflow_id: this._config.workflowId || void 0,
646
+ config
647
+ });
648
+ }
649
+ } catch (error) {
650
+ this.dispatch(event_names.ERROR, error);
651
+ throw new RealtimeAPIError(error_RealtimeError.CREATE_ROOM_ERROR, error instanceof Error ? error.message : 'Unknown error', error);
652
+ }
653
+ this._isTestEnv = TEST_APP_ID === roomInfo.app_id;
654
+ // Step2 create engine
655
+ this._client = new EngineClient(roomInfo.app_id, this._config.debug, this._isTestEnv, this._isSupportVideo, this._config.videoConfig);
656
+ // Step3 bind engine events
657
+ this._client.bindEngineEvents();
658
+ this._client.on(event_names.ALL, (eventName, data)=>{
659
+ this.dispatch(eventName, data, false);
660
+ });
661
+ if (this._config.suppressStationaryNoise) {
662
+ await this._client.enableAudioNoiseReduction();
663
+ this.dispatch(event_names.SUPPRESS_STATIONARY_NOISE, {});
664
+ }
665
+ if (this._config.suppressNonStationaryNoise) try {
666
+ await this._client.initAIAnsExtension();
667
+ this._client.changeAIAnsExtension(true);
668
+ this.dispatch(event_names.SUPPRESS_NON_STATIONARY_NOISE, {});
669
+ } catch (error) {
670
+ console.warn('Config suppressNonStationaryNoise is not supported', error);
671
+ }
672
+ var _this__config_audioMutedDefault, _this__config_videoConfig_videoOnDefault, _this__config_isAutoSubscribeAudio;
673
+ // Step4 join room
674
+ await this._client.joinRoom({
675
+ token: roomInfo.token,
676
+ roomId: roomInfo.room_id,
677
+ uid: roomInfo.uid,
678
+ audioMutedDefault: null !== (_this__config_audioMutedDefault = this._config.audioMutedDefault) && void 0 !== _this__config_audioMutedDefault && _this__config_audioMutedDefault,
679
+ videoOnDefault: null === (_this__config_videoConfig_videoOnDefault = null === (_this__config_videoConfig = this._config.videoConfig) || void 0 === _this__config_videoConfig ? void 0 : _this__config_videoConfig.videoOnDefault) || void 0 === _this__config_videoConfig_videoOnDefault || _this__config_videoConfig_videoOnDefault,
680
+ isAutoSubscribeAudio: null === (_this__config_isAutoSubscribeAudio = this._config.isAutoSubscribeAudio) || void 0 === _this__config_isAutoSubscribeAudio || _this__config_isAutoSubscribeAudio
681
+ });
682
+ // Step5 create local stream
683
+ await this._client.createLocalStream(roomInfo.uid, this._config.videoConfig);
684
+ // step6 set connected and dispatch connected event
685
+ this.isConnected = true;
686
+ this.dispatch(event_names.CONNECTED, {
687
+ roomId: roomInfo.room_id,
688
+ uid: roomInfo.uid,
689
+ token: roomInfo.token,
690
+ appId: roomInfo.app_id
691
+ });
692
+ }
693
+ /**
694
+ * en: Interrupt the current conversation
695
+ *
696
+ * zh: 中断当前对话
697
+ */ async interrupt() {
698
+ var _this__client;
699
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.stop());
700
+ this.dispatch(event_names.INTERRUPTED, {});
701
+ }
702
+ /**
703
+ * en: Disconnect from the current session
704
+ *
705
+ * zh: 断开与当前会话的连接
706
+ */ async disconnect() {
707
+ var _this__client;
708
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.disconnect());
709
+ this.isConnected = false;
710
+ this._client = null;
711
+ this.dispatch(event_names.DISCONNECTED, {});
712
+ }
713
+ /**
714
+ * en: Send a message to the bot
715
+ *
716
+ * zh: 发送消息给Bot
717
+ */ async sendMessage(message) {
718
+ var _this__client;
719
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.sendMessage(message));
720
+ const eventType = 'string' == typeof message.event_type ? message.event_type : 'unknown_event';
721
+ this.dispatch(`client.${eventType}`, message);
722
+ }
723
+ /**
724
+ * en: Enable or disable audio
725
+ *
726
+ * zh: 启用或禁用音频
727
+ */ async setAudioEnable(isEnable) {
728
+ var _this__client;
729
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.changeAudioState(isEnable));
730
+ if (isEnable) this.dispatch(event_names.AUDIO_UNMUTED, {});
731
+ else this.dispatch(event_names.AUDIO_MUTED, {});
732
+ }
733
+ async setVideoEnable(isEnable) {
734
+ var _this__client;
735
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.changeVideoState(isEnable));
736
+ if (isEnable) this.dispatch(event_names.VIDEO_ON, {});
737
+ else this.dispatch(event_names.VIDEO_OFF, {});
738
+ }
739
+ /**
740
+ * en: Enable audio properties reporting (debug mode only)
741
+ *
742
+ * zh: 启用音频属性报告(仅限调试模式)
743
+ */ enableAudioPropertiesReport(config) {
744
+ if (this._config.debug) {
745
+ var _this__client;
746
+ null === (_this__client = this._client) || void 0 === _this__client || _this__client.enableAudioPropertiesReport(config);
747
+ return true;
748
+ }
749
+ console.warn('enableAudioPropertiesReport is not supported in non-debug mode');
750
+ return false;
751
+ }
752
+ /**
753
+ * en: Start audio playback device test (debug mode only)
754
+ *
755
+ * zh: 开始音频播放设备测试(仅限调试模式)
756
+ */ async startAudioPlaybackDeviceTest() {
757
+ if (this._config.debug) {
758
+ var _this__client;
759
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.startAudioPlaybackDeviceTest());
760
+ } else console.warn('startAudioPlaybackDeviceTest is not supported in non-debug mode');
761
+ }
762
+ /**
763
+ * en: Stop audio playback device test (debug mode only)
764
+ *
765
+ * zh: 停止音频播放设备测试(仅限调试模式)
766
+ */ stopAudioPlaybackDeviceTest() {
767
+ if (this._config.debug) {
768
+ var _this__client;
769
+ null === (_this__client = this._client) || void 0 === _this__client || _this__client.stopAudioPlaybackDeviceTest();
770
+ } else console.warn('stopAudioPlaybackDeviceTest is not supported in non-debug mode');
771
+ }
772
+ /**
773
+ * en: Set the audio input device
774
+ *
775
+ * zh: 设置音频输入设备
776
+ */ async setAudioInputDevice(deviceId) {
777
+ var _this__client;
778
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.setAudioInputDevice(deviceId));
779
+ this.dispatch(event_names.AUDIO_INPUT_DEVICE_CHANGED, {
780
+ deviceId
781
+ });
782
+ }
783
+ /**
784
+ * en: Set the audio output device
785
+ *
786
+ * zh: 设置音频输出设备
787
+ */ async setAudioOutputDevice(deviceId) {
788
+ var _this__client;
789
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.setAudioOutputDevice(deviceId));
790
+ this.dispatch(event_names.AUDIO_OUTPUT_DEVICE_CHANGED, {
791
+ deviceId
792
+ });
793
+ }
794
+ /**
795
+ * en: Set the video input device
796
+ *
797
+ * zh: 设置视频输入设备
798
+ */ async setVideoInputDevice(deviceId) {
799
+ var _this__client;
800
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.setVideoInputDevice(deviceId));
801
+ this.dispatch(event_names.VIDEO_INPUT_DEVICE_CHANGED, {
802
+ deviceId
803
+ });
804
+ }
805
+ /**
806
+ * en: Get the RTC engine instance, for detail visit https://www.volcengine.com/docs/6348/104481
807
+ *
808
+ * zh: 获取 RTC 引擎实例,详情请访问 https://www.volcengine.com/docs/6348/104481
809
+ */ getRtcEngine() {
810
+ var _this__client;
811
+ return null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.getRtcEngine();
812
+ }
813
+ /**
814
+ * Constructor for initializing a RealtimeClient instance.
815
+ *
816
+ * 构造函数,初始化RealtimeClient实例。
817
+ *
818
+ * @param config
819
+ * @param config.accessToken - Required, Access Token. |
820
+ * 必填,Access Token。
821
+ * @param config.botId - Required, Bot Id. |
822
+ * 必填,Bot Id。
823
+ * @param config.voiceId - Optional, Voice Id. |
824
+ * 可选,音色Id。
825
+ * @param config.conversationId - Optional, Conversation Id. |
826
+ * 可选,会话Id。
827
+ * @param config.userId - Optional, User Id. |
828
+ * 可选,用户Id。
829
+ * @param config.baseURL - Optional, defaults to "https://api.coze.cn". |
830
+ * 可选,默认值为 "https://api.coze.cn"。
831
+  * @param config.debug - Optional, defaults to false. |
832
+ * 可选,默认值为 false。
833
+ * @param config.allowPersonalAccessTokenInBrowser
834
+ * - Optional, whether to allow personal access tokens in browser environment. |
835
+ * 可选,是否允许在浏览器环境中使用个人访问令牌。
836
+ * @param config.audioMutedDefault - Optional, whether audio is muted by default, defaults to false. |
837
+ * 可选,默认是否静音,默认值为 false。
838
+ * @param config.connectorId - Required, Connector Id. |
839
+ * 必填,渠道 Id。
840
+ * @param config.suppressStationaryNoise - Optional, suppress stationary noise, defaults to false. |
841
+ * 可选,默认是否抑制静态噪声,默认值为 false。
842
+ * @param config.suppressNonStationaryNoise - Optional, suppress non-stationary noise, defaults to false. |
843
+ * 可选,默认是否抑制非静态噪声,默认值为 false。
844
+ * @param config.isAutoSubscribeAudio - Optional, whether to automatically subscribe to bot reply audio streams, defaults to true. |
845
+ * @param config.videoConfig - Optional, Video configuration. |
846
+ * 可选,视频配置。
847
+ * @param config.videoConfig.videoOnDefault - Optional, Whether to turn on video by default, defaults to true. |
848
+ * 可选,默认是否开启视频,默认值为 true。
849
+ * @param config.videoConfig.renderDom - Optional, The DOM element to render the video stream to. |
850
+ * 可选,渲染视频流的 DOM 元素。
851
+ * @param config.videoConfig.videoInputDeviceId - Optional, The device ID of the video input device to use. |
852
+ * 可选,视频输入设备的设备 ID。
853
+ * @param config.videoConfig.screenConfig - Optional, Screen share configuration if videoInputDeviceId is 'screenShare' see https://www.volcengine.com/docs/6348/104481#screenconfig for more details. |
854
+ * 可选,屏幕共享配置,如果 videoInputDeviceId 是 'screenShare',请参考 https://www.volcengine.com/docs/6348/104481#screenconfig 了解更多详情。
855
+ * @param config.prologueContent - Optional, Prologue content. | 可选,开场白内容。
856
+ */ constructor(config){
857
+ super(config.debug), this._client = null, this.isConnected = false, this._isTestEnv = false, this._isSupportVideo = false;
858
+ this._config = config;
859
+ var _this__config_baseURL;
860
+ const defaultBaseURL = null !== (_this__config_baseURL = this._config.baseURL) && void 0 !== _this__config_baseURL ? _this__config_baseURL : 'https://api.coze.cn';
861
+ this._config.baseURL = defaultBaseURL;
862
+ // init api
863
+ this._api = new api_namespaceObject.CozeAPI({
864
+ token: this._config.accessToken,
865
+ baseURL: defaultBaseURL,
866
+ allowPersonalAccessTokenInBrowser: this._config.allowPersonalAccessTokenInBrowser
867
+ });
868
+ this._isSupportVideo = !!config.videoConfig;
869
+ }
870
+ }
871
+ var __webpack_export_target__ = exports;
872
+ for(var i in __webpack_exports__)__webpack_export_target__[i] = __webpack_exports__[i];
873
+ if (__webpack_exports__.__esModule) Object.defineProperty(__webpack_export_target__, '__esModule', {
874
+ value: true
875
+ });