@coze/realtime-api 1.1.1 → 1.2.0

This diff shows the contents of the two publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
@@ -0,0 +1,864 @@
1
+ "use strict";
2
+ // The require scope
3
+ var __webpack_require__ = {};
4
+ /************************************************************************/ // webpack/runtime/compat_get_default_export
5
+ (()=>{
6
+ // getDefaultExport function for compatibility with non-ESM modules
7
+ __webpack_require__.n = function(module) {
8
+ var getter = module && module.__esModule ? function() {
9
+ return module['default'];
10
+ } : function() {
11
+ return module;
12
+ };
13
+ __webpack_require__.d(getter, {
14
+ a: getter
15
+ });
16
+ return getter;
17
+ };
18
+ })();
19
+ // webpack/runtime/define_property_getters
20
+ (()=>{
21
+ __webpack_require__.d = function(exports1, definition) {
22
+ for(var key in definition)if (__webpack_require__.o(definition, key) && !__webpack_require__.o(exports1, key)) Object.defineProperty(exports1, key, {
23
+ enumerable: true,
24
+ get: definition[key]
25
+ });
26
+ };
27
+ })();
28
+ // webpack/runtime/has_own_property
29
+ (()=>{
30
+ __webpack_require__.o = function(obj, prop) {
31
+ return Object.prototype.hasOwnProperty.call(obj, prop);
32
+ };
33
+ })();
34
+ // webpack/runtime/make_namespace_object
35
+ (()=>{
36
+ // define __esModule on exports
37
+ __webpack_require__.r = function(exports1) {
38
+ if ('undefined' != typeof Symbol && Symbol.toStringTag) Object.defineProperty(exports1, Symbol.toStringTag, {
39
+ value: 'Module'
40
+ });
41
+ Object.defineProperty(exports1, '__esModule', {
42
+ value: true
43
+ });
44
+ };
45
+ })();
46
+ /************************************************************************/ var __webpack_exports__ = {};
47
+ // ESM COMPAT FLAG
48
+ __webpack_require__.r(__webpack_exports__);
49
+ // EXPORTS
50
+ __webpack_require__.d(__webpack_exports__, {
51
+ RealtimeAPIError: ()=>/* reexport */ RealtimeAPIError,
52
+ RealtimeUtils: ()=>/* reexport */ utils_namespaceObject,
53
+ RealtimeError: ()=>/* reexport */ error_RealtimeError,
54
+ EventNames: ()=>/* reexport */ event_names,
55
+ RealtimeClient: ()=>/* binding */ RealtimeClient
56
+ });
57
+ // NAMESPACE OBJECT: ./src/utils.ts
58
+ var utils_namespaceObject = {};
59
+ __webpack_require__.r(utils_namespaceObject);
60
+ __webpack_require__.d(utils_namespaceObject, {
61
+ checkDevicePermission: ()=>checkDevicePermission,
62
+ checkPermission: ()=>checkPermission,
63
+ getAudioDevices: ()=>getAudioDevices,
64
+ isScreenShareDevice: ()=>isScreenShareDevice,
65
+ isScreenShareSupported: ()=>isScreenShareSupported,
66
+ sleep: ()=>sleep
67
+ });
68
+ const api_namespaceObject = require("@coze/api");
69
+ const rtc_namespaceObject = require("@volcengine/rtc");
70
+ var rtc_default = /*#__PURE__*/ __webpack_require__.n(rtc_namespaceObject);
71
+ /**
72
+ * Delays execution for the specified duration
73
+ * @param milliseconds The time to sleep in milliseconds
74
+ * @throws {Error} If milliseconds is negative
75
+ * @returns Promise that resolves after the specified duration
76
+ */ const sleep = (milliseconds)=>{
77
+ if (milliseconds < 0) throw new Error('Sleep duration must be non-negative');
78
+ return new Promise((resolve)=>setTimeout(resolve, milliseconds));
79
+ };
80
+ /**
81
+ * @deprecated use checkDevicePermission instead
82
+ * Check microphone permission, returns a boolean
83
+ */ const checkPermission = async function() {
84
+ let { audio = true, video = false } = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : {};
85
+ try {
86
+ const result = await rtc_default().enableDevices({
87
+ audio,
88
+ video
89
+ });
90
+ return result.audio;
91
+ } catch (error) {
92
+ console.error('Failed to check device permissions:', error);
93
+ return false;
94
+ }
95
+ };
96
+ /**
97
+ * Checks device permissions for audio and video
98
+ * @param checkVideo Whether to check video permissions (default: false)
99
+ * @returns Promise that resolves with the device permission status
100
+ */ const checkDevicePermission = async function() {
101
+ let checkVideo = arguments.length > 0 && void 0 !== arguments[0] && arguments[0];
102
+ return await rtc_default().enableDevices({
103
+ audio: true,
104
+ video: checkVideo
105
+ });
106
+ };
107
+ /**
108
+ * Get audio devices
109
+ * @returns Promise<AudioDevices> Object containing arrays of audio input and output devices
110
+ */ const getAudioDevices = async function() {
111
+ let { video = false } = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : {};
112
+ let devices = [];
113
+ if (video) {
114
+ devices = await rtc_default().enumerateDevices();
115
+ if (isScreenShareSupported()) // @ts-expect-error - add screenShare device to devices
116
+ devices.push({
117
+ deviceId: 'screenShare',
118
+ kind: 'videoinput',
119
+ label: 'Screen Share',
120
+ groupId: 'screenShare'
121
+ });
122
+ } else devices = await [
123
+ ...await rtc_default().enumerateAudioCaptureDevices(),
124
+ ...await rtc_default().enumerateAudioPlaybackDevices()
125
+ ];
126
+ if (!(null == devices ? void 0 : devices.length)) return {
127
+ audioInputs: [],
128
+ audioOutputs: [],
129
+ videoInputs: []
130
+ };
131
+ return {
132
+ audioInputs: devices.filter((i)=>i.deviceId && 'audioinput' === i.kind),
133
+ audioOutputs: devices.filter((i)=>i.deviceId && 'audiooutput' === i.kind),
134
+ videoInputs: devices.filter((i)=>i.deviceId && 'videoinput' === i.kind)
135
+ };
136
+ };
137
+ const isScreenShareDevice = (deviceId)=>'screenShare' === deviceId;
138
+ /**
139
+ * Check if browser supports screen sharing
140
+ * 检查浏览器是否支持屏幕共享
141
+ */ function isScreenShareSupported() {
142
+ var _navigator_mediaDevices, _navigator;
143
+ return !!(null === (_navigator = navigator) || void 0 === _navigator ? void 0 : null === (_navigator_mediaDevices = _navigator.mediaDevices) || void 0 === _navigator_mediaDevices ? void 0 : _navigator_mediaDevices.getDisplayMedia);
144
+ }
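For reference, a minimal usage sketch of these helpers as re-exported under RealtimeUtils; the import path is the package entry point, while the wrapper function and device-picking logic are illustrative assumptions:

import { RealtimeUtils } from '@coze/realtime-api';

async function pickFirstMicrophone() {
  // Prompt for microphone permission (pass true to also check the camera).
  const permission = await RealtimeUtils.checkDevicePermission(false);
  if (!permission.audio) {
    throw new Error('Microphone permission denied');
  }
  // Enumerate devices; with { video: true } the list also contains cameras
  // and the synthetic 'screenShare' entry when getDisplayMedia is available.
  const { audioInputs } = await RealtimeUtils.getAudioDevices();
  return audioInputs[0]?.deviceId;
}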
145
+ var event_names_EventNames = /*#__PURE__*/ function(EventNames) {
146
+ /**
147
+ * en: All events
148
+ * zh: 所有事件
149
+ */ EventNames["ALL"] = "realtime.event";
150
+ /**
151
+ * en: All client events
152
+ * zh: 所有客户端事件
153
+ */ EventNames["ALL_CLIENT"] = "client.*";
154
+ /**
155
+ * en: All server events
156
+ * zh: 所有服务端事件
157
+ */ EventNames["ALL_SERVER"] = "server.*";
158
+ /**
159
+ * en: Client connected
160
+ * zh: 客户端连接
161
+ */ EventNames["CONNECTED"] = "client.connected";
162
+ /**
163
+ * en: Client connecting
164
+ * zh: 客户端连接中
165
+ */ EventNames["CONNECTING"] = "client.connecting";
166
+ /**
167
+ * en: Client interrupted
168
+ * zh: 客户端中断
169
+ */ EventNames["INTERRUPTED"] = "client.interrupted";
170
+ /**
171
+ * en: Client disconnected
172
+ * zh: 客户端断开
173
+ */ EventNames["DISCONNECTED"] = "client.disconnected";
174
+ /**
175
+ * en: Client audio unmuted
176
+ * zh: 客户端音频未静音
177
+ */ EventNames["AUDIO_UNMUTED"] = "client.audio.unmuted";
178
+ /**
179
+ * en: Client audio muted
180
+ * zh: 客户端音频静音
181
+ */ EventNames["AUDIO_MUTED"] = "client.audio.muted";
182
+ /**
183
+ * en: Client video on
184
+ * zh: 客户端视频开启
185
+ */ EventNames["VIDEO_ON"] = "client.video.on";
186
+ /**
187
+ * en: Client video off
188
+ * zh: 客户端视频关闭
189
+ */ EventNames["VIDEO_OFF"] = "client.video.off";
190
+ /**
191
+ * en: Client video event
192
+ * zh: 客户端视频事件
193
+ */ EventNames["PLAYER_EVENT"] = "client.video.event";
194
+ /**
195
+ * en: Client error
196
+ * zh: 客户端错误
197
+ */ EventNames["ERROR"] = "client.error";
198
+ /**
199
+ * en: Suppress stationary noise
200
+ * zh: 抑制平稳噪声
201
+ */ EventNames["SUPPRESS_STATIONARY_NOISE"] = "client.suppress.stationary.noise";
202
+ /**
203
+ * en: Suppress non-stationary noise
204
+ * zh: 抑制非平稳噪声
205
+ */ EventNames["SUPPRESS_NON_STATIONARY_NOISE"] = "client.suppress.non.stationary.noise";
206
+ /**
207
+ * en: Audio input device changed
208
+ * zh: 音频输入设备改变
209
+ */ EventNames["AUDIO_INPUT_DEVICE_CHANGED"] = "client.input.device.changed";
210
+ /**
211
+ * en: Audio output device changed
212
+ * zh: 音频输出设备改变
213
+ */ EventNames["AUDIO_OUTPUT_DEVICE_CHANGED"] = "client.output.device.changed";
214
+ /**
215
+ * en: Video input device changed
216
+ * zh: 视频输入设备改变
217
+ */ EventNames["VIDEO_INPUT_DEVICE_CHANGED"] = "client.video.input.device.changed";
218
+ /**
219
+ * en: Network quality changed
220
+ * zh: 网络质量改变
221
+ */ EventNames["NETWORK_QUALITY"] = "client.network.quality";
222
+ /**
223
+ * en: Bot joined
224
+ * zh: Bot 加入
225
+ */ EventNames["BOT_JOIN"] = "server.bot.join";
226
+ /**
227
+ * en: Bot left
228
+ * zh: Bot 离开
229
+ */ EventNames["BOT_LEAVE"] = "server.bot.leave";
230
+ /**
231
+ * en: Agent speech started
232
+ * zh: 开始说话
233
+ */ EventNames["AUDIO_AGENT_SPEECH_STARTED"] = "server.audio.agent.speech_started";
234
+ /**
235
+ * en: Agent speech stopped
236
+ * zh: 停止说话
237
+ */ EventNames["AUDIO_AGENT_SPEECH_STOPPED"] = "server.audio.agent.speech_stopped";
238
+ /**
239
+ * en: Server error
240
+ * zh: 服务端错误
241
+ */ EventNames["SERVER_ERROR"] = "server.error";
242
+ /**
243
+ * en: User speech started
244
+ * zh: 用户开始说话
245
+ */ EventNames["AUDIO_USER_SPEECH_STARTED"] = "server.audio.user.speech_started";
246
+ /**
247
+ * en: User speech stopped
248
+ * zh: 用户停止说话
249
+ */ EventNames["AUDIO_USER_SPEECH_STOPPED"] = "server.audio.user.speech_stopped";
250
+ /**
251
+ * en: User successfully enters the room
252
+ * zh: 用户成功进入房间后,会收到该事件
253
+ */ EventNames["SESSION_CREATED"] = "server.session.created";
254
+ /**
255
+ * en: Session updated
256
+ * zh: 会话更新
257
+ */ EventNames["SESSION_UPDATE"] = "server.session.update";
258
+ return EventNames;
259
+ }(event_names_EventNames || {});
260
+ /* ESM default export */ const event_names = event_names_EventNames;
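A short sketch of how these event names are typically consumed; `client` is assumed to be a RealtimeClient instance constructed as shown further down in this file, and the handler bodies are placeholders:

import { EventNames, RealtimeClient } from '@coze/realtime-api';

function bindEvents(client: RealtimeClient) {
  // Exact event name.
  client.on(EventNames.CONNECTED, (eventName, data) => {
    console.log('joined room', data);
  });
  // Wildcard: every server-side event ('server.*').
  client.on(EventNames.ALL_SERVER, (eventName, data) => {
    console.log('server event', eventName, data);
  });
}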
261
+ var error_RealtimeError = /*#__PURE__*/ function(RealtimeError) {
262
+ RealtimeError["DEVICE_ACCESS_ERROR"] = "DEVICE_ACCESS_ERROR";
263
+ RealtimeError["STREAM_CREATION_ERROR"] = "STREAM_CREATION_ERROR";
264
+ RealtimeError["CONNECTION_ERROR"] = "CONNECTION_ERROR";
265
+ RealtimeError["DISCONNECTION_ERROR"] = "DISCONNECTION_ERROR";
266
+ RealtimeError["INTERRUPT_ERROR"] = "INTERRUPT_ERROR";
267
+ RealtimeError["EVENT_HANDLER_ERROR"] = "EVENT_HANDLER_ERROR";
268
+ RealtimeError["PERMISSION_DENIED"] = "PERMISSION_DENIED";
269
+ RealtimeError["NETWORK_ERROR"] = "NETWORK_ERROR";
270
+ RealtimeError["INVALID_STATE"] = "INVALID_STATE";
271
+ RealtimeError["CREATE_ROOM_ERROR"] = "CREATE_ROOM_ERROR";
272
+ RealtimeError["PARSE_MESSAGE_ERROR"] = "PARSE_MESSAGE_ERROR";
273
+ RealtimeError["HANDLER_MESSAGE_ERROR"] = "HANDLER_MESSAGE_ERROR";
274
+ return RealtimeError;
275
+ }({});
276
+ class RealtimeAPIError extends Error {
277
+ /**
278
+ * @param code - Error code
279
+ * @param message - Error message
280
+ * @param error - Error object
281
+ */ constructor(code, message, error){
282
+ super(`[${code}] ${message}`);
283
+ this.name = 'RealtimeAPIError';
284
+ this.code = code;
285
+ this.error = error;
286
+ }
287
+ }
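A hedged sketch of handling this error type against the codes above, assuming an async context and an already constructed `client`:

import { RealtimeAPIError, RealtimeError } from '@coze/realtime-api';

try {
  await client.connect();
} catch (e) {
  if (e instanceof RealtimeAPIError && e.code === RealtimeError.CREATE_ROOM_ERROR) {
    // e.message is '[CODE] message'; e.error carries the underlying cause.
    console.error('Room creation failed:', e.message, e.error);
  } else {
    throw e;
  }
}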
288
+ class RealtimeEventHandler {
289
+ clearEventHandlers() {
290
+ this.eventHandlers = {};
291
+ }
292
+ on(eventName, callback) {
293
+ this._log(`on ${eventName} event`);
294
+ this.eventHandlers[eventName] = this.eventHandlers[eventName] || [];
295
+ this.eventHandlers[eventName].push(callback);
296
+ return callback;
297
+ }
298
+ off(eventName, callback) {
299
+ this._log(`off ${eventName} event`);
300
+ const handlers = this.eventHandlers[eventName] || [];
301
+ if (callback) {
302
+ const index = handlers.indexOf(callback);
303
+ if (-1 === index) {
304
+ console.warn(`Could not turn off specified event listener for "${eventName}": not found as a listener`);
305
+ return;
306
+ }
307
+ handlers.splice(index, 1);
308
+ } else delete this.eventHandlers[eventName];
309
+ }
310
+ // eslint-disable-next-line max-params
311
+ _dispatchToHandlers(eventName, event, handlers, prefix) {
312
+ for (const handler of handlers)if (!prefix || eventName.startsWith(prefix)) try {
313
+ handler(eventName, event);
314
+ } catch (e) {
315
+ throw new RealtimeAPIError(error_RealtimeError.HANDLER_MESSAGE_ERROR, `Failed to handle message: ${eventName}`);
316
+ }
317
+ }
318
+ dispatch(eventName, event) {
319
+ let consoleLog = !(arguments.length > 2) || void 0 === arguments[2] || arguments[2];
320
+ if (consoleLog) this._log(`dispatch ${eventName} event`, event);
321
+ const handlers = (this.eventHandlers[eventName] || []).slice();
322
+ this._dispatchToHandlers(eventName, event, handlers);
323
+ const allHandlers = (this.eventHandlers[event_names.ALL] || []).slice();
324
+ this._dispatchToHandlers(eventName, event, allHandlers);
325
+ const allClientHandlers = (this.eventHandlers[event_names.ALL_CLIENT] || []).slice();
326
+ this._dispatchToHandlers(eventName, event, allClientHandlers, 'client.');
327
+ const allServerHandlers = (this.eventHandlers[event_names.ALL_SERVER] || []).slice();
328
+ this._dispatchToHandlers(eventName, event, allServerHandlers, 'server.');
329
+ }
330
+ _log(message, event) {
331
+ if (this._debug) console.log(`[RealtimeClient] ${message}`, event);
332
+ }
333
+ constructor(debug = false){
334
+ this.eventHandlers = {};
335
+ this._debug = debug;
336
+ }
337
+ }
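RealtimeEventHandler itself is not exported, but RealtimeClient inherits this on/off/dispatch behavior. A small sketch of detaching a single listener (names are illustrative):

// on() returns the callback, so it can be kept for a later off().
const handler = client.on(EventNames.NETWORK_QUALITY, (eventName, stats) => {
  console.log('network quality', stats);
});
// Remove just this listener; calling off() without a callback
// drops every listener registered for that event.
client.off(EventNames.NETWORK_QUALITY, handler);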
338
+ const extension_ainr_namespaceObject = require("@volcengine/rtc/extension-ainr");
339
+ var extension_ainr_default = /*#__PURE__*/ __webpack_require__.n(extension_ainr_namespaceObject);
340
+ class EngineClient extends RealtimeEventHandler {
341
+ bindEngineEvents() {
342
+ this.engine.on(rtc_default().events.onUserMessageReceived, this.handleMessage);
343
+ this.engine.on(rtc_default().events.onUserJoined, this.handleUserJoin);
344
+ this.engine.on(rtc_default().events.onUserLeave, this.handleUserLeave);
345
+ this.engine.on(rtc_default().events.onError, this.handleEventError);
346
+ this.engine.on(rtc_default().events.onNetworkQuality, this.handleNetworkQuality);
347
+ if (this._isSupportVideo) this.engine.on(rtc_default().events.onPlayerEvent, this.handlePlayerEvent);
348
+ if (this._debug) {
349
+ this.engine.on(rtc_default().events.onLocalAudioPropertiesReport, this.handleLocalAudioPropertiesReport);
350
+ this.engine.on(rtc_default().events.onRemoteAudioPropertiesReport, this.handleRemoteAudioPropertiesReport);
351
+ }
352
+ }
353
+ removeEventListener() {
354
+ this.engine.off(rtc_default().events.onUserMessageReceived, this.handleMessage);
355
+ this.engine.off(rtc_default().events.onUserJoined, this.handleUserJoin);
356
+ this.engine.off(rtc_default().events.onUserLeave, this.handleUserLeave);
357
+ this.engine.off(rtc_default().events.onError, this.handleEventError);
358
+ this.engine.off(rtc_default().events.onNetworkQuality, this.handleNetworkQuality);
359
+ if (this._isSupportVideo) this.engine.off(rtc_default().events.onPlayerEvent, this.handlePlayerEvent);
360
+ if (this._debug) {
361
+ this.engine.off(rtc_default().events.onLocalAudioPropertiesReport, this.handleLocalAudioPropertiesReport);
362
+ this.engine.off(rtc_default().events.onRemoteAudioPropertiesReport, this.handleRemoteAudioPropertiesReport);
363
+ }
364
+ }
365
+ _parseMessage(event) {
366
+ try {
367
+ return JSON.parse(event.message);
368
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
369
+ } catch (e) {
370
+ throw new RealtimeAPIError(error_RealtimeError.PARSE_MESSAGE_ERROR, (null == e ? void 0 : e.message) || 'Unknown error');
371
+ }
372
+ }
373
+ handleMessage(event) {
374
+ try {
375
+ const message = this._parseMessage(event);
376
+ this.dispatch(`server.${message.event_type}`, message);
377
+ } catch (e) {
378
+ if (e instanceof RealtimeAPIError) {
379
+ if (e.code === error_RealtimeError.PARSE_MESSAGE_ERROR) this.dispatch(event_names.ERROR, {
380
+ message: `Failed to parse message: ${event.message}`,
381
+ error: e
382
+ });
383
+ else if (e.code === error_RealtimeError.HANDLER_MESSAGE_ERROR) this.dispatch(event_names.ERROR, {
384
+ message: `Failed to handle message: ${event.message}`,
385
+ error: e
386
+ });
387
+ } else this.dispatch(event_names.ERROR, e);
388
+ }
389
+ }
390
+ handleEventError(e) {
391
+ this.dispatch(event_names.ERROR, e);
392
+ }
393
+ handleUserJoin(event) {
394
+ this.joinUserId = event.userInfo.userId;
395
+ this.dispatch(event_names.BOT_JOIN, event);
396
+ }
397
+ handleUserLeave(event) {
398
+ this.dispatch(event_names.BOT_LEAVE, event);
399
+ }
400
+ handlePlayerEvent(event) {
401
+ this.dispatch(event_names.PLAYER_EVENT, event);
402
+ }
403
+ handleNetworkQuality(uplinkNetworkQuality, downlinkNetworkQuality) {
404
+ this.dispatch(event_names.NETWORK_QUALITY, {
405
+ uplinkNetworkQuality,
406
+ downlinkNetworkQuality
407
+ });
408
+ }
409
+ async joinRoom(options) {
410
+ const { token, roomId, uid, audioMutedDefault, videoOnDefault, isAutoSubscribeAudio } = options;
411
+ try {
412
+ await this.engine.joinRoom(token, roomId, {
413
+ userId: uid
414
+ }, {
415
+ isAutoPublish: !audioMutedDefault,
416
+ isAutoSubscribeAudio,
417
+ isAutoSubscribeVideo: this._isSupportVideo && videoOnDefault
418
+ });
419
+ } catch (e) {
420
+ if (e instanceof Error) throw new RealtimeAPIError(error_RealtimeError.CONNECTION_ERROR, e.message);
421
+ }
422
+ }
423
+ async setAudioInputDevice(deviceId) {
424
+ const devices = await getAudioDevices();
425
+ if (-1 === devices.audioInputs.findIndex((i)=>i.deviceId === deviceId)) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, `Audio input device not found: ${deviceId}`);
426
+ this.engine.stopAudioCapture();
427
+ await this.engine.startAudioCapture(deviceId);
428
+ }
429
+ async setAudioOutputDevice(deviceId) {
430
+ const devices = await getAudioDevices({
431
+ video: false
432
+ });
433
+ if (-1 === devices.audioOutputs.findIndex((i)=>i.deviceId === deviceId)) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, `Audio output device not found: ${deviceId}`);
434
+ await this.engine.setAudioPlaybackDevice(deviceId);
435
+ }
436
+ async setVideoInputDevice(deviceId) {
437
+ let isAutoCapture = !(arguments.length > 1) || void 0 === arguments[1] || arguments[1];
438
+ var _this__videoConfig;
439
+ const devices = await getAudioDevices({
440
+ video: true
441
+ });
442
+ if (-1 === devices.videoInputs.findIndex((i)=>i.deviceId === deviceId)) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, `Video input device not found: ${deviceId}`);
443
+ await this.changeVideoState(false);
444
+ if (isScreenShareDevice(deviceId)) {
445
+ if (this._streamIndex === rtc_namespaceObject.StreamIndex.STREAM_INDEX_MAIN) this.engine.setLocalVideoPlayer(rtc_namespaceObject.StreamIndex.STREAM_INDEX_MAIN);
446
+ if (isAutoCapture) {
447
+ var _this__videoConfig1;
448
+ this.engine.setVideoSourceType(rtc_namespaceObject.StreamIndex.STREAM_INDEX_SCREEN, rtc_namespaceObject.VideoSourceType.VIDEO_SOURCE_TYPE_INTERNAL);
449
+ await this.engine.startScreenCapture(null === (_this__videoConfig1 = this._videoConfig) || void 0 === _this__videoConfig1 ? void 0 : _this__videoConfig1.screenConfig);
450
+ await this.engine.publishScreen(rtc_namespaceObject.MediaType.VIDEO);
451
+ }
452
+ this._streamIndex = rtc_namespaceObject.StreamIndex.STREAM_INDEX_SCREEN;
453
+ } else {
454
+ if (this._streamIndex === rtc_namespaceObject.StreamIndex.STREAM_INDEX_SCREEN) this.engine.setLocalVideoPlayer(rtc_namespaceObject.StreamIndex.STREAM_INDEX_SCREEN);
455
+ if (isAutoCapture) await this.engine.startVideoCapture(deviceId);
456
+ this._streamIndex = rtc_namespaceObject.StreamIndex.STREAM_INDEX_MAIN;
457
+ }
458
+ this.engine.setLocalVideoPlayer(this._streamIndex, {
459
+ renderDom: (null === (_this__videoConfig = this._videoConfig) || void 0 === _this__videoConfig ? void 0 : _this__videoConfig.renderDom) || 'local-player',
460
+ userId: this._roomUserId
461
+ });
462
+ }
463
+ async createLocalStream(userId, videoConfig) {
464
+ this._roomUserId = userId;
465
+ const devices = await getAudioDevices({
466
+ video: this._isSupportVideo
467
+ });
468
+ if (!devices.audioInputs.length) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, 'Failed to get audio devices');
469
+ if (this._isSupportVideo && !devices.videoInputs.length) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, 'Failed to get video devices');
470
+ await this.engine.startAudioCapture(devices.audioInputs[0].deviceId);
471
+ if (this._isSupportVideo) this.setVideoInputDevice((null == videoConfig ? void 0 : videoConfig.videoInputDeviceId) || devices.videoInputs[0].deviceId, null == videoConfig ? void 0 : videoConfig.videoOnDefault);
472
+ }
473
+ async disconnect() {
474
+ try {
475
+ await this.engine.leaveRoom();
476
+ this.removeEventListener();
477
+ this.clearEventHandlers();
478
+ rtc_default().destroyEngine(this.engine);
479
+ } catch (e) {
480
+ this.dispatch(event_names.ERROR, e);
481
+ throw e;
482
+ }
483
+ }
484
+ async changeAudioState(isMicOn) {
485
+ try {
486
+ if (isMicOn) await this.engine.publishStream(rtc_namespaceObject.MediaType.AUDIO);
487
+ else await this.engine.unpublishStream(rtc_namespaceObject.MediaType.AUDIO);
488
+ } catch (e) {
489
+ this.dispatch(event_names.ERROR, e);
490
+ throw e;
491
+ }
492
+ }
493
+ async changeVideoState(isVideoOn) {
494
+ try {
495
+ if (isVideoOn) {
496
+ if (this._streamIndex === rtc_namespaceObject.StreamIndex.STREAM_INDEX_MAIN) await this.engine.startVideoCapture();
497
+ else {
498
+ var _this__videoConfig;
499
+ this.engine.setVideoSourceType(rtc_namespaceObject.StreamIndex.STREAM_INDEX_SCREEN, rtc_namespaceObject.VideoSourceType.VIDEO_SOURCE_TYPE_INTERNAL);
500
+ await this.engine.startScreenCapture(null === (_this__videoConfig = this._videoConfig) || void 0 === _this__videoConfig ? void 0 : _this__videoConfig.screenConfig);
501
+ await this.engine.publishScreen(rtc_namespaceObject.MediaType.VIDEO);
502
+ }
503
+ } else if (this._streamIndex === rtc_namespaceObject.StreamIndex.STREAM_INDEX_MAIN) await this.engine.stopVideoCapture();
504
+ else {
505
+ await this.engine.stopScreenCapture();
506
+ await this.engine.unpublishScreen(rtc_namespaceObject.MediaType.VIDEO);
507
+ }
508
+ } catch (e) {
509
+ this.dispatch(event_names.ERROR, e);
510
+ throw e;
511
+ }
512
+ }
513
+ async stop() {
514
+ try {
515
+ const result = await this.engine.sendUserMessage(this.joinUserId, JSON.stringify({
516
+ id: 'event_1',
517
+ event_type: 'conversation.chat.cancel',
518
+ data: {}
519
+ }));
520
+ this._log(`interrupt ${this.joinUserId} ${result}`);
521
+ } catch (e) {
522
+ this.dispatch(event_names.ERROR, e);
523
+ throw e;
524
+ }
525
+ }
526
+ async sendMessage(message) {
527
+ try {
528
+ const result = await this.engine.sendUserMessage(this.joinUserId, JSON.stringify(message));
529
+ this._log(`sendMessage ${this.joinUserId} ${JSON.stringify(message)} ${result}`);
530
+ } catch (e) {
531
+ this.dispatch(event_names.ERROR, e);
532
+ throw e;
533
+ }
534
+ }
535
+ enableAudioPropertiesReport(config) {
536
+ this.engine.enableAudioPropertiesReport(config);
537
+ }
538
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
539
+ handleLocalAudioPropertiesReport(event) {
540
+ var _event__audioPropertiesInfo, _event_;
541
+ if (this._debug && (null === (_event_ = event[0]) || void 0 === _event_ ? void 0 : null === (_event__audioPropertiesInfo = _event_.audioPropertiesInfo) || void 0 === _event__audioPropertiesInfo ? void 0 : _event__audioPropertiesInfo.linearVolume) > 0) console.log('handleLocalAudioPropertiesReport', event);
542
+ }
543
+ handleRemoteAudioPropertiesReport(event) {
544
+ if (this._debug) console.log('handleRemoteAudioPropertiesReport', event);
545
+ }
546
+ async enableAudioNoiseReduction() {
547
+ var _this_engine;
548
+ await (null === (_this_engine = this.engine) || void 0 === _this_engine ? void 0 : _this_engine.setAudioCaptureConfig({
549
+ noiseSuppression: true,
550
+ echoCancellation: true,
551
+ autoGainControl: true
552
+ }));
553
+ }
554
+ async initAIAnsExtension() {
555
+ const AIAnsExtension = new (extension_ainr_default())();
556
+ await this.engine.registerExtension(AIAnsExtension);
557
+ this._AIAnsExtension = AIAnsExtension;
558
+ }
559
+ changeAIAnsExtension(enable) {
560
+ if (enable) {
561
+ var _this__AIAnsExtension;
562
+ null === (_this__AIAnsExtension = this._AIAnsExtension) || void 0 === _this__AIAnsExtension || _this__AIAnsExtension.enable();
563
+ } else {
564
+ var _this__AIAnsExtension1;
565
+ null === (_this__AIAnsExtension1 = this._AIAnsExtension) || void 0 === _this__AIAnsExtension1 || _this__AIAnsExtension1.disable();
566
+ }
567
+ }
568
+ async startAudioPlaybackDeviceTest() {
569
+ try {
570
+ await this.engine.startAudioPlaybackDeviceTest('audio-test.wav', 200);
571
+ } catch (e) {
572
+ this.dispatch(event_names.ERROR, e);
573
+ throw e;
574
+ }
575
+ }
576
+ stopAudioPlaybackDeviceTest() {
577
+ try {
578
+ this.engine.stopAudioPlaybackDeviceTest();
579
+ } catch (e) {
580
+ this.dispatch(event_names.ERROR, e);
581
+ throw e;
582
+ }
583
+ }
584
+ getRtcEngine() {
585
+ return this.engine;
586
+ }
587
+ // eslint-disable-next-line max-params
588
+ constructor(appId, debug = false, isTestEnv = false, isSupportVideo = false, videoConfig){
589
+ super(debug), this.joinUserId = '', this._AIAnsExtension = null, this._isSupportVideo = false;
590
+ if (isTestEnv) rtc_default().setParameter('ICE_CONFIG_REQUEST_URLS', [
591
+ 'rtc-test.bytedance.com'
592
+ ]);
593
+ this.engine = rtc_default().createEngine(appId);
594
+ this.handleMessage = this.handleMessage.bind(this);
595
+ this.handleUserJoin = this.handleUserJoin.bind(this);
596
+ this.handleUserLeave = this.handleUserLeave.bind(this);
597
+ this.handleEventError = this.handleEventError.bind(this);
598
+ this.handlePlayerEvent = this.handlePlayerEvent.bind(this);
599
+ this.handleNetworkQuality = this.handleNetworkQuality.bind(this);
600
+ // Debug only
601
+ this.handleLocalAudioPropertiesReport = this.handleLocalAudioPropertiesReport.bind(this);
602
+ this.handleRemoteAudioPropertiesReport = this.handleRemoteAudioPropertiesReport.bind(this);
603
+ this._isSupportVideo = isSupportVideo;
604
+ this._videoConfig = videoConfig;
605
+ }
606
+ }
607
+ // Only used for tests
608
+ const TEST_APP_ID = '6705332c79516e015e3e5f0c';
609
+ class RealtimeClient extends RealtimeEventHandler {
610
+ /**
611
+ * en: Establish a connection to the Coze API and join the room
612
+ *
613
+ * zh: 建立与 Coze API 的连接并加入房间
614
+ */ async connect() {
615
+ var _this__config_videoConfig;
616
+ const { botId, conversationId, voiceId, getRoomInfo } = this._config;
617
+ this.dispatch(event_names.CONNECTING, {});
618
+ let roomInfo;
619
+ try {
620
+ // Step1 get token
621
+ if (getRoomInfo) roomInfo = await getRoomInfo();
622
+ else {
623
+ let config;
624
+ if (this._config.videoConfig) config = isScreenShareDevice(this._config.videoConfig.videoInputDeviceId) ? {
625
+ video_config: {
626
+ stream_video_type: 'screen'
627
+ }
628
+ } : {
629
+ video_config: {
630
+ stream_video_type: 'main'
631
+ }
632
+ };
633
+ roomInfo = await this._api.audio.rooms.create({
634
+ bot_id: botId,
635
+ conversation_id: conversationId || void 0,
636
+ voice_id: voiceId && voiceId.length > 0 ? voiceId : void 0,
637
+ connector_id: this._config.connectorId,
638
+ uid: this._config.userId || void 0,
639
+ workflow_id: this._config.workflowId || void 0,
640
+ config
641
+ });
642
+ }
643
+ } catch (error) {
644
+ this.dispatch(event_names.ERROR, error);
645
+ throw new RealtimeAPIError(error_RealtimeError.CREATE_ROOM_ERROR, error instanceof Error ? error.message : 'Unknown error', error);
646
+ }
647
+ this._isTestEnv = TEST_APP_ID === roomInfo.app_id;
648
+ // Step2 create engine
649
+ this._client = new EngineClient(roomInfo.app_id, this._config.debug, this._isTestEnv, this._isSupportVideo, this._config.videoConfig);
650
+ // Step3 bind engine events
651
+ this._client.bindEngineEvents();
652
+ this._client.on(event_names.ALL, (eventName, data)=>{
653
+ this.dispatch(eventName, data, false);
654
+ });
655
+ if (this._config.suppressStationaryNoise) {
656
+ await this._client.enableAudioNoiseReduction();
657
+ this.dispatch(event_names.SUPPRESS_STATIONARY_NOISE, {});
658
+ }
659
+ if (this._config.suppressNonStationaryNoise) try {
660
+ await this._client.initAIAnsExtension();
661
+ this._client.changeAIAnsExtension(true);
662
+ this.dispatch(event_names.SUPPRESS_NON_STATIONARY_NOISE, {});
663
+ } catch (error) {
664
+ console.warn('Config suppressNonStationaryNoise is not supported', error);
665
+ }
666
+ var _this__config_audioMutedDefault, _this__config_videoConfig_videoOnDefault, _this__config_isAutoSubscribeAudio;
667
+ // Step4 join room
668
+ await this._client.joinRoom({
669
+ token: roomInfo.token,
670
+ roomId: roomInfo.room_id,
671
+ uid: roomInfo.uid,
672
+ audioMutedDefault: null !== (_this__config_audioMutedDefault = this._config.audioMutedDefault) && void 0 !== _this__config_audioMutedDefault && _this__config_audioMutedDefault,
673
+ videoOnDefault: null === (_this__config_videoConfig_videoOnDefault = null === (_this__config_videoConfig = this._config.videoConfig) || void 0 === _this__config_videoConfig ? void 0 : _this__config_videoConfig.videoOnDefault) || void 0 === _this__config_videoConfig_videoOnDefault || _this__config_videoConfig_videoOnDefault,
674
+ isAutoSubscribeAudio: null === (_this__config_isAutoSubscribeAudio = this._config.isAutoSubscribeAudio) || void 0 === _this__config_isAutoSubscribeAudio || _this__config_isAutoSubscribeAudio
675
+ });
676
+ // Step5 create local stream
677
+ await this._client.createLocalStream(roomInfo.uid, this._config.videoConfig);
678
+ // step6 set connected and dispatch connected event
679
+ this.isConnected = true;
680
+ this.dispatch(event_names.CONNECTED, {
681
+ roomId: roomInfo.room_id,
682
+ uid: roomInfo.uid,
683
+ token: roomInfo.token,
684
+ appId: roomInfo.app_id
685
+ });
686
+ }
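connect() lets callers bypass the built-in audio.rooms.create call by supplying getRoomInfo. A sketch of that path (async context assumed); the endpoint URL is hypothetical, the credential values are placeholders, and the returned object only needs the fields connect() reads (app_id, room_id, uid, token):

const client = new RealtimeClient({
  accessToken: 'pat_***',   // placeholder
  botId: 'bot_***',         // placeholder
  connectorId: '***',       // placeholder
  getRoomInfo: async () => {
    // Fetch room credentials from your own backend instead of the SDK's
    // rooms.create call.
    const res = await fetch('https://example.com/api/coze-room'); // hypothetical endpoint
    return res.json();
  },
});
await client.connect();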
687
+ /**
688
+ * en: Interrupt the current conversation
689
+ *
690
+ * zh: 中断当前对话
691
+ */ async interrupt() {
692
+ var _this__client;
693
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.stop());
694
+ this.dispatch(event_names.INTERRUPTED, {});
695
+ }
696
+ /**
697
+ * en: Disconnect from the current session
698
+ *
699
+ * zh: 断开与当前会话的连接
700
+ */ async disconnect() {
701
+ var _this__client;
702
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.disconnect());
703
+ this.isConnected = false;
704
+ this._client = null;
705
+ this.dispatch(event_names.DISCONNECTED, {});
706
+ }
707
+ /**
708
+ * en: Send a message to the bot
709
+ *
710
+ * zh: 发送消息给Bot
711
+ */ async sendMessage(message) {
712
+ var _this__client;
713
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.sendMessage(message));
714
+ const eventType = 'string' == typeof message.event_type ? message.event_type : 'unknown_event';
715
+ this.dispatch(`client.${eventType}`, message);
716
+ }
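A sketch of sending a client event through this method, reusing the message shape that stop() uses above; the id value is arbitrary and an async context is assumed:

await client.sendMessage({
  id: 'event_123',                          // arbitrary client-side id
  event_type: 'conversation.chat.cancel',
  data: {},
});
// Besides forwarding to the room, this dispatches
// 'client.conversation.chat.cancel' to local listeners.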
717
+ /**
718
+ * en: Enable or disable audio
719
+ *
720
+ * zh: 启用或禁用音频
721
+ */ async setAudioEnable(isEnable) {
722
+ var _this__client;
723
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.changeAudioState(isEnable));
724
+ if (isEnable) this.dispatch(event_names.AUDIO_UNMUTED, {});
725
+ else this.dispatch(event_names.AUDIO_MUTED, {});
726
+ }
727
+ async setVideoEnable(isEnable) {
728
+ var _this__client;
729
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.changeVideoState(isEnable));
730
+ if (isEnable) this.dispatch(event_names.VIDEO_ON, {});
731
+ else this.dispatch(event_names.VIDEO_OFF, {});
732
+ }
733
+ /**
734
+ * en: Enable audio properties reporting (debug mode only)
735
+ *
736
+ * zh: 启用音频属性报告(仅限调试模式)
737
+ */ enableAudioPropertiesReport(config) {
738
+ if (this._config.debug) {
739
+ var _this__client;
740
+ null === (_this__client = this._client) || void 0 === _this__client || _this__client.enableAudioPropertiesReport(config);
741
+ return true;
742
+ }
743
+ console.warn('enableAudioPropertiesReport is not supported in non-debug mode');
744
+ return false;
745
+ }
746
+ /**
747
+ * en: Start audio playback device test (debug mode only)
748
+ *
749
+ * zh: 开始音频播放设备测试(仅限调试模式)
750
+ */ async startAudioPlaybackDeviceTest() {
751
+ if (this._config.debug) {
752
+ var _this__client;
753
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.startAudioPlaybackDeviceTest());
754
+ } else console.warn('startAudioPlaybackDeviceTest is not supported in non-debug mode');
755
+ }
756
+ /**
757
+ * en: Stop audio playback device test (debug mode only)
758
+ *
759
+ * zh: 停止音频播放设备测试(仅限调试模式)
760
+ */ stopAudioPlaybackDeviceTest() {
761
+ if (this._config.debug) {
762
+ var _this__client;
763
+ null === (_this__client = this._client) || void 0 === _this__client || _this__client.stopAudioPlaybackDeviceTest();
764
+ } else console.warn('stopAudioPlaybackDeviceTest is not supported in non-debug mode');
765
+ }
766
+ /**
767
+ * en: Set the audio input device
768
+ *
769
+ * zh: 设置音频输入设备
770
+ */ async setAudioInputDevice(deviceId) {
771
+ var _this__client;
772
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.setAudioInputDevice(deviceId));
773
+ this.dispatch(event_names.AUDIO_INPUT_DEVICE_CHANGED, {
774
+ deviceId
775
+ });
776
+ }
777
+ /**
778
+ * en: Set the audio output device
779
+ *
780
+ * zh: 设置音频输出设备
781
+ */ async setAudioOutputDevice(deviceId) {
782
+ var _this__client;
783
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.setAudioOutputDevice(deviceId));
784
+ this.dispatch(event_names.AUDIO_OUTPUT_DEVICE_CHANGED, {
785
+ deviceId
786
+ });
787
+ }
788
+ async setVideoInputDevice(deviceId) {
789
+ var _this__client;
790
+ await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.setVideoInputDevice(deviceId));
791
+ this.dispatch(event_names.VIDEO_INPUT_DEVICE_CHANGED, {
792
+ deviceId
793
+ });
794
+ }
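A sketch of switching devices at runtime (async context assumed); the ids must come from getAudioDevices(), otherwise the underlying EngineClient throws DEVICE_ACCESS_ERROR:

const { audioInputs, audioOutputs } = await RealtimeUtils.getAudioDevices();
if (audioInputs[1]) {
  await client.setAudioInputDevice(audioInputs[1].deviceId);
}
if (audioOutputs[0]) {
  await client.setAudioOutputDevice(audioOutputs[0].deviceId);
}
// 'screenShare' is the synthetic device id accepted by setVideoInputDevice
// when screen sharing is supported (see isScreenShareSupported above).
await client.setVideoInputDevice('screenShare');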
795
+ /**
796
+ * en: Get the RTC engine instance, for detail visit https://www.volcengine.com/docs/6348/104481
797
+ *
798
+ * zh: 获取 RTC 引擎实例,详情请访问 https://www.volcengine.com/docs/6348/104481
799
+ */ getRtcEngine() {
800
+ var _this__client;
801
+ return null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.getRtcEngine();
802
+ }
803
+ /**
804
+ * Constructor for initializing a RealtimeClient instance.
805
+ *
806
+ * 构造函数,初始化RealtimeClient实例。
807
+ *
808
+ * @param config
809
+ * @param config.accessToken - Required, Access Token. |
810
+ * 必填,Access Token。
811
+ * @param config.botId - Required, Bot Id. |
812
+ * 必填,Bot Id。
813
+ * @param config.voiceId - Optional, Voice Id. |
814
+ * 可选,音色Id。
815
+ * @param config.conversationId - Optional, Conversation Id. |
816
+ * 可选,会话Id。
817
+ * @param config.userId - Optional, User Id. |
818
+ * 可选,用户Id。
819
+ * @param config.baseURL - Optional, defaults to "https://api.coze.cn". |
820
+ * 可选,默认值为 "https://api.coze.cn"。
821
+ * @param config.debug - Optional, defaults to false. |
822
+ * 可选,默认值为 false。
823
+ * @param config.allowPersonalAccessTokenInBrowser
824
+ * - Optional, whether to allow personal access tokens in browser environment. |
825
+ * 可选,是否允许在浏览器环境中使用个人访问令牌。
826
+ * @param config.audioMutedDefault - Optional, whether audio is muted by default, defaults to false. |
827
+ * 可选,默认是否静音,默认值为 false。
828
+ * @param config.connectorId - Required, Connector Id. |
829
+ * 必填,渠道 Id。
830
+ * @param config.suppressStationaryNoise - Optional, suppress stationary noise, defaults to false. |
831
+ * 可选,默认是否抑制静态噪声,默认值为 false。
832
+ * @param config.suppressNonStationaryNoise - Optional, suppress non-stationary noise, defaults to false. |
833
+ * 可选,默认是否抑制非静态噪声,默认值为 false。
834
+ * @param config.isAutoSubscribeAudio - Optional, whether to automatically subscribe to bot reply audio streams, defaults to true. |
835
+ * @param config.videoConfig - Optional, Video configuration. |
836
+ * 可选,视频配置。
837
+ * @param config.videoConfig.videoOnDefault - Optional, Whether to turn on video by default, defaults to true. |
838
+ * 可选,默认是否开启视频,默认值为 true。
839
+ * @param config.videoConfig.renderDom - Optional, The DOM element to render the video stream to. |
840
+ * 可选,渲染视频流的 DOM 元素。
841
+ * @param config.videoConfig.videoInputDeviceId - Optional, The device ID of the video input device to use. |
842
+ * 可选,视频输入设备的设备 ID。
843
+ * @param config.videoConfig.screenConfig - Optional, Screen share configuration if videoInputDeviceId is 'screenShare' see https://www.volcengine.com/docs/6348/104481#screenconfig for more details. |
844
+ * 可选,屏幕共享配置,如果 videoInputDeviceId 是 'screenShare',请参考 https://www.volcengine.com/docs/6348/104481#screenconfig 了解更多详情。
845
+ */ constructor(config){
846
+ super(config.debug), this._client = null, this.isConnected = false, this._isTestEnv = false, this._isSupportVideo = false;
847
+ this._config = config;
848
+ var _this__config_baseURL;
849
+ const defaultBaseURL = null !== (_this__config_baseURL = this._config.baseURL) && void 0 !== _this__config_baseURL ? _this__config_baseURL : 'https://api.coze.cn';
850
+ this._config.baseURL = defaultBaseURL;
851
+ // init api
852
+ this._api = new api_namespaceObject.CozeAPI({
853
+ token: this._config.accessToken,
854
+ baseURL: defaultBaseURL,
855
+ allowPersonalAccessTokenInBrowser: this._config.allowPersonalAccessTokenInBrowser
856
+ });
857
+ this._isSupportVideo = !!config.videoConfig;
858
+ }
859
+ }
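Putting the pieces together, a minimal end-to-end sketch of the exported RealtimeClient; credential values are placeholders and the awaits assume an async context:

import { EventNames, RealtimeClient } from '@coze/realtime-api';

const client = new RealtimeClient({
  accessToken: 'pat_***',        // placeholder token
  botId: 'bot_***',              // placeholder bot id
  connectorId: '***',            // placeholder connector id
  baseURL: 'https://api.coze.cn',
  audioMutedDefault: false,
  debug: false,
});

client.on(EventNames.AUDIO_AGENT_SPEECH_STARTED, () => console.log('bot is speaking'));

await client.connect();          // create the room, join it, start audio capture
await client.setAudioEnable(true);
// ... conversation happens here ...
await client.interrupt();        // cancel the bot's current reply
await client.disconnect();       // leave the room and destroy the engine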
860
+ var __webpack_export_target__ = exports;
861
+ for(var i in __webpack_exports__)__webpack_export_target__[i] = __webpack_exports__[i];
862
+ if (__webpack_exports__.__esModule) Object.defineProperty(__webpack_export_target__, '__esModule', {
863
+ value: true
864
+ });