@coze/realtime-api 1.0.0 → 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,11 +1,11 @@
  /*! For license information please see index.cjs.LICENSE.txt */
  "use strict";
  var __webpack_modules__ = {
- "?d7f0": function() {
+ "?e272": function() {
  /* (ignored) */ },
- "?fcd3": function() {
+ "?5742": function() {
  /* (ignored) */ },
- "?d2d5": function() {
+ "?9caf": function() {
  /* (ignored) */ }
  };
  /************************************************************************/ // The module cache
@@ -73,7 +73,7 @@ __webpack_require__.d(__webpack_exports__, {
  EventNames: ()=>/* reexport */ event_handler_EventNames,
  RealtimeClient: ()=>/* binding */ RealtimeClient
  });
- // NAMESPACE OBJECT: ../../common/temp/default/node_modules/.pnpm/axios@1.7.7/node_modules/axios/lib/platform/common/utils.js
+ // NAMESPACE OBJECT: ../../common/temp/default/node_modules/.pnpm/axios@1.7.7_debug@4.3.7/node_modules/axios/lib/platform/common/utils.js
  var common_utils_namespaceObject = {};
  __webpack_require__.r(common_utils_namespaceObject);
  __webpack_require__.d(common_utils_namespaceObject, {
@@ -2135,7 +2135,7 @@ const trackStream = (stream, chunkSize, onProgress, onFinish)=>{
  }, {
  highWaterMark: 2
  });
- }; // CONCATENATED MODULE: ../../common/temp/default/node_modules/.pnpm/axios@1.7.7/node_modules/axios/lib/adapters/fetch.js
+ }; // CONCATENATED MODULE: ../../common/temp/default/node_modules/.pnpm/axios@1.7.7_debug@4.3.7/node_modules/axios/lib/adapters/fetch.js
  const isFetchSupported = 'function' == typeof fetch && 'function' == typeof Request && 'function' == typeof Response;
  const isReadableStreamSupported = isFetchSupported && 'function' == typeof ReadableStream;
  // used only inside the fetch adapter
@@ -2857,11 +2857,11 @@ axios.default = axios;
  // so that it can keep same with es module or cjs
  const { Axios: axios_Axios, AxiosError: axios_AxiosError, CanceledError: axios_CanceledError, isCancel: axios_isCancel, CancelToken: axios_CancelToken, VERSION: axios_VERSION, all: axios_all, Cancel, isAxiosError: axios_isAxiosError, spread: axios_spread, toFormData: axios_toFormData, AxiosHeaders: axios_AxiosHeaders, HttpStatusCode: axios_HttpStatusCode, formToJSON, getAdapter, mergeConfig: axios_mergeConfig } = lib_axios;
  // EXTERNAL MODULE: os (ignored)
- var os_ignored_ = __webpack_require__("?d2d5");
+ var os_ignored_ = __webpack_require__("?9caf");
  // EXTERNAL MODULE: crypto (ignored)
- __webpack_require__("?d7f0");
+ __webpack_require__("?e272");
  // EXTERNAL MODULE: jsonwebtoken (ignored)
- __webpack_require__("?fcd3");
+ __webpack_require__("?5742");
  class APIResource {
  constructor(client){
  this._client = client;
@@ -3103,6 +3103,10 @@ class Messages extends APIResource {
  }
  }
  const uuid = ()=>(Math.random() * new Date().getTime()).toString();
+ const handleAdditionalMessages = (additional_messages)=>null == additional_messages ? void 0 : additional_messages.map((i)=>({
+ ...i,
+ content: 'object' == typeof i.content ? JSON.stringify(i.content) : i.content
+ }));
  class Chat extends APIResource {
  /**
  * Call the Chat API to send messages to a published Coze agent. | 调用此接口发起一次对话,支持添加上下文
@@ -3124,6 +3128,7 @@ class Chat extends APIResource {
  const apiUrl = `/v3/chat${conversation_id ? `?conversation_id=${conversation_id}` : ''}`;
  const payload = {
  ...rest,
+ additional_messages: handleAdditionalMessages(params.additional_messages),
  stream: false
  };
  const result = await this._client.post(apiUrl, payload, false, options);
@@ -3149,6 +3154,7 @@ class Chat extends APIResource {
  const apiUrl = `/v3/chat${conversation_id ? `?conversation_id=${conversation_id}` : ''}`;
  const payload = {
  ...rest,
+ additional_messages: handleAdditionalMessages(params.additional_messages),
  stream: false
  };
  const result = await this._client.post(apiUrl, payload, false, options);
@@ -3186,6 +3192,7 @@ class Chat extends APIResource {
  const apiUrl = `/v3/chat${conversation_id ? `?conversation_id=${conversation_id}` : ''}`;
  const payload = {
  ...rest,
+ additional_messages: handleAdditionalMessages(params.additional_messages),
  stream: true
  };
  const result = await this._client.post(apiUrl, payload, true, options);
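The three Chat request methods patched above now route params.additional_messages through the new handleAdditionalMessages helper, which JSON-stringifies object-valued content before the payload is posted. A standalone TypeScript sketch of that normalization (the type and function names below are illustrative, not exports of the package):

    // Illustrative only: mirrors what handleAdditionalMessages does in the bundle.
    type AdditionalMessage = { role: string; content?: unknown; [key: string]: unknown };

    const normalizeAdditionalMessages = (messages?: AdditionalMessage[]) =>
      messages?.map((m) => ({
        ...m,
        // Object payloads (e.g. multimodal part arrays) become JSON strings; plain strings pass through.
        content: typeof m.content === 'object' ? JSON.stringify(m.content) : m.content,
      }));

    normalizeAdditionalMessages([{ role: 'user', content: [{ type: 'text', text: 'Hi' }] }]);
    // => [{ role: 'user', content: '[{"type":"text","text":"Hi"}]' }]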
@@ -3360,6 +3367,7 @@ class Conversations extends APIResource {
  * @param params - Required The parameters for creating a conversation | 创建会话所需的参数
  * @param params.messages - Optional Messages in the conversation. | 会话中的消息内容。
  * @param params.meta_data - Optional Additional information when creating a message. | 创建消息时的附加消息。
+ * @param params.bot_id - Optional Bind and isolate conversation on different bots. | 绑定和隔离不同Bot的会话。
  * @returns Information about the created conversation. | 会话的基础信息。
  */ async create(params, options) {
  const apiUrl = '/v1/conversation/create';
@@ -3377,6 +3385,27 @@
  const response = await this._client.get(apiUrl, null, false, options);
  return response.data;
  }
+ /**
+ * List all conversations. | 列出 Bot 下所有会话。
+ * @param params
+ * @param params.bot_id - Required Bot ID. | Bot ID。
+ * @param params.page_num - Optional The page number. | 页码,默认值为 1。
+ * @param params.page_size - Optional The number of conversations per page. | 每页的会话数量,默认值为 50。
+ * @returns Information about the conversations. | 会话的信息。
+ */ async list(params, options) {
+ const apiUrl = '/v1/conversations';
+ const response = await this._client.get(apiUrl, params, false, options);
+ return response.data;
+ }
+ /**
+ * Clear a conversation. | 清空会话。
+ * @param conversation_id - Required The ID of the conversation. | Conversation ID,即会话的唯一标识。
+ * @returns Information about the conversation session. | 会话的会话 ID。
+ */ async clear(conversation_id, options) {
+ const apiUrl = `/v1/conversations/${conversation_id}/clear`;
+ const response = await this._client.post(apiUrl, null, false, options);
+ return response.data;
+ }
  constructor(...args){
  super(...args), this.messages = new messages_Messages(this._client);
  }
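A hedged usage sketch for the two Conversations endpoints added above, assuming the resource is exposed as client.conversations on the @coze/api client (the token and IDs are placeholders):

    import { CozeAPI } from '@coze/api';

    const client = new CozeAPI({ token: 'pat_***' });

    // GET /v1/conversations — list conversations scoped to a bot, with paging
    const conversations = await client.conversations.list({
      bot_id: 'bot_id_placeholder',
      page_num: 1,
      page_size: 50,
    });

    // POST /v1/conversations/{conversation_id}/clear — empty a conversation
    await client.conversations.clear('conversation_id_placeholder');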
@@ -3415,6 +3444,9 @@ class Runs extends APIResource {
  * @param params.bot_id - Optional The ID of the bot associated with the workflow. | 可选 与工作流关联的机器人 ID。
  * @param params.parameters - Optional Parameters for the workflow execution. | 可选 工作流执行的参数。
  * @param params.ext - Optional Additional information for the workflow execution. | 可选 工作流执行的附加信息。
+ * @param params.execute_mode - Optional The mode in which to execute the workflow. | 可选 工作流执行的模式。
+ * @param params.connector_id - Optional The ID of the connector to use for the workflow. | 可选 用于工作流的连接器 ID。
+ * @param params.app_id - Optional The ID of the app. | 可选 要进行会话聊天的 App ID
  * @returns RunWorkflowData | 工作流运行数据
  */ async create(params, options) {
  const apiUrl = '/v1/workflow/run';
@@ -3429,6 +3461,9 @@
  * @param params.bot_id - Optional The ID of the bot associated with the workflow. | 可选 与工作流关联的机器人 ID。
  * @param params.parameters - Optional Parameters for the workflow execution. | 可选 工作流执行的参数。
  * @param params.ext - Optional Additional information for the workflow execution. | 可选 工作流执行的附加信息。
+ * @param params.execute_mode - Optional The mode in which to execute the workflow. | 可选 工作流执行的模式。
+ * @param params.connector_id - Optional The ID of the connector to use for the workflow. | 可选 用于工作流的连接器 ID。
+ * @param params.app_id - Optional The ID of the app. | 可选 要进行会话聊天的 App ID
  * @returns Stream<WorkflowEvent, { id: string; event: string; data: string }> | 工作流事件流
  */ async *stream(params, options) {
  const apiUrl = '/v1/workflow/stream_run';
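The run and stream_run payloads gain execute_mode, connector_id, and app_id. A sketch of supplying them through the client (all values below are placeholders; the diff documents the fields, not their accepted values):

    import { CozeAPI } from '@coze/api';

    const client = new CozeAPI({ token: 'pat_***' });

    const run = await client.workflows.runs.create({
      workflow_id: 'workflow_id_placeholder',
      parameters: { input: 'hello' },
      execute_mode: 'DEBUG',                     // assumed example value
      connector_id: 'connector_id_placeholder',
      app_id: 'app_id_placeholder',              // chat-app scoping, per the new JSDoc
    });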
@@ -3624,7 +3659,7 @@ class esm_Audio extends APIResource {
  super(...args), this.rooms = new Rooms(this._client), this.voices = new Voices(this._client), this.speech = new Speech(this._client);
  }
  }
- var package_namespaceObject = JSON.parse('{"name":"@coze/api","version":"1.0.10","description":"Official Coze Node.js SDK for seamless AI integration into your applications | 扣子官方 Node.js SDK,助您轻松集成 AI 能力到应用中","keywords":["coze","ai","nodejs","sdk","chatbot","typescript"],"homepage":"https://github.com/coze-dev/coze-js/tree/main/packages/coze-js","bugs":{"url":"https://github.com/coze-dev/coze-js/issues"},"repository":{"type":"git","url":"https://github.com/coze-dev/coze-js.git","directory":"packages/coze-js"},"license":"MIT","author":"Leeight <leeight@gmail.com>","type":"module","exports":{".":{"require":"./dist/cjs/index.cjs","import":"./dist/esm/index.js","types":"./dist/types/index.d.ts"}},"main":"dist/cjs/index.cjs","module":"dist/esm/index.js","browser":{"crypto":false,"os":false,"jsonwebtoken":false},"types":"dist/types/index.d.ts","files":["dist","LICENSE","README.md","!**/*.tsbuildinfo"],"scripts":{"build":"rm -rf dist && rslib build","format":"prettier --write .","lint":"eslint ./ --cache --quiet","prepublishOnly":"npm run build","start":"rm -rf dist && rslib build -w","test":"vitest","test:cov":"vitest --coverage --run"},"dependencies":{"jsonwebtoken":"^9.0.2"},"devDependencies":{"@coze-infra/eslint-config":"workspace:*","@coze-infra/ts-config":"workspace:*","@coze-infra/vitest-config":"workspace:*","@rslib/core":"0.0.18","@swc/core":"^1.3.14","@types/jsonwebtoken":"^9.0.0","@types/node":"^20","@types/uuid":"^9.0.1","@types/whatwg-fetch":"^0.0.33","@vitest/coverage-v8":"~2.1.4","axios":"^1.7.7","typescript":"^5.5.3","vitest":"~2.1.4"},"peerDependencies":{"axios":"^1.7.1"}}'); // CONCATENATED MODULE: ./src/version.ts
+ var package_namespaceObject = JSON.parse('{"name":"@coze/api","version":"1.0.14","description":"Official Coze Node.js SDK for seamless AI integration into your applications | 扣子官方 Node.js SDK,助您轻松集成 AI 能力到应用中","keywords":["coze","ai","nodejs","sdk","chatbot","typescript"],"homepage":"https://github.com/coze-dev/coze-js/tree/main/packages/coze-js","bugs":{"url":"https://github.com/coze-dev/coze-js/issues"},"repository":{"type":"git","url":"https://github.com/coze-dev/coze-js.git","directory":"packages/coze-js"},"license":"MIT","author":"Leeight <leeight@gmail.com>","type":"module","exports":{".":{"require":"./dist/cjs/index.cjs","import":"./dist/esm/index.js","types":"./dist/types/index.d.ts"}},"main":"dist/cjs/index.cjs","module":"dist/esm/index.js","browser":{"crypto":false,"os":false,"jsonwebtoken":false},"types":"dist/types/index.d.ts","files":["dist","LICENSE","README.md","!**/*.tsbuildinfo"],"scripts":{"build":"rm -rf dist && rslib build","format":"prettier --write .","lint":"eslint ./ --cache --quiet","prepublishOnly":"npm run build","start":"rm -rf dist && rslib build -w","test":"vitest","test:cov":"vitest --coverage --run"},"dependencies":{"jsonwebtoken":"^9.0.2"},"devDependencies":{"@coze-infra/eslint-config":"workspace:*","@coze-infra/ts-config":"workspace:*","@coze-infra/vitest-config":"workspace:*","@rslib/core":"0.0.18","@swc/core":"^1.3.14","@types/jsonwebtoken":"^9.0.0","@types/node":"^20","@types/uuid":"^9.0.1","@types/whatwg-fetch":"^0.0.33","@vitest/coverage-v8":"~2.1.4","axios":"^1.7.7","typescript":"^5.5.3","vitest":"~2.1.4"},"peerDependencies":{"axios":"^1.7.1"}}'); // CONCATENATED MODULE: ./src/version.ts
  const { version: esm_version } = package_namespaceObject;
  const getEnv = ()=>{
  const nodeVersion = process.version.slice(1); // Remove 'v' prefix
@@ -3687,6 +3722,11 @@ const getNodeClientUserAgent = ()=>{
  async function fetchAPI(url) {
  let options = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : {};
  const axiosInstance = options.axiosInstance || lib_axios;
+ // Add version check for streaming requests
+ if (options.isStreaming && isAxiosStatic(axiosInstance)) {
+ const axiosVersion = axiosInstance.VERSION || lib_axios.VERSION;
+ if (!axiosVersion || compareVersions(axiosVersion, '1.7.1') < 0) throw new CozeError('Streaming requests require axios version 1.7.1 or higher. Please upgrade your axios version.');
+ }
  const response = await axiosInstance({
  url,
  responseType: options.isStreaming ? 'stream' : 'json',
@@ -3699,12 +3739,12 @@ async function fetchAPI(url) {
  async *stream () {
  try {
  const stream = response.data;
- const reader = stream[Symbol.asyncIterator]();
+ const reader = stream[Symbol.asyncIterator] ? stream[Symbol.asyncIterator]() : stream.getReader();
  const decoder = new TextDecoder();
  const fieldValues = {};
  let buffer = '';
  while(true){
- const { done, value } = await reader.next();
+ const { done, value } = await (reader.next ? reader.next() : reader.read());
  if (done) {
  if (buffer) {
  // If the stream ends without a newline, it means an error occurred
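The reader fallback above lets the same SSE parsing loop consume either a Node-style async-iterable stream or a WHATWG ReadableStream (as returned by fetch-based adapters). A standalone TypeScript sketch of the pattern, with illustrative names:

    async function* byteChunks(
      stream: AsyncIterable<Uint8Array> | ReadableStream<Uint8Array>,
    ): AsyncGenerator<Uint8Array> {
      // Prefer the async iterator (Node streams); fall back to getReader() for ReadableStream bodies.
      const reader: any = (stream as any)[Symbol.asyncIterator]
        ? (stream as any)[Symbol.asyncIterator]()
        : (stream as ReadableStream<Uint8Array>).getReader();
      while (true) {
        const { done, value } = await (reader.next ? reader.next() : reader.read());
        if (done) return;
        yield value;
      }
    }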
@@ -3738,6 +3778,21 @@ async function fetchAPI(url) {
  response
  };
  }
+ // Add version comparison utility
+ function compareVersions(v1, v2) {
+ const v1Parts = v1.split('.').map(Number);
+ const v2Parts = v2.split('.').map(Number);
+ for(let i = 0; i < 3; i++){
+ const part1 = v1Parts[i] || 0;
+ const part2 = v2Parts[i] || 0;
+ if (part1 > part2) return 1;
+ if (part1 < part2) return -1;
+ }
+ return 0;
+ }
+ function isAxiosStatic(instance) {
+ return !!(null == instance ? void 0 : instance.Axios);
+ }
  /**
  * default coze base URL is api.coze.com
  */ const COZE_COM_BASE_URL = 'https://api.coze.com';
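compareVersions above is a plain three-part numeric comparison (no prerelease handling), used together with isAxiosStatic to reject streaming requests when the resolved axios is older than 1.7.1. Expected results, following the implementation as written:

    compareVersions('1.7.7', '1.7.1'); // 1  — newer than the minimum, streaming allowed
    compareVersions('1.7.1', '1.7.1'); // 0  — exactly the minimum, streaming allowed
    compareVersions('1.6.8', '1.7.1'); // -1 — older, the CozeError above is thrown for streaming requests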
@@ -3761,12 +3816,15 @@ async function fetchAPI(url) {
  const fullUrl = `${this.baseURL}${apiUrl}`;
  const fetchOptions = this.buildOptions(method, body, options);
  fetchOptions.isStreaming = isStream;
+ fetchOptions.axiosInstance = this.axiosInstance;
  this.debugLog(`--- request url: ${fullUrl}`);
  this.debugLog('--- request options:', fetchOptions);
  const { response, stream, json } = await fetchAPI(fullUrl, fetchOptions);
  this.debugLog(`--- response status: ${response.status}`);
  this.debugLog('--- response headers: ', response.headers);
- const contentType = response.headers['content-type'];
+ var _response_headers;
+ // Taro use `header`
+ const contentType = (null !== (_response_headers = response.headers) && void 0 !== _response_headers ? _response_headers : response.header)['content-type'];
  if (isStream) {
  if (contentType && contentType.includes('application/json')) {
  const result = await json();
@@ -3814,6 +3872,7 @@ async function fetchAPI(url) {
  this.baseURL = config.baseURL || COZE_COM_BASE_URL;
  this.token = config.token;
  this.axiosOptions = config.axiosOptions || {};
+ this.axiosInstance = config.axiosInstance;
  this.debug = config.debug || false;
  this.allowPersonalAccessTokenInBrowser = config.allowPersonalAccessTokenInBrowser || false;
  this.headers = config.headers;
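The client constructor now stores config.axiosInstance and forwards it to fetchAPI, so a preconfigured axios instance (timeouts, interceptors, proxies) can be injected. A sketch, assuming the @coze/api client surface and placeholder values:

    import axios from 'axios';
    import { CozeAPI } from '@coze/api';

    const axiosInstance = axios.create({ timeout: 30_000 });
    axiosInstance.interceptors.request.use((cfg) => cfg); // e.g. attach tracing headers here

    const client = new CozeAPI({
      token: 'pat_***',
      axiosInstance, // axios 1.7.1+ is expected for streaming endpoints (see the version guard above)
    });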
@@ -12637,8 +12696,8 @@ function getBrowser() {
  }
  var _navigator$userAgent$, isFirefox = "mozilla" === getBrowser(), isSafari = "safari" === getBrowser(), isChrome = "chrome-stable" === getBrowser(), isCriOS = !isSSR2() && /CriOS/i.test(userAgentString), isEdgeForDesktop = !isSSR2() && /Edg\//i.test(userAgentString), isEdgeForAndroid = !isSSR2() && /EdgA/i.test(userAgentString), isEdgeForIOS = !isSSR2() && /EdgiOS/i.test(userAgentString), isEdge = isEdgeForDesktop || isEdgeForAndroid || isEdgeForIOS, isDingTalk = !isSSR2() && /DingTalk/i.test(navigator.userAgent), isOpera = !isSSR2() && /OPR\//.test(navigator.userAgent), isIPad = !isSSR2() && (!!/(iPad)/i.exec(userAgentString) || /Macintosh/i.test(userAgentString) && "ontouchend" in document), isMac = !isSSR2() && /Macintosh/i.test(userAgentString), isWeChat = !isSSR2() && /MicroMessenger/i.test(userAgentString), isMobile = !isSSR2() && _includesInstanceProperty(_context$3 = userAgentString.toLowerCase()).call(_context$3, "mobile"), isIOS = !isSSR2() && !!/(iPhone|iPad|iPod)/i.exec(userAgentString), isAndroid = !isSSR2() && /Android/i.test(userAgentString), isWindows = !isSSR2() && /Windows/i.test(userAgentString), isOpenHarmony = !isSSR2() && /OpenHarmony/i.test(userAgentString), sv = 0, sv2 = "0", index_esm_min_v = !isSSR2() && (null === (_userAgentString$matc = userAgentString.match(/version\/(\d+)/i)) || void 0 === _userAgentString$matc ? void 0 : _userAgentString$matc[1]);
  isSafari && index_esm_min_v && (sv = Number(index_esm_min_v), sv2 = null === (_navigator$userAgent$ = navigator.userAgent.match(/version\/(\d+\.\d+)/i)) || void 0 === _navigator$userAgent$ ? void 0 : _navigator$userAgent$[1]);
- var v2 = !isSSR2() && (null === (_userAgentString$matc2 = userAgentString.match(/Firefox\/(\d+)/i)) || void 0 === _userAgentString$matc2 ? void 0 : _userAgentString$matc2[1]);
- isFirefox && v2 && (sv = Number(v2));
+ var index_esm_min_v2 = !isSSR2() && (null === (_userAgentString$matc2 = userAgentString.match(/Firefox\/(\d+)/i)) || void 0 === _userAgentString$matc2 ? void 0 : _userAgentString$matc2[1]);
+ isFirefox && index_esm_min_v2 && (sv = Number(index_esm_min_v2));
  var safariVersion = sv, firefoxVersion = sv, safariMinorVersion = sv2, iOSVersion = null !== (_ref = !isSSR2() && (null === (_userAgentString$matc3 = userAgentString.match(/ ([\d_]+) like Mac OS X/i)) || void 0 === _userAgentString$matc3 || null === (_userAgentString$matc4 = _userAgentString$matc3[1]) || void 0 === _userAgentString$matc4 ? void 0 : _mapInstanceProperty(_context2 = _userAgentString$matc4.split("_")).call(_context2, function(e) {
  return _parseInt$7(e);
  }))) && void 0 !== _ref ? _ref : [], cv = 0, cvs = !isSSR2() && (null === (_userAgentString$matc5 = userAgentString.match(/Chrome\/(\d+)/i)) || void 0 === _userAgentString$matc5 ? void 0 : _userAgentString$matc5[1]);
@@ -38239,19 +38298,22 @@ var VERTC = _createClass(function e() {
  /**
  * Get audio devices
  * @returns Promise<AudioDevices> Object containing arrays of audio input and output devices
- */ const getAudioDevices = async ()=>{
- const devices = await index_esm_min_index.enumerateDevices();
+ */ const getAudioDevices = async function() {
+ let { video = false } = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : {};
+ let devices = [];
+ devices = video ? await index_esm_min_index.enumerateDevices() : await [
+ ...await index_esm_min_index.enumerateAudioCaptureDevices(),
+ ...await index_esm_min_index.enumerateAudioPlaybackDevices()
+ ];
  if (!(null == devices ? void 0 : devices.length)) return {
  audioInputs: [],
  audioOutputs: [],
- videoInputs: [],
- videoOutputs: []
+ videoInputs: []
  };
  return {
  audioInputs: devices.filter((i)=>i.deviceId && 'audioinput' === i.kind),
  audioOutputs: devices.filter((i)=>i.deviceId && 'audiooutput' === i.kind),
- videoInputs: devices.filter((i)=>i.deviceId && 'videoinput' === i.kind),
- videoOutputs: devices.filter((i)=>i.deviceId && 'videooutput' === i.kind)
+ videoInputs: devices.filter((i)=>i.deviceId && 'videoinput' === i.kind)
  };
  };
  var error_RealtimeError = /*#__PURE__*/ function(RealtimeError) {
@@ -38354,6 +38416,34 @@ var event_handler_EventNames = /*#__PURE__*/ function(EventNames) {
  * en: Bot left
  * zh: Bot 离开
  */ EventNames["BOT_LEAVE"] = "server.bot.leave";
+ /**
+ * en: Audio speech started
+ * zh: 开始说话
+ */ EventNames["AUDIO_AGENT_SPEECH_STARTED"] = "server.audio.agent.speech_started";
+ /**
+ * en: Audio speech stopped
+ * zh: 停止说话
+ */ EventNames["AUDIO_SPEECH_STOPPED"] = "server.audio.speech_stopped";
+ /**
+ * en: Server error
+ * zh: 服务端错误
+ */ EventNames["SERVER_ERROR"] = "server.error";
+ /**
+ * en: User speech started
+ * zh: 用户开始说话
+ */ EventNames["AUDIO_USER_SPEECH_STARTED"] = "server.audio.user.speech_started";
+ /**
+ * en: User speech stopped
+ * zh: 用户停止说话
+ */ EventNames["AUDIO_USER_SPEECH_STOPPED"] = "server.audio.user.speech_stopped";
+ /**
+ * en: User successfully enters the room
+ * zh: 用户成功进入房间后,会收到该事件
+ */ EventNames["SESSION_CREATED"] = "server.session.created";
+ /**
+ * en: Session updated
+ * zh: 会话更新
+ */ EventNames["SESSION_UPDATE"] = "server.session.update";
  return EventNames;
  }({});
  class RealtimeEventHandler {
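The new server events above can be observed through the usual handler registration on RealtimeClient; a sketch assuming the public EventNames export and placeholder client config (the accessToken/botId field names follow the package README, not this diff):

    import { RealtimeClient, EventNames } from '@coze/realtime-api';

    const client = new RealtimeClient({
      accessToken: 'pat_***',        // placeholder
      botId: 'bot_id_placeholder',   // placeholder
    });

    client.on(EventNames.SESSION_CREATED, (eventName, event) => {
      console.log('joined room', eventName, event);
    });
    client.on(EventNames.AUDIO_USER_SPEECH_STARTED, () => console.log('user speaking'));
    client.on(EventNames.SERVER_ERROR, (eventName, event) => console.error('server error', event));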
@@ -38384,7 +38474,8 @@ class RealtimeEventHandler {
  }
  }
  dispatch(eventName, event) {
- this._log(`dispatch ${eventName} event`);
+ let consoleLog = !(arguments.length > 2) || void 0 === arguments[2] || arguments[2];
+ if (consoleLog) this._log(`dispatch ${eventName} event`);
  const handlers = (this.eventHandlers[eventName] || []).slice();
  this._dispatchToHandlers(eventName, event, handlers);
  const allHandlers = (this.eventHandlers["realtime.event"] || []).slice();
@@ -41653,7 +41744,7 @@ class EngineClient extends RealtimeEventHandler {
  this.engine.on(index_esm_min_index.events.onUserJoined, this.handleUserJoin);
  this.engine.on(index_esm_min_index.events.onUserLeave, this.handleUserLeave);
  this.engine.on(index_esm_min_index.events.onError, this.handleEventError);
- this.engine.on(index_esm_min_index.events.onPlayerEvent, this.handlePlayerEvent);
+ if (this._isSupportVideo) this.engine.on(index_esm_min_index.events.onPlayerEvent, this.handlePlayerEvent);
  if (this._debug) {
  this.engine.on(index_esm_min_index.events.onLocalAudioPropertiesReport, this.handleLocalAudioPropertiesReport);
  this.engine.on(index_esm_min_index.events.onRemoteAudioPropertiesReport, this.handleRemoteAudioPropertiesReport);
@@ -41664,7 +41755,7 @@ class EngineClient extends RealtimeEventHandler {
  this.engine.off(index_esm_min_index.events.onUserJoined, this.handleUserJoin);
  this.engine.off(index_esm_min_index.events.onUserLeave, this.handleUserLeave);
  this.engine.off(index_esm_min_index.events.onError, this.handleEventError);
- this.engine.off(index_esm_min_index.events.onPlayerEvent, this.handlePlayerEvent);
+ if (this._isSupportVideo) this.engine.off(index_esm_min_index.events.onPlayerEvent, this.handlePlayerEvent);
  if (this._debug) {
  this.engine.off(index_esm_min_index.events.onLocalAudioPropertiesReport, this.handleLocalAudioPropertiesReport);
  this.engine.off(index_esm_min_index.events.onRemoteAudioPropertiesReport, this.handleRemoteAudioPropertiesReport);
@@ -41709,13 +41800,13 @@ class EngineClient extends RealtimeEventHandler {
  this.dispatch(event_handler_EventNames.PLAYER_EVENT, event);
  }
  async joinRoom(options) {
- const { token, roomId, uid, audioMutedDefault, videoOnDefault } = options;
+ const { token, roomId, uid, audioMutedDefault, videoOnDefault, isAutoSubscribeAudio } = options;
  try {
  await this.engine.joinRoom(token, roomId, {
  userId: uid
  }, {
  isAutoPublish: !audioMutedDefault,
- isAutoSubscribeAudio: true,
+ isAutoSubscribeAudio,
  isAutoSubscribeVideo: this._isSupportVideo && videoOnDefault
  });
  } catch (e) {
@@ -41729,14 +41820,18 @@ class EngineClient extends RealtimeEventHandler {
  await this.engine.startAudioCapture(deviceId);
  }
  async setAudioOutputDevice(deviceId) {
- const devices = await getAudioDevices();
+ const devices = await getAudioDevices({
+ video: false
+ });
  if (-1 === devices.audioOutputs.findIndex((i)=>i.deviceId === deviceId)) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, `Audio output device not found: ${deviceId}`);
  await this.engine.setAudioPlaybackDevice(deviceId);
  }
  async createLocalStream(userId, videoConfig) {
- const devices = await getAudioDevices();
- if (!devices.audioInputs.length) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, 'Failed to get devices');
- if (this._isSupportVideo && !devices.videoInputs.length) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, 'Failed to get devices');
+ const devices = await getAudioDevices({
+ video: this._isSupportVideo
+ });
+ if (!devices.audioInputs.length) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, 'Failed to get audio devices');
+ if (this._isSupportVideo && !devices.videoInputs.length) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, 'Failed to get video devices');
  await this.engine.startAudioCapture(devices.audioInputs[0].deviceId);
  if (this._isSupportVideo && (null == videoConfig ? void 0 : videoConfig.videoOnDefault)) await this.engine.startVideoCapture(devices.videoInputs[0].deviceId);
  if (this._isSupportVideo) this.engine.setLocalVideoPlayer(StreamIndex$1.STREAM_INDEX_MAIN, {
@@ -41889,7 +41984,7 @@ class RealtimeClient extends RealtimeEventHandler {
  // Step3 bind engine events
  this._client.bindEngineEvents();
  this._client.on(event_handler_EventNames.ALL, (eventName, data)=>{
- this.dispatch(eventName, data);
+ this.dispatch(eventName, data, false);
  });
  if (this._config.suppressStationaryNoise) {
  await this._client.enableAudioNoiseReduction();
@@ -41900,14 +41995,15 @@
  this._client.changeAIAnsExtension(true);
  this.dispatch(event_handler_EventNames.SUPPRESS_NON_STATIONARY_NOISE, {});
  }
- var _this__config_audioMutedDefault, _this__config_videoConfig_videoOnDefault;
+ var _this__config_audioMutedDefault, _this__config_videoConfig_videoOnDefault, _this__config_isAutoSubscribeAudio;
  // Step4 join room
  await this._client.joinRoom({
  token: roomInfo.token,
  roomId: roomInfo.room_id,
  uid: roomInfo.uid,
  audioMutedDefault: null !== (_this__config_audioMutedDefault = this._config.audioMutedDefault) && void 0 !== _this__config_audioMutedDefault && _this__config_audioMutedDefault,
- videoOnDefault: null === (_this__config_videoConfig_videoOnDefault = null === (_this__config_videoConfig = this._config.videoConfig) || void 0 === _this__config_videoConfig ? void 0 : _this__config_videoConfig.videoOnDefault) || void 0 === _this__config_videoConfig_videoOnDefault || _this__config_videoConfig_videoOnDefault
+ videoOnDefault: null === (_this__config_videoConfig_videoOnDefault = null === (_this__config_videoConfig = this._config.videoConfig) || void 0 === _this__config_videoConfig ? void 0 : _this__config_videoConfig.videoOnDefault) || void 0 === _this__config_videoConfig_videoOnDefault || _this__config_videoConfig_videoOnDefault,
+ isAutoSubscribeAudio: null === (_this__config_isAutoSubscribeAudio = this._config.isAutoSubscribeAudio) || void 0 === _this__config_isAutoSubscribeAudio || _this__config_isAutoSubscribeAudio
  });
  // Step5 create local stream
  await this._client.createLocalStream(roomInfo.uid, this._config.videoConfig);
@@ -41919,7 +42015,6 @@ class RealtimeClient extends RealtimeEventHandler {
  token: roomInfo.token,
  appId: roomInfo.app_id
  });
- this._log('dispatch client.connected event');
  }
  /**
  * en: Interrupt the current conversation
@@ -41929,7 +42024,6 @@ class RealtimeClient extends RealtimeEventHandler {
  var _this__client;
  await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.stop());
  this.dispatch(event_handler_EventNames.INTERRUPTED, {});
- this._log('dispatch client.interrupted event');
  }
  /**
  * en: Disconnect from the current session
@@ -42051,6 +42145,7 @@ class RealtimeClient extends RealtimeEventHandler {
  * 可选,默认是否抑制静态噪声,默认值为 false。
  * @param config.suppressNonStationaryNoise - Optional, suppress non-stationary noise, defaults to false. |
  * 可选,默认是否抑制非静态噪声,默认值为 false。
+ * @param config.isAutoSubscribeAudio - Optional, whether to automatically subscribe to bot reply audio streams, defaults to true. |
  */ constructor(config){
  super(config.debug), this._client = null, this.isConnected = false, this._isTestEnv = false, this._isSupportVideo = false;
  this._config = config;
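isAutoSubscribeAudio defaults to true; turning it off joins the room without auto-subscribing to the bot's reply audio, as wired through joinRoom above. A sketch with placeholder config (the other field names follow the package README):

    import { RealtimeClient } from '@coze/realtime-api';

    const client = new RealtimeClient({
      accessToken: 'pat_***',        // placeholder
      botId: 'bot_id_placeholder',   // placeholder
      isAutoSubscribeAudio: false,   // new in this release: do not auto-subscribe to bot audio
    });

    await client.connect();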