@coze/realtime-api 1.0.1 → 1.0.2
This diff compares the contents of two publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
- package/dist/cjs/index.cjs +129 -34
- package/dist/esm/index.js +129 -34
- package/dist/types/client.d.ts +1 -0
- package/dist/types/event-handler.d.ts +38 -4
- package/dist/types/index.d.ts +2 -0
- package/dist/types/utils.d.ts +3 -2
- package/dist/umd/index.js +129 -34
- package/package.json +2 -2
package/dist/esm/index.js
CHANGED
@@ -1,10 +1,10 @@
 /*! For license information please see index.js.LICENSE.txt */
 var __webpack_modules__ = {
-"?
+"?e272": function() {
 /* (ignored) */ },
-"?
+"?5742": function() {
 /* (ignored) */ },
-"?
+"?9caf": function() {
 /* (ignored) */ }
 };
 /************************************************************************/ // The module cache
@@ -61,7 +61,7 @@ function __webpack_require__(moduleId) {
 });
 };
 })();
-/************************************************************************/ // NAMESPACE OBJECT: ../../common/temp/default/node_modules/.pnpm/axios@1.7.7/node_modules/axios/lib/platform/common/utils.js
+/************************************************************************/ // NAMESPACE OBJECT: ../../common/temp/default/node_modules/.pnpm/axios@1.7.7_debug@4.3.7/node_modules/axios/lib/platform/common/utils.js
 var common_utils_namespaceObject = {};
 __webpack_require__.r(common_utils_namespaceObject);
 __webpack_require__.d(common_utils_namespaceObject, {
@@ -2123,7 +2123,7 @@ const trackStream = (stream, chunkSize, onProgress, onFinish)=>{
 }, {
 highWaterMark: 2
 });
-}; // CONCATENATED MODULE: ../../common/temp/default/node_modules/.pnpm/axios@1.7.7/node_modules/axios/lib/adapters/fetch.js
+}; // CONCATENATED MODULE: ../../common/temp/default/node_modules/.pnpm/axios@1.7.7_debug@4.3.7/node_modules/axios/lib/adapters/fetch.js
 const isFetchSupported = 'function' == typeof fetch && 'function' == typeof Request && 'function' == typeof Response;
 const isReadableStreamSupported = isFetchSupported && 'function' == typeof ReadableStream;
 // used only inside the fetch adapter
@@ -2845,11 +2845,11 @@ axios.default = axios;
 // so that it can keep same with es module or cjs
 const { Axios: axios_Axios, AxiosError: axios_AxiosError, CanceledError: axios_CanceledError, isCancel: axios_isCancel, CancelToken: axios_CancelToken, VERSION: axios_VERSION, all: axios_all, Cancel, isAxiosError: axios_isAxiosError, spread: axios_spread, toFormData: axios_toFormData, AxiosHeaders: axios_AxiosHeaders, HttpStatusCode: axios_HttpStatusCode, formToJSON, getAdapter, mergeConfig: axios_mergeConfig } = lib_axios;
 // EXTERNAL MODULE: os (ignored)
-var os_ignored_ = __webpack_require__("?
+var os_ignored_ = __webpack_require__("?9caf");
 // EXTERNAL MODULE: crypto (ignored)
-__webpack_require__("?
+__webpack_require__("?e272");
 // EXTERNAL MODULE: jsonwebtoken (ignored)
-__webpack_require__("?
+__webpack_require__("?5742");
 class APIResource {
 constructor(client){
 this._client = client;
@@ -3091,6 +3091,10 @@ class Messages extends APIResource {
 }
 }
 const uuid = ()=>(Math.random() * new Date().getTime()).toString();
+const handleAdditionalMessages = (additional_messages)=>null == additional_messages ? void 0 : additional_messages.map((i)=>({
+...i,
+content: 'object' == typeof i.content ? JSON.stringify(i.content) : i.content
+}));
 class Chat extends APIResource {
 /**
 * Call the Chat API to send messages to a published Coze agent. | 调用此接口发起一次对话,支持添加上下文
@@ -3112,6 +3116,7 @@ class Chat extends APIResource {
 const apiUrl = `/v3/chat${conversation_id ? `?conversation_id=${conversation_id}` : ''}`;
 const payload = {
 ...rest,
+additional_messages: handleAdditionalMessages(params.additional_messages),
 stream: false
 };
 const result = await this._client.post(apiUrl, payload, false, options);
@@ -3137,6 +3142,7 @@
 const apiUrl = `/v3/chat${conversation_id ? `?conversation_id=${conversation_id}` : ''}`;
 const payload = {
 ...rest,
+additional_messages: handleAdditionalMessages(params.additional_messages),
 stream: false
 };
 const result = await this._client.post(apiUrl, payload, false, options);
@@ -3174,6 +3180,7 @@
 const apiUrl = `/v3/chat${conversation_id ? `?conversation_id=${conversation_id}` : ''}`;
 const payload = {
 ...rest,
+additional_messages: handleAdditionalMessages(params.additional_messages),
 stream: true
 };
 const result = await this._client.post(apiUrl, payload, true, options);
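The new handleAdditionalMessages helper normalizes params.additional_messages before every /v3/chat request: object-valued content is serialized with JSON.stringify, while string content passes through unchanged. A small standalone TypeScript sketch of the same behavior (types and names here are illustrative, not taken from the package):

type AdditionalMessage = { role: string; content: unknown; [key: string]: unknown };

// Mirrors the bundled helper: object content becomes a JSON string, strings are left as-is.
const handleAdditionalMessages = (messages?: AdditionalMessage[]) =>
  messages?.map((m) => ({
    ...m,
    content: typeof m.content === 'object' ? JSON.stringify(m.content) : m.content,
  }));

// The object form is serialized into the outgoing payload.
console.log(handleAdditionalMessages([{ role: 'user', content: [{ type: 'text', text: 'hi' }] }]));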
@@ -3348,6 +3355,7 @@ class Conversations extends APIResource {
 * @param params - Required The parameters for creating a conversation | 创建会话所需的参数
 * @param params.messages - Optional Messages in the conversation. | 会话中的消息内容。
 * @param params.meta_data - Optional Additional information when creating a message. | 创建消息时的附加消息。
+* @param params.bot_id - Optional Bind and isolate conversation on different bots. | 绑定和隔离不同Bot的会话。
 * @returns Information about the created conversation. | 会话的基础信息。
 */ async create(params, options) {
 const apiUrl = '/v1/conversation/create';
@@ -3365,6 +3373,27 @@
 const response = await this._client.get(apiUrl, null, false, options);
 return response.data;
 }
+/**
+* List all conversations. | 列出 Bot 下所有会话。
+* @param params
+* @param params.bot_id - Required Bot ID. | Bot ID。
+* @param params.page_num - Optional The page number. | 页码,默认值为 1。
+* @param params.page_size - Optional The number of conversations per page. | 每页的会话数量,默认值为 50。
+* @returns Information about the conversations. | 会话的信息。
+*/ async list(params, options) {
+const apiUrl = '/v1/conversations';
+const response = await this._client.get(apiUrl, params, false, options);
+return response.data;
+}
+/**
+* Clear a conversation. | 清空会话。
+* @param conversation_id - Required The ID of the conversation. | Conversation ID,即会话的唯一标识。
+* @returns Information about the conversation session. | 会话的会话 ID。
+*/ async clear(conversation_id, options) {
+const apiUrl = `/v1/conversations/${conversation_id}/clear`;
+const response = await this._client.post(apiUrl, null, false, options);
+return response.data;
+}
 constructor(...args){
 super(...args), this.messages = new messages_Messages(this._client);
 }
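The Conversations resource gains list (GET /v1/conversations) and clear (POST /v1/conversations/{conversation_id}/clear). A minimal usage sketch, assuming the bundled @coze/api client exposes the resource as client.conversations; the token and IDs are placeholders:

import { CozeAPI } from '@coze/api';

const client = new CozeAPI({ token: 'pat_***' });

// List conversations bound to a bot; page_num defaults to 1 and page_size to 50.
const conversations = await client.conversations.list({
  bot_id: 'your_bot_id',
  page_num: 1,
  page_size: 20,
});

// Empty a single conversation by its ID.
await client.conversations.clear('your_conversation_id');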
@@ -3403,6 +3432,9 @@ class Runs extends APIResource {
 * @param params.bot_id - Optional The ID of the bot associated with the workflow. | 可选 与工作流关联的机器人 ID。
 * @param params.parameters - Optional Parameters for the workflow execution. | 可选 工作流执行的参数。
 * @param params.ext - Optional Additional information for the workflow execution. | 可选 工作流执行的附加信息。
+* @param params.execute_mode - Optional The mode in which to execute the workflow. | 可选 工作流执行的模式。
+* @param params.connector_id - Optional The ID of the connector to use for the workflow. | 可选 用于工作流的连接器 ID。
+* @param params.app_id - Optional The ID of the app. | 可选 要进行会话聊天的 App ID
 * @returns RunWorkflowData | 工作流运行数据
 */ async create(params, options) {
 const apiUrl = '/v1/workflow/run';
@@ -3417,6 +3449,9 @@
 * @param params.bot_id - Optional The ID of the bot associated with the workflow. | 可选 与工作流关联的机器人 ID。
 * @param params.parameters - Optional Parameters for the workflow execution. | 可选 工作流执行的参数。
 * @param params.ext - Optional Additional information for the workflow execution. | 可选 工作流执行的附加信息。
+* @param params.execute_mode - Optional The mode in which to execute the workflow. | 可选 工作流执行的模式。
+* @param params.connector_id - Optional The ID of the connector to use for the workflow. | 可选 用于工作流的连接器 ID。
+* @param params.app_id - Optional The ID of the app. | 可选 要进行会话聊天的 App ID
 * @returns Stream<WorkflowEvent, { id: string; event: string; data: string }> | 工作流事件流
 */ async *stream(params, options) {
 const apiUrl = '/v1/workflow/stream_run';
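The workflow run and stream_run documentation gains three optional parameters: execute_mode, connector_id and app_id. A hedged sketch of a call that passes them, assuming the resource is mounted at client.workflows.runs; all IDs and the execute_mode value are illustrative placeholders:

import { CozeAPI } from '@coze/api';

const client = new CozeAPI({ token: 'pat_***' });

const run = await client.workflows.runs.create({
  workflow_id: 'your_workflow_id',
  app_id: 'your_app_id',          // new: run a workflow that belongs to an app (assumed value)
  connector_id: 'your_connector', // new: connector to run under (assumed value)
  execute_mode: 'DEBUG',          // new: execution mode (assumed value)
  parameters: { input: 'hello' },
});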
@@ -3612,7 +3647,7 @@ class esm_Audio extends APIResource {
 super(...args), this.rooms = new Rooms(this._client), this.voices = new Voices(this._client), this.speech = new Speech(this._client);
 }
 }
-var package_namespaceObject = JSON.parse('{"name":"@coze/api","version":"1.0.
+var package_namespaceObject = JSON.parse('{"name":"@coze/api","version":"1.0.14","description":"Official Coze Node.js SDK for seamless AI integration into your applications | 扣子官方 Node.js SDK,助您轻松集成 AI 能力到应用中","keywords":["coze","ai","nodejs","sdk","chatbot","typescript"],"homepage":"https://github.com/coze-dev/coze-js/tree/main/packages/coze-js","bugs":{"url":"https://github.com/coze-dev/coze-js/issues"},"repository":{"type":"git","url":"https://github.com/coze-dev/coze-js.git","directory":"packages/coze-js"},"license":"MIT","author":"Leeight <leeight@gmail.com>","type":"module","exports":{".":{"require":"./dist/cjs/index.cjs","import":"./dist/esm/index.js","types":"./dist/types/index.d.ts"}},"main":"dist/cjs/index.cjs","module":"dist/esm/index.js","browser":{"crypto":false,"os":false,"jsonwebtoken":false},"types":"dist/types/index.d.ts","files":["dist","LICENSE","README.md","!**/*.tsbuildinfo"],"scripts":{"build":"rm -rf dist && rslib build","format":"prettier --write .","lint":"eslint ./ --cache --quiet","prepublishOnly":"npm run build","start":"rm -rf dist && rslib build -w","test":"vitest","test:cov":"vitest --coverage --run"},"dependencies":{"jsonwebtoken":"^9.0.2"},"devDependencies":{"@coze-infra/eslint-config":"workspace:*","@coze-infra/ts-config":"workspace:*","@coze-infra/vitest-config":"workspace:*","@rslib/core":"0.0.18","@swc/core":"^1.3.14","@types/jsonwebtoken":"^9.0.0","@types/node":"^20","@types/uuid":"^9.0.1","@types/whatwg-fetch":"^0.0.33","@vitest/coverage-v8":"~2.1.4","axios":"^1.7.7","typescript":"^5.5.3","vitest":"~2.1.4"},"peerDependencies":{"axios":"^1.7.1"}}'); // CONCATENATED MODULE: ./src/version.ts
 const { version: esm_version } = package_namespaceObject;
 const getEnv = ()=>{
 const nodeVersion = process.version.slice(1); // Remove 'v' prefix
@@ -3675,6 +3710,11 @@ const getNodeClientUserAgent = ()=>{
 async function fetchAPI(url) {
 let options = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : {};
 const axiosInstance = options.axiosInstance || lib_axios;
+// Add version check for streaming requests
+if (options.isStreaming && isAxiosStatic(axiosInstance)) {
+const axiosVersion = axiosInstance.VERSION || lib_axios.VERSION;
+if (!axiosVersion || compareVersions(axiosVersion, '1.7.1') < 0) throw new CozeError('Streaming requests require axios version 1.7.1 or higher. Please upgrade your axios version.');
+}
 const response = await axiosInstance({
 url,
 responseType: options.isStreaming ? 'stream' : 'json',
@@ -3687,12 +3727,12 @@ async function fetchAPI(url) {
 async *stream () {
 try {
 const stream = response.data;
-const reader = stream[Symbol.asyncIterator]();
+const reader = stream[Symbol.asyncIterator] ? stream[Symbol.asyncIterator]() : stream.getReader();
 const decoder = new TextDecoder();
 const fieldValues = {};
 let buffer = '';
 while(true){
-const { done, value } = await reader.next();
+const { done, value } = await (reader.next ? reader.next() : reader.read());
 if (done) {
 if (buffer) {
 // If the stream ends without a newline, it means an error occurred
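The stream iterator now falls back to ReadableStream.getReader() when the response body does not implement Symbol.asyncIterator (for example, a web ReadableStream returned by the fetch adapter). A self-contained sketch of the same pattern, written generically rather than copied from the bundle:

// Iterate a body that may be either an async-iterable Node stream or a web ReadableStream.
async function* iterateChunks(body: any): AsyncGenerator<Uint8Array> {
  const reader = body[Symbol.asyncIterator] ? body[Symbol.asyncIterator]() : body.getReader();
  while (true) {
    const { done, value } = await (reader.next ? reader.next() : reader.read());
    if (done) return;
    yield value;
  }
}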
@@ -3726,6 +3766,21 @@ async function fetchAPI(url) {
 response
 };
 }
+// Add version comparison utility
+function compareVersions(v1, v2) {
+const v1Parts = v1.split('.').map(Number);
+const v2Parts = v2.split('.').map(Number);
+for(let i = 0; i < 3; i++){
+const part1 = v1Parts[i] || 0;
+const part2 = v2Parts[i] || 0;
+if (part1 > part2) return 1;
+if (part1 < part2) return -1;
+}
+return 0;
+}
+function isAxiosStatic(instance) {
+return !!(null == instance ? void 0 : instance.Axios);
+}
 /**
 * default coze base URL is api.coze.com
 */ const COZE_COM_BASE_URL = 'https://api.coze.com';
@@ -3749,12 +3804,15 @@
 const fullUrl = `${this.baseURL}${apiUrl}`;
 const fetchOptions = this.buildOptions(method, body, options);
 fetchOptions.isStreaming = isStream;
+fetchOptions.axiosInstance = this.axiosInstance;
 this.debugLog(`--- request url: ${fullUrl}`);
 this.debugLog('--- request options:', fetchOptions);
 const { response, stream, json } = await fetchAPI(fullUrl, fetchOptions);
 this.debugLog(`--- response status: ${response.status}`);
 this.debugLog('--- response headers: ', response.headers);
-
+var _response_headers;
+// Taro use `header`
+const contentType = (null !== (_response_headers = response.headers) && void 0 !== _response_headers ? _response_headers : response.header)['content-type'];
 if (isStream) {
 if (contentType && contentType.includes('application/json')) {
 const result = await json();
@@ -3802,6 +3860,7 @@
 this.baseURL = config.baseURL || COZE_COM_BASE_URL;
 this.token = config.token;
 this.axiosOptions = config.axiosOptions || {};
+this.axiosInstance = config.axiosInstance;
 this.debug = config.debug || false;
 this.allowPersonalAccessTokenInBrowser = config.allowPersonalAccessTokenInBrowser || false;
 this.headers = config.headers;
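Together, the config.axiosInstance plumbing and the compareVersions guard mean a caller-supplied axios instance is now forwarded to fetchAPI, and streaming requests throw a CozeError when the detected axios version is older than 1.7.1. A hedged sketch of injecting a custom instance, assuming the bundled client class is CozeAPI and that axiosInstance is accepted in its constructor options, as the constructor change above suggests:

import axios from 'axios'; // streaming requests require axios >= 1.7.1
import { CozeAPI } from '@coze/api';

// A pre-configured axios instance, e.g. with a custom timeout or interceptors.
const axiosInstance = axios.create({ timeout: 30_000 });

const client = new CozeAPI({
  token: 'pat_***',
  axiosInstance, // forwarded to every request, including streaming ones
});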
@@ -12625,8 +12684,8 @@ function getBrowser() {
 }
 var _navigator$userAgent$, isFirefox = "mozilla" === getBrowser(), isSafari = "safari" === getBrowser(), isChrome = "chrome-stable" === getBrowser(), isCriOS = !isSSR2() && /CriOS/i.test(userAgentString), isEdgeForDesktop = !isSSR2() && /Edg\//i.test(userAgentString), isEdgeForAndroid = !isSSR2() && /EdgA/i.test(userAgentString), isEdgeForIOS = !isSSR2() && /EdgiOS/i.test(userAgentString), isEdge = isEdgeForDesktop || isEdgeForAndroid || isEdgeForIOS, isDingTalk = !isSSR2() && /DingTalk/i.test(navigator.userAgent), isOpera = !isSSR2() && /OPR\//.test(navigator.userAgent), isIPad = !isSSR2() && (!!/(iPad)/i.exec(userAgentString) || /Macintosh/i.test(userAgentString) && "ontouchend" in document), isMac = !isSSR2() && /Macintosh/i.test(userAgentString), isWeChat = !isSSR2() && /MicroMessenger/i.test(userAgentString), isMobile = !isSSR2() && _includesInstanceProperty(_context$3 = userAgentString.toLowerCase()).call(_context$3, "mobile"), isIOS = !isSSR2() && !!/(iPhone|iPad|iPod)/i.exec(userAgentString), isAndroid = !isSSR2() && /Android/i.test(userAgentString), isWindows = !isSSR2() && /Windows/i.test(userAgentString), isOpenHarmony = !isSSR2() && /OpenHarmony/i.test(userAgentString), sv = 0, sv2 = "0", index_esm_min_v = !isSSR2() && (null === (_userAgentString$matc = userAgentString.match(/version\/(\d+)/i)) || void 0 === _userAgentString$matc ? void 0 : _userAgentString$matc[1]);
 isSafari && index_esm_min_v && (sv = Number(index_esm_min_v), sv2 = null === (_navigator$userAgent$ = navigator.userAgent.match(/version\/(\d+\.\d+)/i)) || void 0 === _navigator$userAgent$ ? void 0 : _navigator$userAgent$[1]);
-var
-isFirefox &&
+var index_esm_min_v2 = !isSSR2() && (null === (_userAgentString$matc2 = userAgentString.match(/Firefox\/(\d+)/i)) || void 0 === _userAgentString$matc2 ? void 0 : _userAgentString$matc2[1]);
+isFirefox && index_esm_min_v2 && (sv = Number(index_esm_min_v2));
 var safariVersion = sv, firefoxVersion = sv, safariMinorVersion = sv2, iOSVersion = null !== (_ref = !isSSR2() && (null === (_userAgentString$matc3 = userAgentString.match(/ ([\d_]+) like Mac OS X/i)) || void 0 === _userAgentString$matc3 || null === (_userAgentString$matc4 = _userAgentString$matc3[1]) || void 0 === _userAgentString$matc4 ? void 0 : _mapInstanceProperty(_context2 = _userAgentString$matc4.split("_")).call(_context2, function(e) {
 return _parseInt$7(e);
 }))) && void 0 !== _ref ? _ref : [], cv = 0, cvs = !isSSR2() && (null === (_userAgentString$matc5 = userAgentString.match(/Chrome\/(\d+)/i)) || void 0 === _userAgentString$matc5 ? void 0 : _userAgentString$matc5[1]);
@@ -38227,19 +38286,22 @@ var VERTC = _createClass(function e() {
 /**
 * Get audio devices
 * @returns Promise<AudioDevices> Object containing arrays of audio input and output devices
-*/ const getAudioDevices = async ()
-
+*/ const getAudioDevices = async function() {
+let { video = false } = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : {};
+let devices = [];
+devices = video ? await index_esm_min_index.enumerateDevices() : await [
+...await index_esm_min_index.enumerateAudioCaptureDevices(),
+...await index_esm_min_index.enumerateAudioPlaybackDevices()
+];
 if (!(null == devices ? void 0 : devices.length)) return {
 audioInputs: [],
 audioOutputs: [],
-videoInputs: []
-videoOutputs: []
+videoInputs: []
 };
 return {
 audioInputs: devices.filter((i)=>i.deviceId && 'audioinput' === i.kind),
 audioOutputs: devices.filter((i)=>i.deviceId && 'audiooutput' === i.kind),
-videoInputs: devices.filter((i)=>i.deviceId && 'videoinput' === i.kind)
-videoOutputs: devices.filter((i)=>i.deviceId && 'videooutput' === i.kind)
+videoInputs: devices.filter((i)=>i.deviceId && 'videoinput' === i.kind)
 };
 };
 var error_RealtimeError = /*#__PURE__*/ function(RealtimeError) {
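getAudioDevices now takes an optional { video } flag: audio-only callers enumerate just capture and playback audio devices, and the videoOutputs field is gone. A minimal sketch of calling it; the import path is a hypothetical assumption (the function signature matches dist/types/utils.d.ts below):

import { getAudioDevices } from '@coze/realtime-api'; // assumed re-export path

const { audioInputs, audioOutputs, videoInputs } = await getAudioDevices({ video: true });
console.log(`${audioInputs.length} microphones, ${audioOutputs.length} speakers, ${videoInputs.length} cameras`);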
@@ -38342,6 +38404,34 @@ var event_handler_EventNames = /*#__PURE__*/ function(EventNames) {
 * en: Bot left
 * zh: Bot 离开
 */ EventNames["BOT_LEAVE"] = "server.bot.leave";
+/**
+* en: Audio speech started
+* zh: 开始说话
+*/ EventNames["AUDIO_AGENT_SPEECH_STARTED"] = "server.audio.agent.speech_started";
+/**
+* en: Audio speech stopped
+* zh: 停止说话
+*/ EventNames["AUDIO_SPEECH_STOPPED"] = "server.audio.speech_stopped";
+/**
+* en: Server error
+* zh: 服务端错误
+*/ EventNames["SERVER_ERROR"] = "server.error";
+/**
+* en: User speech started
+* zh: 用户开始说话
+*/ EventNames["AUDIO_USER_SPEECH_STARTED"] = "server.audio.user.speech_started";
+/**
+* en: User speech stopped
+* zh: 用户停止说话
+*/ EventNames["AUDIO_USER_SPEECH_STOPPED"] = "server.audio.user.speech_stopped";
+/**
+* en: User successfully enters the room
+* zh: 用户成功进入房间后,会收到该事件
+*/ EventNames["SESSION_CREATED"] = "server.session.created";
+/**
+* en: Session updated
+* zh: 会话更新
+*/ EventNames["SESSION_UPDATE"] = "server.session.update";
 return EventNames;
 }({});
 class RealtimeEventHandler {
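EventNames gains seven server-side events (agent and user speech start/stop, server error, session created and session update). A hedged sketch of subscribing to them, assuming EventNames is re-exported from the package root; the constructor fields are assumed placeholders not shown in this diff, and the callback signature follows the EventCallback type in dist/types/event-handler.d.ts:

import { RealtimeClient, EventNames } from '@coze/realtime-api';

const client = new RealtimeClient({
  accessToken: 'pat_***', // assumed field name
  botId: 'your_bot_id',   // assumed field name
});

client.on(EventNames.SESSION_CREATED, (eventName, event) => {
  console.log('joined room', eventName, event);
});
client.on(EventNames.AUDIO_USER_SPEECH_STARTED, () => console.log('user started speaking'));
client.on(EventNames.SERVER_ERROR, (eventName, event) => console.error('server error', event));

await client.connect();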
@@ -38372,7 +38462,8 @@ class RealtimeEventHandler {
 }
 }
 dispatch(eventName, event) {
-
+let consoleLog = !(arguments.length > 2) || void 0 === arguments[2] || arguments[2];
+if (consoleLog) this._log(`dispatch ${eventName} event`);
 const handlers = (this.eventHandlers[eventName] || []).slice();
 this._dispatchToHandlers(eventName, event, handlers);
 const allHandlers = (this.eventHandlers["realtime.event"] || []).slice();
@@ -41641,7 +41732,7 @@ class EngineClient extends RealtimeEventHandler {
 this.engine.on(index_esm_min_index.events.onUserJoined, this.handleUserJoin);
 this.engine.on(index_esm_min_index.events.onUserLeave, this.handleUserLeave);
 this.engine.on(index_esm_min_index.events.onError, this.handleEventError);
-this.engine.on(index_esm_min_index.events.onPlayerEvent, this.handlePlayerEvent);
+if (this._isSupportVideo) this.engine.on(index_esm_min_index.events.onPlayerEvent, this.handlePlayerEvent);
 if (this._debug) {
 this.engine.on(index_esm_min_index.events.onLocalAudioPropertiesReport, this.handleLocalAudioPropertiesReport);
 this.engine.on(index_esm_min_index.events.onRemoteAudioPropertiesReport, this.handleRemoteAudioPropertiesReport);
@@ -41652,7 +41743,7 @@
 this.engine.off(index_esm_min_index.events.onUserJoined, this.handleUserJoin);
 this.engine.off(index_esm_min_index.events.onUserLeave, this.handleUserLeave);
 this.engine.off(index_esm_min_index.events.onError, this.handleEventError);
-this.engine.off(index_esm_min_index.events.onPlayerEvent, this.handlePlayerEvent);
+if (this._isSupportVideo) this.engine.off(index_esm_min_index.events.onPlayerEvent, this.handlePlayerEvent);
 if (this._debug) {
 this.engine.off(index_esm_min_index.events.onLocalAudioPropertiesReport, this.handleLocalAudioPropertiesReport);
 this.engine.off(index_esm_min_index.events.onRemoteAudioPropertiesReport, this.handleRemoteAudioPropertiesReport);
|
|
41697
41788
|
this.dispatch(event_handler_EventNames.PLAYER_EVENT, event);
|
41698
41789
|
}
|
41699
41790
|
async joinRoom(options) {
|
41700
|
-
const { token, roomId, uid, audioMutedDefault, videoOnDefault } = options;
|
41791
|
+
const { token, roomId, uid, audioMutedDefault, videoOnDefault, isAutoSubscribeAudio } = options;
|
41701
41792
|
try {
|
41702
41793
|
await this.engine.joinRoom(token, roomId, {
|
41703
41794
|
userId: uid
|
41704
41795
|
}, {
|
41705
41796
|
isAutoPublish: !audioMutedDefault,
|
41706
|
-
isAutoSubscribeAudio
|
41797
|
+
isAutoSubscribeAudio,
|
41707
41798
|
isAutoSubscribeVideo: this._isSupportVideo && videoOnDefault
|
41708
41799
|
});
|
41709
41800
|
} catch (e) {
|
@@ -41717,14 +41808,18 @@ class EngineClient extends RealtimeEventHandler {
|
|
41717
41808
|
await this.engine.startAudioCapture(deviceId);
|
41718
41809
|
}
|
41719
41810
|
async setAudioOutputDevice(deviceId) {
|
41720
|
-
const devices = await getAudioDevices(
|
41811
|
+
const devices = await getAudioDevices({
|
41812
|
+
video: false
|
41813
|
+
});
|
41721
41814
|
if (-1 === devices.audioOutputs.findIndex((i)=>i.deviceId === deviceId)) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, `Audio output device not found: ${deviceId}`);
|
41722
41815
|
await this.engine.setAudioPlaybackDevice(deviceId);
|
41723
41816
|
}
|
41724
41817
|
async createLocalStream(userId, videoConfig) {
|
41725
|
-
const devices = await getAudioDevices(
|
41726
|
-
|
41727
|
-
|
41818
|
+
const devices = await getAudioDevices({
|
41819
|
+
video: this._isSupportVideo
|
41820
|
+
});
|
41821
|
+
if (!devices.audioInputs.length) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, 'Failed to get audio devices');
|
41822
|
+
if (this._isSupportVideo && !devices.videoInputs.length) throw new RealtimeAPIError(error_RealtimeError.DEVICE_ACCESS_ERROR, 'Failed to get video devices');
|
41728
41823
|
await this.engine.startAudioCapture(devices.audioInputs[0].deviceId);
|
41729
41824
|
if (this._isSupportVideo && (null == videoConfig ? void 0 : videoConfig.videoOnDefault)) await this.engine.startVideoCapture(devices.videoInputs[0].deviceId);
|
41730
41825
|
if (this._isSupportVideo) this.engine.setLocalVideoPlayer(StreamIndex$1.STREAM_INDEX_MAIN, {
|
@@ -41877,7 +41972,7 @@ class RealtimeClient extends RealtimeEventHandler {
|
|
41877
41972
|
// Step3 bind engine events
|
41878
41973
|
this._client.bindEngineEvents();
|
41879
41974
|
this._client.on(event_handler_EventNames.ALL, (eventName, data)=>{
|
41880
|
-
this.dispatch(eventName, data);
|
41975
|
+
this.dispatch(eventName, data, false);
|
41881
41976
|
});
|
41882
41977
|
if (this._config.suppressStationaryNoise) {
|
41883
41978
|
await this._client.enableAudioNoiseReduction();
|
@@ -41888,14 +41983,15 @@ class RealtimeClient extends RealtimeEventHandler {
|
|
41888
41983
|
this._client.changeAIAnsExtension(true);
|
41889
41984
|
this.dispatch(event_handler_EventNames.SUPPRESS_NON_STATIONARY_NOISE, {});
|
41890
41985
|
}
|
41891
|
-
var _this__config_audioMutedDefault, _this__config_videoConfig_videoOnDefault;
|
41986
|
+
var _this__config_audioMutedDefault, _this__config_videoConfig_videoOnDefault, _this__config_isAutoSubscribeAudio;
|
41892
41987
|
// Step4 join room
|
41893
41988
|
await this._client.joinRoom({
|
41894
41989
|
token: roomInfo.token,
|
41895
41990
|
roomId: roomInfo.room_id,
|
41896
41991
|
uid: roomInfo.uid,
|
41897
41992
|
audioMutedDefault: null !== (_this__config_audioMutedDefault = this._config.audioMutedDefault) && void 0 !== _this__config_audioMutedDefault && _this__config_audioMutedDefault,
|
41898
|
-
videoOnDefault: null === (_this__config_videoConfig_videoOnDefault = null === (_this__config_videoConfig = this._config.videoConfig) || void 0 === _this__config_videoConfig ? void 0 : _this__config_videoConfig.videoOnDefault) || void 0 === _this__config_videoConfig_videoOnDefault || _this__config_videoConfig_videoOnDefault
|
41993
|
+
videoOnDefault: null === (_this__config_videoConfig_videoOnDefault = null === (_this__config_videoConfig = this._config.videoConfig) || void 0 === _this__config_videoConfig ? void 0 : _this__config_videoConfig.videoOnDefault) || void 0 === _this__config_videoConfig_videoOnDefault || _this__config_videoConfig_videoOnDefault,
|
41994
|
+
isAutoSubscribeAudio: null === (_this__config_isAutoSubscribeAudio = this._config.isAutoSubscribeAudio) || void 0 === _this__config_isAutoSubscribeAudio || _this__config_isAutoSubscribeAudio
|
41899
41995
|
});
|
41900
41996
|
// Step5 create local stream
|
41901
41997
|
await this._client.createLocalStream(roomInfo.uid, this._config.videoConfig);
|
@@ -41907,7 +42003,6 @@ class RealtimeClient extends RealtimeEventHandler {
|
|
41907
42003
|
token: roomInfo.token,
|
41908
42004
|
appId: roomInfo.app_id
|
41909
42005
|
});
|
41910
|
-
this._log('dispatch client.connected event');
|
41911
42006
|
}
|
41912
42007
|
/**
|
41913
42008
|
* en: Interrupt the current conversation
|
@@ -41917,7 +42012,6 @@ class RealtimeClient extends RealtimeEventHandler {
|
|
41917
42012
|
var _this__client;
|
41918
42013
|
await (null === (_this__client = this._client) || void 0 === _this__client ? void 0 : _this__client.stop());
|
41919
42014
|
this.dispatch(event_handler_EventNames.INTERRUPTED, {});
|
41920
|
-
this._log('dispatch client.interrupted event');
|
41921
42015
|
}
|
41922
42016
|
/**
|
41923
42017
|
* en: Disconnect from the current session
|
@@ -42039,6 +42133,7 @@ class RealtimeClient extends RealtimeEventHandler {
|
|
42039
42133
|
* 可选,默认是否抑制静态噪声,默认值为 false。
|
42040
42134
|
* @param config.suppressNonStationaryNoise - Optional, suppress non-stationary noise, defaults to false. |
|
42041
42135
|
* 可选,默认是否抑制非静态噪声,默认值为 false。
|
42136
|
+
* @param config.isAutoSubscribeAudio - Optional, whether to automatically subscribe to bot reply audio streams, defaults to true. |
|
42042
42137
|
*/ constructor(config){
|
42043
42138
|
super(config.debug), this._client = null, this.isConnected = false, this._isTestEnv = false, this._isSupportVideo = false;
|
42044
42139
|
this._config = config;
|
package/dist/types/client.d.ts
CHANGED
@@ -21,6 +21,7 @@ export declare class EngineClient extends RealtimeEventHandler {
 uid: string;
 audioMutedDefault?: boolean;
 videoOnDefault?: boolean;
+isAutoSubscribeAudio?: boolean;
 }): Promise<void>;
 setAudioInputDevice(deviceId: string): Promise<void>;
 setAudioOutputDevice(deviceId: string): Promise<void>;
package/dist/types/event-handler.d.ts
CHANGED
@@ -88,9 +88,44 @@ export declare enum EventNames {
 * en: Bot left
 * zh: Bot 离开
 */
-BOT_LEAVE = "server.bot.leave"
+BOT_LEAVE = "server.bot.leave",
+/**
+* en: Audio speech started
+* zh: 开始说话
+*/
+AUDIO_AGENT_SPEECH_STARTED = "server.audio.agent.speech_started",
+/**
+* en: Audio speech stopped
+* zh: 停止说话
+*/
+AUDIO_SPEECH_STOPPED = "server.audio.speech_stopped",
+/**
+* en: Server error
+* zh: 服务端错误
+*/
+SERVER_ERROR = "server.error",
+/**
+* en: User speech started
+* zh: 用户开始说话
+*/
+AUDIO_USER_SPEECH_STARTED = "server.audio.user.speech_started",
+/**
+* en: User speech stopped
+* zh: 用户停止说话
+*/
+AUDIO_USER_SPEECH_STOPPED = "server.audio.user.speech_stopped",
+/**
+* en: User successfully enters the room
+* zh: 用户成功进入房间后,会收到该事件
+*/
+SESSION_CREATED = "server.session.created",
+/**
+* en: Session updated
+* zh: 会话更新
+*/
+SESSION_UPDATE = "server.session.update"
 }
-type EventCallback = (eventName: string, event: unknown) => void;
+export type EventCallback = (eventName: string, event: unknown) => void;
 export declare class RealtimeEventHandler {
 private eventHandlers;
 protected _debug: boolean;
@@ -99,7 +134,6 @@ export declare class RealtimeEventHandler {
 on(eventName: string, callback: EventCallback): EventCallback;
 off(eventName: string, callback: EventCallback): void;
 private _dispatchToHandlers;
-dispatch(eventName: string, event: unknown): void;
+dispatch(eventName: string, event: unknown, consoleLog?: boolean): void;
 _log(message: string): void;
 }
-export {};
package/dist/types/index.d.ts
CHANGED
@@ -22,6 +22,7 @@ export interface RealtimeClientConfig {
 suppressStationaryNoise?: boolean /** optional, Suppress stationary noise, defaults to false */;
 suppressNonStationaryNoise?: boolean /** optional, Suppress non-stationary noise, defaults to false */;
 videoConfig?: VideoConfig /** optional, Video configuration */;
+isAutoSubscribeAudio?: boolean /** optional, Whether to automatically subscribe to bot reply audio streams, defaults to true */;
 }
 declare class RealtimeClient extends RealtimeEventHandler {
 private _config;
@@ -59,6 +60,7 @@ declare class RealtimeClient extends RealtimeEventHandler {
 * 可选,默认是否抑制静态噪声,默认值为 false。
 * @param config.suppressNonStationaryNoise - Optional, suppress non-stationary noise, defaults to false. |
 * 可选,默认是否抑制非静态噪声,默认值为 false。
+* @param config.isAutoSubscribeAudio - Optional, whether to automatically subscribe to bot reply audio streams, defaults to true. |
 */
 constructor(config: RealtimeClientConfig);
 /**
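The new isAutoSubscribeAudio option lets callers opt out of automatically subscribing to the bot's reply audio stream; it defaults to true and is passed through to the RTC engine's joinRoom options, as the dist/esm/index.js hunks above show. A hedged sketch (fields other than isAutoSubscribeAudio are assumed placeholders):

import { RealtimeClient } from '@coze/realtime-api';

const client = new RealtimeClient({
  accessToken: 'pat_***',      // assumed field name
  botId: 'your_bot_id',        // assumed field name
  isAutoSubscribeAudio: false, // new in 1.0.2: do not auto-subscribe to bot reply audio
});

await client.connect();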
package/dist/types/utils.d.ts
CHANGED
@@ -16,9 +16,10 @@ export declare const checkPermission: ({ audio, video, }?: {
 * Get audio devices
 * @returns Promise<AudioDevices> Object containing arrays of audio input and output devices
 */
-export declare const getAudioDevices: (
+export declare const getAudioDevices: ({ video, }?: {
+video?: boolean;
+}) => Promise<{
 audioInputs: MediaDeviceInfo[];
 audioOutputs: MediaDeviceInfo[];
 videoInputs: MediaDeviceInfo[];
-videoOutputs: MediaDeviceInfo[];
 }>;
|