wechaty-web-panel 1.6.40 → 1.6.42

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/CHANGELOG.md +6 -0
  2. package/dist/cjs/src/botInstance/dify.js +0 -1
  3. package/dist/cjs/src/botInstance/sdk/difyClient.js +1 -1
  4. package/dist/cjs/src/common/index.d.ts +1 -1
  5. package/dist/cjs/src/common/index.js +19 -1
  6. package/dist/cjs/src/common/multiReply.d.ts +5 -20
  7. package/dist/cjs/src/common/multiReply.js +12 -1
  8. package/dist/cjs/src/handlers/on-heartbeat.js +12 -7
  9. package/dist/cjs/src/handlers/on-message.js +100 -15
  10. package/dist/cjs/src/package-json.js +1 -1
  11. package/dist/cjs/src/proxy/mqtt.js +1 -1
  12. package/dist/cjs/src/proxy/multimodal.d.ts +19 -0
  13. package/dist/cjs/src/proxy/multimodal.js +92 -0
  14. package/dist/cjs/src/proxy/superagent.js +24 -20
  15. package/dist/cjs/src/service/gpt4vService.js +30 -50
  16. package/dist/cjs/src/service/msg-filters.d.ts +9 -0
  17. package/dist/cjs/src/service/msg-filters.js +114 -75
  18. package/dist/esm/src/botInstance/dify.js +0 -1
  19. package/dist/esm/src/botInstance/sdk/difyClient.js +1 -1
  20. package/dist/esm/src/common/index.d.ts +1 -1
  21. package/dist/esm/src/common/index.js +19 -1
  22. package/dist/esm/src/common/multiReply.d.ts +5 -20
  23. package/dist/esm/src/common/multiReply.js +12 -1
  24. package/dist/esm/src/handlers/on-heartbeat.js +12 -7
  25. package/dist/esm/src/handlers/on-message.js +110 -25
  26. package/dist/esm/src/package-json.js +1 -1
  27. package/dist/esm/src/proxy/mqtt.js +1 -1
  28. package/dist/esm/src/proxy/multimodal.d.ts +19 -0
  29. package/dist/esm/src/proxy/multimodal.js +84 -0
  30. package/dist/esm/src/proxy/superagent.js +24 -20
  31. package/dist/esm/src/service/gpt4vService.js +31 -51
  32. package/dist/esm/src/service/msg-filters.d.ts +9 -0
  33. package/dist/esm/src/service/msg-filters.js +120 -81
  34. package/package.json +1 -1
package/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
1
1
  ## 更新日志
2
2
 
3
+ ### V1.6.42(2024-02-06)
4
+ 1、修复图像识别的问题
5
+
6
+ ### V1.6.41(2024-02-06)
7
+ 1、添加语音识别及多模态
8
+
3
9
  ### V1.6.40(2024-01-25)
4
10
  1、支持 Dify 的智能助手模式,支持生成图片和识图
5
11
  2、使用 Dify 可以开启联网模式,网站内容总结
@@ -79,7 +79,6 @@ class DifyAi {
79
79
  return [{ type: 1, content: '上下文已重置' }];
80
80
  }
81
81
  }
82
- console.log('this.chatOption[uid]', this.chatOption[uid]);
83
82
  const { conversationId, text, files } = systemMessage ? await this.difyChat.sendMessage(content, { ...this.chatOption[uid], systemMessage, timeoutMs: this.config.timeoutMs * 1000 || 80 * 1000, user: uid }) : await this.difyChat.sendMessage(content, { ...this.chatOption[uid], timeoutMs: this.config.timeoutMs * 1000 || 80 * 1000, user: uid });
84
83
  if (this.config.filter) {
85
84
  const censor = await this.contentCensor.checkText(text);
@@ -179,7 +179,7 @@ class ChatClient extends DifyClient {
179
179
  res = JSON.parse(stream.substring(6)) || {};
180
180
  }
181
181
  catch (e) {
182
- console.log('json 解析错误,不影响输出', e);
182
+ // console.log('json 解析错误,不影响输出', e)
183
183
  return;
184
184
  }
185
185
  if (!res.event || res.event === 'error' || res.status === 400) {
@@ -65,7 +65,7 @@ export function addRoom(that: any, contact: any, roomName: any, replys: any): Pr
65
65
  * @param contact
66
66
  * @param msg
67
67
  * @param isRoom
68
- * type 1 文字 2 图片url 3 图片base64 4 url链接 5 小程序 6 名片
68
+ * type 1 文字 2 图片url 3 图片base64 4 url链接 5 小程序 6 名片 7 富文本 8 语音
69
69
  */
70
70
  export function contactSay(contact: any, msg: any, isRoom?: boolean): Promise<void>;
71
71
  /**
@@ -272,6 +272,15 @@ async function roomSay(room, contact, msg) {
272
272
  });
273
273
  await room.say(miniProgram);
274
274
  }
275
+ else if (msg.type === 8 && msg.url && msg.voiceLength) {
276
+ const fileBox = file_box_1.FileBox.fromUrl(msg.url);
277
+ fileBox.mimeType = "audio/silk";
278
+ fileBox.mediaType = "audio/silk";
279
+ fileBox.metadata = {
280
+ voiceLength: msg.voiceLength,
281
+ };
282
+ await room.say(fileBox);
283
+ }
275
284
  }
276
285
  catch (e) {
277
286
  console.log('群回复错误', e);
@@ -283,7 +292,7 @@ exports.roomSay = roomSay;
283
292
  * @param contact
284
293
  * @param msg
285
294
  * @param isRoom
286
- * type 1 文字 2 图片url 3 图片base64 4 url链接 5 小程序 6 名片
295
+ * type 1 文字 2 图片url 3 图片base64 4 url链接 5 小程序 6 名片 7 富文本 8 语音
287
296
  */
288
297
  async function contactSay(contact, msg, isRoom = false) {
289
298
  const config = await (0, configDb_js_1.allConfig)();
@@ -343,6 +352,15 @@ async function contactSay(contact, msg, isRoom = false) {
343
352
  });
344
353
  await contact.say(miniProgram);
345
354
  }
355
+ else if (msg.type === 8 && msg.url && msg.voiceLength) {
356
+ const fileBox = file_box_1.FileBox.fromUrl(msg.url);
357
+ fileBox.mimeType = "audio/silk";
358
+ fileBox.mediaType = "audio/silk";
359
+ fileBox.metadata = {
360
+ voiceLength: msg.voiceLength,
361
+ };
362
+ await contact.say(fileBox);
363
+ }
346
364
  }
347
365
  catch (e) {
348
366
  console.log('私聊发送消息失败', e);
@@ -4,18 +4,9 @@ export class BotManage {
4
4
  userBotDict: {};
5
5
  userTimeDict: {};
6
6
  maxuser: number;
7
- creatBot(username: any, content: any): Promise<{
8
- type: number;
9
- content: any;
10
- }[] | undefined>;
11
- updateBot(username: any, content: any, config: any): Promise<{
12
- type: number;
13
- content: any;
14
- }[] | undefined>;
15
- talk(username: any, content: any, config: any): Promise<{
16
- type: number;
17
- content: any;
18
- }[] | undefined>;
7
+ creatBot(username: any, content: any): Promise<any>;
8
+ updateBot(username: any, content: any, config: any): Promise<any>;
9
+ talk(username: any, content: any, config: any): Promise<any>;
19
10
  removeBot(uid: any): void;
20
11
  getBotList(): {};
21
12
  /**
@@ -23,17 +14,11 @@ export class BotManage {
23
14
  * @param {*} username 用户名
24
15
  * @returns
25
16
  */
26
- generateImage(username: any, question: any, config: any): Promise<{
27
- type: number;
28
- content: any;
29
- }[]>;
17
+ generateImage(username: any, question: any, config: any): Promise<any>;
30
18
  getImage(username: any, content: any, step: any): {
31
19
  type: number;
32
20
  content: string;
33
21
  }[];
34
- run(userId: any, content: any, config: any): Promise<{
35
- type: number;
36
- content: any;
37
- }[] | undefined>;
22
+ run(userId: any, content: any, config: any): Promise<any>;
38
23
  }
39
24
  //# sourceMappingURL=multiReply.d.ts.map
@@ -2,6 +2,7 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.BotManage = void 0;
4
4
  const gpt4v_js_1 = require("../botInstance/gpt4v.js");
5
+ const multimodal_js_1 = require("../proxy/multimodal.js");
5
6
  class MultiReply {
6
7
  constructor() {
7
8
  this.step = 0; // 当前step
@@ -84,7 +85,7 @@ class BotManage {
84
85
  const replys = await (0, gpt4v_js_1.getDify4vReply)(images, question, config, username);
85
86
  return replys;
86
87
  }
87
- else {
88
+ else if (config.robotType === 6) {
88
89
  for (let id of this.userBotDict[username].imageIds) {
89
90
  const msg = await this.Bot.Message.find({ id });
90
91
  const file = await msg.toFileBox();
@@ -94,6 +95,16 @@ class BotManage {
94
95
  const replys = await (0, gpt4v_js_1.get4vReply)(images, question, config);
95
96
  return replys;
96
97
  }
98
+ else {
99
+ for (let id of this.userBotDict[username].imageIds) {
100
+ const msg = await this.Bot.Message.find({ id });
101
+ const file = await msg.toFileBox();
102
+ const base = await file.toDataURL();
103
+ images.push(base);
104
+ }
105
+ const replys = await (0, multimodal_js_1.getImageVision)(images, question, config);
106
+ return replys;
107
+ }
97
108
  }
98
109
  getImage(username, content, step) {
99
110
  if (this.userBotDict[username].imageIds.length === 5) {
@@ -3,14 +3,19 @@ Object.defineProperty(exports, "__esModule", { value: true });
3
3
  const aibotk_js_1 = require("../proxy/aibotk.js");
4
4
  const index_js_1 = require("../lib/index.js");
5
5
  async function onHeartBeat(str) {
6
- if (!str) {
7
- await (0, aibotk_js_1.sendHeartBeat)('dead');
6
+ try {
7
+ if (!str) {
8
+ await (0, aibotk_js_1.sendHeartBeat)('dead');
9
+ }
10
+ else if (str.type === 'scan') {
11
+ await (0, aibotk_js_1.sendHeartBeat)('scan');
12
+ }
13
+ else if (str.includes('heartbeat')) {
14
+ (0, index_js_1.throttle)((0, aibotk_js_1.sendHeartBeat)('live'), 30000);
15
+ }
8
16
  }
9
- else if (str.type === 'scan') {
10
- await (0, aibotk_js_1.sendHeartBeat)('scan');
11
- }
12
- else if (str.includes('heartbeat')) {
13
- (0, index_js_1.throttle)((0, aibotk_js_1.sendHeartBeat)('live'), 30000);
17
+ catch (e) {
18
+ console.log('心跳更新失败', e);
14
19
  }
15
20
  }
16
21
  exports.default = onHeartBeat;
@@ -10,10 +10,12 @@ const roomDb_js_1 = require("../db/roomDb.js");
10
10
  const hook_js_1 = require("../common/hook.js");
11
11
  const puppet_type_js_1 = require("../const/puppet-type.js");
12
12
  const gpt4vService_js_1 = require("../service/gpt4vService.js");
13
+ const multimodal_js_1 = require("../proxy/multimodal.js");
14
+ const msg_filters_js_1 = require("../service/msg-filters.js");
13
15
  const ignoreRecord = [
14
- { type: "include", word: "加入了群聊" },
15
- { type: "include", word: "与群里其他人都不是朋友关系" },
16
- { type: "include", word: "收到一条暂不支持的消息类型" }
16
+ { type: 'include', word: '加入了群聊' },
17
+ { type: 'include', word: '与群里其他人都不是朋友关系' },
18
+ { type: 'include', word: '收到一条暂不支持的消息类型' }
17
19
  ];
18
20
  /**
19
21
  * 检测是否属于忽略的消息
@@ -26,7 +28,7 @@ function checkIgnore(msg, list) {
26
28
  for (let item of list) {
27
29
  const word = item.word;
28
30
  const type = item.type;
29
- if ((type === "start" && msg.startsWith(word)) || (type === "end" && msg.endsWith(word)) || (type === "equal" && msg === word) || (type === "include" && msg.includes(word))) {
31
+ if ((type === 'start' && msg.startsWith(word)) || (type === 'end' && msg.endsWith(word)) || (type === 'equal' && msg === word) || (type === 'include' && msg.includes(word))) {
30
32
  return true;
31
33
  }
32
34
  }
@@ -46,7 +48,7 @@ async function dispatchFriendFilterByMsgType(that, msg) {
46
48
  const contact = msg.talker(); // 发消息人
47
49
  const name = await contact.name();
48
50
  const isOfficial = contact.type() === that.Contact.Type.Official;
49
- let content = "";
51
+ let content = '';
50
52
  let replys = [];
51
53
  const res = await (0, hook_js_1.privateForward)({ that, msg, name, config });
52
54
  if (res) {
@@ -84,7 +86,44 @@ async function dispatchFriendFilterByMsgType(that, msg) {
84
86
  }
85
87
  }
86
88
  else {
87
- console.log("公众号消息");
89
+ console.log('公众号消息');
90
+ }
91
+ break;
92
+ case that.Message.Type.Audio:
93
+ let finalConfig = await (0, msg_filters_js_1.getCustomConfig)({ name, id: contact.id, roomName: '', roomId: '', room: false, type: 'openWhisper' });
94
+ if (finalConfig) {
95
+ const audioFileBox = await msg.toFileBox();
96
+ const text = await (0, multimodal_js_1.getVoiceText)(audioFileBox, finalConfig.botConfig.whisperConfig);
97
+ console.log('语音解析结果', text);
98
+ const keyword = finalConfig.botConfig.whisperConfig?.keywords?.length ? finalConfig.botConfig?.whisperConfig.keywords?.find((item) => text.includes(item)) : true;
99
+ const isIgnore = checkIgnore(content.trim(), aibotConfig.ignoreMessages);
100
+ if (text.trim() && !isIgnore && keyword) {
101
+ const gpt4vReplys = await (0, gpt4vService_js_1.getGpt4vChat)({
102
+ that,
103
+ room: false,
104
+ roomId: '',
105
+ uniqueId: contact.id,
106
+ id: contact.id,
107
+ roomName: '',
108
+ isMention: false,
109
+ name,
110
+ msgContent: { type: 1, content: text }
111
+ });
112
+ if (gpt4vReplys.length) {
113
+ for (let reply of gpt4vReplys) {
114
+ await index_js_1.contactSay.call(that, contact, reply);
115
+ }
116
+ return;
117
+ }
118
+ replys = await (0, reply_js_1.getContactTextReply)(that, contact, text.trim());
119
+ for (let reply of replys) {
120
+ await (0, index_js_2.delay)(1000);
121
+ await index_js_1.contactSay.call(that, contact, reply);
122
+ }
123
+ }
124
+ else {
125
+ console.log('语音解析结果没有匹配到需要回复的关键词');
126
+ }
88
127
  }
89
128
  break;
90
129
  case that.Message.Type.Emoticon:
@@ -143,7 +182,7 @@ async function dispatchFriendFilterByMsgType(that, msg) {
143
182
  }
144
183
  }
145
184
  catch (error) {
146
- console.log("监听消息错误", error);
185
+ console.log('监听消息错误', error);
147
186
  }
148
187
  }
149
188
  /**
@@ -164,8 +203,8 @@ async function dispatchRoomFilterByMsgType(that, room, msg) {
164
203
  const isFriend = contact.friend();
165
204
  const type = msg.type();
166
205
  const receiver = msg.to();
167
- let content = "";
168
- let replys = "";
206
+ let content = '';
207
+ let replys = '';
169
208
  let contactId = contact.id;
170
209
  let contactAvatar = await contact.avatar();
171
210
  const userSelfName = that.currentUser?.name() || that.userSelf()?.name();
@@ -174,7 +213,7 @@ async function dispatchRoomFilterByMsgType(that, room, msg) {
174
213
  content = msg.text();
175
214
  const mentionSelf = await msg.mentionSelf() || content.includes(`@${userSelfName}`);
176
215
  const receiverName = receiver?.name();
177
- content = content.replace('@' + receiverName, "").replace('@' + userSelfName, "").replace(/@[^,,::\s@]+/g, "").trim();
216
+ content = content.replace('@' + receiverName, '').replace('@' + userSelfName, '').replace(/@[^,,::\s@]+/g, '').trim();
178
217
  console.log(`群名: ${roomName} 发消息人: ${contactName} 内容: ${content} | 机器人被@:${mentionSelf ? '是' : '否'}`);
179
218
  // 检测是否需要这条消息
180
219
  const isIgnore = checkIgnore(content, aibotConfig.ignoreMessages);
@@ -215,9 +254,9 @@ async function dispatchRoomFilterByMsgType(that, room, msg) {
215
254
  await index_js_1.roomSay.call(that, room, contact, reply);
216
255
  }
217
256
  const cloudRoom = config.cloudRoom;
218
- if (role === "vip" && cloudRoom.includes(roomName) && !checkIgnore(content, ignoreRecord)) {
257
+ if (role === 'vip' && cloudRoom.includes(roomName) && !checkIgnore(content, ignoreRecord)) {
219
258
  const regex = /(<([^>]+)>)/ig;
220
- content = content.replace(regex, "");
259
+ content = content.replace(regex, '');
221
260
  void (0, roomDb_js_1.addRoomRecord)({
222
261
  roomName,
223
262
  roomId: room.id,
@@ -257,6 +296,52 @@ async function dispatchRoomFilterByMsgType(that, room, msg) {
257
296
  break;
258
297
  case that.Message.Type.Audio:
259
298
  console.log(`群名: ${roomName} 发消息人: ${contactName} 发了一个语音`);
299
+ let finalConfig = await (0, msg_filters_js_1.getCustomConfig)({ name: contactName, id: contactId, roomName, roomId: room.id, room, type: 'openWhisper' });
300
+ if (finalConfig) {
301
+ const audioFileBox = await msg.toFileBox();
302
+ const text = await (0, multimodal_js_1.getVoiceText)(audioFileBox, finalConfig.botConfig.whisperConfig);
303
+ console.log('语音解析结果', text);
304
+ const keyword = finalConfig.botConfig.whisperConfig?.keywords?.length ? finalConfig.botConfig?.whisperConfig?.keywords?.find((item) => text.includes(item)) : true;
305
+ const isIgnore = checkIgnore(content.trim(), aibotConfig.ignoreMessages);
306
+ if (text.trim() && !isIgnore && keyword) {
307
+ const gpt4vReplys = await (0, gpt4vService_js_1.getGpt4vChat)({
308
+ that,
309
+ room,
310
+ roomId: room.id,
311
+ id: contactId,
312
+ uniqueId: `${room.id}-${contactId}`,
313
+ roomName,
314
+ isMention: true,
315
+ name: contactName,
316
+ msgContent: { type: 1, content: text }
317
+ });
318
+ if (gpt4vReplys.length) {
319
+ for (let reply of gpt4vReplys) {
320
+ await index_js_1.roomSay.call(that, room, contact, reply);
321
+ }
322
+ return;
323
+ }
324
+ replys = await await (0, reply_js_1.getRoomTextReply)({
325
+ that,
326
+ content: text,
327
+ isFriend,
328
+ name: contactName,
329
+ id: contactId,
330
+ roomId: room.id,
331
+ avatar: contactAvatar,
332
+ room,
333
+ roomName,
334
+ isMention: true
335
+ });
336
+ for (let reply of replys) {
337
+ await (0, index_js_2.delay)(1000);
338
+ await index_js_1.roomSay.call(that, room, contact, reply);
339
+ }
340
+ }
341
+ else {
342
+ console.log('语音解析结果没有匹配到需要回复的关键词');
343
+ }
344
+ }
260
345
  break;
261
346
  case that.Message.Type.MiniProgram:
262
347
  console.log(`群名: ${roomName} 发消息人: ${contactName} 发了一个小程序`);
@@ -285,7 +370,7 @@ async function dispatchRoomFilterByMsgType(that, room, msg) {
285
370
  }
286
371
  }
287
372
  catch (e) {
288
- console.log("error", e);
373
+ console.log('error', e);
289
374
  }
290
375
  }
291
376
  async function onMessage(msg) {
@@ -301,7 +386,7 @@ async function onMessage(msg) {
301
386
  const contact = msg.talker(); // 发消息人
302
387
  const contactName = contact.name();
303
388
  await dispatchRoomFilterByMsgType(this, room, msg);
304
- if (role === "vip" && roomName !== contactName) {
389
+ if (role === 'vip' && roomName !== contactName) {
305
390
  const roomAsyncList = config.roomAsyncList || [];
306
391
  if (roomAsyncList.length) {
307
392
  await (0, room_async_service_js_1.dispatchAsync)(this, msg, roomAsyncList);
@@ -313,7 +398,7 @@ async function onMessage(msg) {
313
398
  }
314
399
  }
315
400
  catch (e) {
316
- console.log("监听消息失败", e);
401
+ console.log('监听消息失败', e);
317
402
  }
318
403
  }
319
404
  exports.default = onMessage;
@@ -6,7 +6,7 @@ exports.packageJson = void 0;
6
6
  */
7
7
  exports.packageJson = {
8
8
  "name": "wechaty-web-panel",
9
- "version": "1.6.40",
9
+ "version": "1.6.42",
10
10
  "description": "智能微秘书插件",
11
11
  "exports": {
12
12
  ".": {
@@ -204,7 +204,7 @@ async function initMqtt(that) {
204
204
  console.log(`获取到输入的验证码:${global_js_1.default.getVerifyCode()},正在填入`);
205
205
  const verifyCode = global_js_1.default.getVerifyCode(); // 通过一些途径输入验证码
206
206
  try {
207
- await that.enterVerifyCode(id, verifyCode); // 如果没抛错,则说明输入成功,会推送登录事件
207
+ await that.enterVerifyCode(global_js_1.default.getVerifyId(), verifyCode); // 如果没抛错,则说明输入成功,会推送登录事件
208
208
  }
209
209
  catch (e) {
210
210
  console.log('验证码校验错误:', e.message);
@@ -0,0 +1,19 @@
1
+ /**
2
+ * 语音转换文字
3
+ * @param file
4
+ * @param aiConfig
5
+ * @returns {Promise<*|string>}
6
+ */
7
+ export function getVoiceText(file: any, aiConfig: any): Promise<any | string>;
8
+ /**
9
+ * 识别图像
10
+ * @param images
11
+ * @param question
12
+ * @param config
13
+ * @returns {Promise<*|[{type: number, content: string}]>}
14
+ */
15
+ export function getImageVision(images: any, question: any, config: any): Promise<any | [{
16
+ type: number;
17
+ content: string;
18
+ }]>;
19
+ //# sourceMappingURL=multimodal.d.ts.map
@@ -0,0 +1,92 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.getImageVision = exports.getVoiceText = void 0;
7
+ const axios_1 = __importDefault(require("axios"));
8
+ const form_data_1 = __importDefault(require("form-data"));
9
+ const config_js_1 = require("./config.js");
10
+ const aiDb_js_1 = require("../db/aiDb.js");
11
+ /**
12
+ * 语音转换文字
13
+ * @param file
14
+ * @param aiConfig
15
+ * @returns {Promise<*|string>}
16
+ */
17
+ async function getVoiceText(file, aiConfig) {
18
+ try {
19
+ const env = await (0, aiDb_js_1.getAibotConfig)();
20
+ const { apiKey } = env;
21
+ const base64 = await file.toBase64();
22
+ const readable = Buffer.from(base64, 'base64');
23
+ const formData = new form_data_1.default();
24
+ formData.append('file', readable, { contentType: file.mediaType, filename: file.name });
25
+ formData.append('aiConfig', JSON.stringify(aiConfig));
26
+ let config = {
27
+ method: 'post',
28
+ maxBodyLength: Infinity,
29
+ timeout: 30000,
30
+ url: config_js_1.AIBOTK_OUTAPI + '/voice/text',
31
+ headers: {
32
+ ...formData.getHeaders(),
33
+ Authorization: `Bearer ${apiKey}`,
34
+ },
35
+ data: formData
36
+ };
37
+ const result = await axios_1.default.request(config);
38
+ if (result.data.code === 200) {
39
+ return result.data.data;
40
+ }
41
+ else {
42
+ console.log('语音转换出错', result.data.message);
43
+ return '';
44
+ }
45
+ }
46
+ catch (e) {
47
+ console.log(`语音转换出错: ${e}`);
48
+ return '';
49
+ }
50
+ }
51
+ exports.getVoiceText = getVoiceText;
52
+ /**
53
+ * 识别图像
54
+ * @param images
55
+ * @param question
56
+ * @param config
57
+ * @returns {Promise<*|[{type: number, content: string}]>}
58
+ */
59
+ async function getImageVision(images, question, config) {
60
+ try {
61
+ const env = await (0, aiDb_js_1.getAibotConfig)();
62
+ const { apiKey } = env;
63
+ const reqConfig = {
64
+ method: 'post',
65
+ maxBodyLength: Infinity,
66
+ timeout: 30000,
67
+ url: config_js_1.AIBOTK_OUTAPI + '/image/vision',
68
+ headers: {
69
+ Authorization: `Bearer ${apiKey}`,
70
+ },
71
+ data: {
72
+ images,
73
+ question,
74
+ config
75
+ }
76
+ };
77
+ const result = await axios_1.default.request(reqConfig);
78
+ if (result.data.code === 200) {
79
+ return result.data.data;
80
+ }
81
+ else {
82
+ console.log('识别图像出错', result.data.message);
83
+ return [{ type: 1, content: '' }];
84
+ }
85
+ }
86
+ catch (e) {
87
+ console.log('识别图像出错', e);
88
+ return [{ type: 1, content: '' }];
89
+ }
90
+ }
91
+ exports.getImageVision = getImageVision;
92
+ //# sourceMappingURL=multimodal.js.map
@@ -70,19 +70,21 @@ function get({ url, params, contentType = 'application/json', platform = 'tx', a
70
70
  console.log('请求出错', err);
71
71
  reject(err);
72
72
  }
73
- if (spider) {
74
- // 如果是爬取内容,直接返回页面html
75
- resolve(res.text);
76
- }
77
73
  else {
78
- // 如果是非爬虫,返回格式化后的内容
79
- res = res && res.text && JSON.parse(res.text) || {};
80
- if (platform !== 'chuan') {
81
- if ((res.code !== 200 && platform === 'tx') || (res.code !== 200 && platform === 'aibot') || (res.code !== 0 && platform === 'qi') || (res.code !== 100000 && platform === 'tl')) {
82
- console.error(`接口${url}请求失败`, res.msg || res.text);
74
+ if (spider) {
75
+ // 如果是爬取内容,直接返回页面html
76
+ resolve(res.text);
77
+ }
78
+ else {
79
+ // 如果是非爬虫,返回格式化后的内容
80
+ res = res && res.text && JSON.parse(res.text) || {};
81
+ if (platform !== 'chuan') {
82
+ if ((res.code !== 200 && platform === 'tx') || (res.code !== 200 && platform === 'aibot') || (res.code !== 0 && platform === 'qi') || (res.code !== 100000 && platform === 'tl')) {
83
+ console.error(`接口${url}请求失败`, res.msg || res.text);
84
+ }
83
85
  }
86
+ resolve(res);
84
87
  }
85
- resolve(res);
86
88
  }
87
89
  });
88
90
  });
@@ -106,19 +108,21 @@ function post({ url, params, contentType = 'application/json', authorization = '
106
108
  console.log('请求出错', err);
107
109
  reject(err);
108
110
  }
109
- if (spider) {
110
- // 如果是爬取内容,直接返回页面html
111
- resolve(res.text);
112
- }
113
111
  else {
114
- // 如果是非爬虫,返回格式化后的内容
115
- res = res && res.text && JSON.parse(res.text) || {};
116
- if (platform !== 'chuan') {
117
- if ((res.code !== 200 && platform === 'tx') || (res.code !== 200 && platform === 'aibot') || (res.code !== 100000 && platform === 'tl')) {
118
- console.error(`接口请求失败${url}`, res.msg || res.text || res.error);
112
+ if (spider) {
113
+ // 如果是爬取内容,直接返回页面html
114
+ resolve(res.text);
115
+ }
116
+ else {
117
+ // 如果是非爬虫,返回格式化后的内容
118
+ res = res && res.text && JSON.parse(res.text) || {};
119
+ if (platform !== 'chuan') {
120
+ if ((res.code !== 200 && platform === 'tx') || (res.code !== 200 && platform === 'aibot') || (res.code !== 100000 && platform === 'tl')) {
121
+ console.error(`接口请求失败${url}`, res.msg || res.text || res.error);
122
+ }
119
123
  }
124
+ resolve(res);
120
125
  }
121
- resolve(res);
122
126
  }
123
127
  });
124
128
  });