wechaty-web-panel 1.6.112 → 1.6.113
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bot/chatgpt/index.js +235 -0
- package/dist/bot/coze/sdk/index.js +110 -0
- package/dist/bot/dify/sdk/index.js +461 -0
- package/dist/bot/dify/sdk/office.js +319 -0
- package/dist/bot/fastgpt/index.js +98 -0
- package/dist/bot/qanything/index.js +136 -0
- package/dist/botInstance/coze.js +167 -0
- package/dist/botInstance/cozev3.js +157 -0
- package/dist/botInstance/dify.js +160 -0
- package/dist/botInstance/fastgpt.js +130 -0
- package/dist/botInstance/gpt4v.js +95 -0
- package/dist/botInstance/officialOpenAi.js +186 -0
- package/dist/botInstance/qany.js +144 -0
- package/dist/botInstance/sdk/chatGPT4V.js +89 -0
- package/dist/botInstance/sdk/coze.js +200 -0
- package/dist/botInstance/sdk/difyClient.js +354 -0
- package/dist/botInstance/sdk/pTimeout.js +97 -0
- package/dist/botInstance/sdk/qanything.js +137 -0
- package/dist/botInstance/sdk/quick-lru.js +237 -0
- package/dist/common/hook.js +66 -0
- package/dist/common/index.js +513 -0
- package/dist/common/multiReply.js +158 -0
- package/dist/common/reply.js +23 -0
- package/dist/const/puppet-type.js +71 -0
- package/dist/db/aiDb.js +27 -0
- package/dist/db/aichatDb.js +84 -0
- package/dist/db/chatHistory.js +137 -0
- package/dist/db/configDb.js +97 -0
- package/dist/db/global.js +62 -0
- package/dist/db/gptConfig.js +85 -0
- package/dist/db/nedb.js +88 -0
- package/dist/db/puppetDb.js +58 -0
- package/dist/db/roomDb.js +83 -0
- package/dist/db/rssConfig.js +82 -0
- package/dist/db/rssHistory.js +88 -0
- package/dist/db/userDb.js +27 -0
- package/dist/handlers/on-callback-message.js +183 -0
- package/dist/handlers/on-error.js +5 -0
- package/dist/handlers/on-friend.js +62 -0
- package/dist/handlers/on-heartbeat.js +20 -0
- package/dist/handlers/on-login.js +58 -0
- package/dist/handlers/on-logout.js +17 -0
- package/dist/handlers/on-message.js +644 -0
- package/dist/handlers/on-ready.js +36 -0
- package/dist/handlers/on-record-message.js +56 -0
- package/dist/handlers/on-roomjoin.js +42 -0
- package/dist/handlers/on-roomleave.js +12 -0
- package/dist/handlers/on-roomtopic.js +16 -0
- package/dist/handlers/on-scan.js +64 -0
- package/dist/handlers/on-verifycode.js +42 -0
- package/dist/index.js +81 -69306
- package/dist/lib/contentCensor.js +23 -0
- package/dist/lib/index.js +562 -0
- package/dist/lib/oss.js +43 -0
- package/dist/lib/s3oss.js +33 -0
- package/dist/mcp/mcp-server.js +26 -0
- package/dist/mcp/src/config/database.js +51 -0
- package/dist/mcp/src/index.js +238 -0
- package/dist/mcp/src/mcp/schemas.js +178 -0
- package/dist/mcp/src/mcp/server.js +421 -0
- package/dist/mcp/src/mcp/streamable-server.js +690 -0
- package/dist/mcp/src/models/ChatMessage.js +151 -0
- package/dist/mcp/src/models/Friend.js +64 -0
- package/dist/mcp/src/models/Group.js +55 -0
- package/dist/mcp/src/models/GroupMember.js +67 -0
- package/dist/mcp/src/models/index.js +27 -0
- package/dist/mcp/src/scripts/migrate.js +21 -0
- package/dist/mcp/src/services/ChatDataService.js +284 -0
- package/dist/mcp/src/services/McpService.js +521 -0
- package/dist/mcp/src/services/McpTools.js +504 -0
- package/dist/mcp/streamable-examples.js +283 -0
- package/dist/mcp/streamable-server.js +79 -0
- package/dist/mcp/test-mcp.js +64 -0
- package/dist/mcp/test-streamable-server.js +86 -0
- package/dist/package-json.js +89 -0
- package/dist/proxy/aibotk.js +829 -0
- package/dist/proxy/api.js +431 -0
- package/dist/proxy/apib.js +587 -0
- package/dist/proxy/bot/chatgpt.js +38 -0
- package/dist/proxy/bot/coze.js +38 -0
- package/dist/proxy/bot/cozev3.js +38 -0
- package/dist/proxy/bot/dify.js +38 -0
- package/dist/proxy/bot/dispatch.js +81 -0
- package/dist/proxy/bot/fastgpt.js +27 -0
- package/dist/proxy/bot/qany.js +27 -0
- package/dist/proxy/config.js +14 -0
- package/dist/proxy/cozeAi.js +60 -0
- package/dist/proxy/cozeV3Ai.js +60 -0
- package/dist/proxy/difyAi.js +58 -0
- package/dist/proxy/fastgpt.js +55 -0
- package/dist/proxy/mqtt.js +275 -0
- package/dist/proxy/multimodal.js +122 -0
- package/dist/proxy/openAi.js +63 -0
- package/dist/proxy/outapi.js +62 -0
- package/dist/proxy/qAnyAi.js +57 -0
- package/dist/proxy/superagent.js +200 -0
- package/dist/proxy/tencent-open.js +255 -0
- package/dist/service/event-dispatch-service.js +309 -0
- package/dist/service/gpt4vService.js +45 -0
- package/dist/service/msg-filter-service.js +121 -0
- package/dist/service/msg-filters.js +645 -0
- package/dist/service/room-async-service.js +455 -0
- package/dist/task/index.js +535 -0
- package/dist/task/rss.js +174 -0
- package/package.json +2 -2
- package/src/package-json.js +2 -2
- package/tsconfig.json +3 -12
- package/dist/index.d.ts +0 -9
- package/tsconfig.cjs.json +0 -12
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
import { getImageVision } from "./sdk/chatGPT4V.js";
|
|
2
|
+
import { getPuppetEol, isWindowsPlatform } from '../const/puppet-type.js';
|
|
3
|
+
import { ChatClient } from './sdk/difyClient.js';
|
|
4
|
+
import { extractImageLinks } from '../lib/index.js';
|
|
5
|
+
import FormData from 'form-data';
|
|
6
|
+
/**
 * Ask the GPT-4V endpoint to describe a set of images and format the answer
 * as panel reply objects: { type: 1, content } text chunks (max 1500 chars
 * each) plus any image links extracted from the answer body.
 */
export async function get4vReply(images, question, config) {
    try {
        const onWindows = await isWindowsPlatform();
        const lineEnding = await getPuppetEol();
        // proxyPass doubles as the API base url; '' lets the SDK use its default.
        const visionConfig = { ...config, baseUrl: config.proxyPass || '' };
        const { text } = await getImageVision(images, question, visionConfig);
        // Windows puppets need platform line endings in outgoing messages.
        let body = onWindows ? text.replaceAll("\n", lineEnding) : text;
        const imageReplys = extractImageLinks(body);
        // Chop long answers into <=1500-character text messages.
        const chunks = [];
        while (body.length > 1500) {
            chunks.push(body.slice(0, 1500));
            body = body.slice(1500);
        }
        chunks.push(body);
        let replys = chunks.map(chunk => ({ type: 1, content: chunk.trim() }));
        if (imageReplys.length) {
            console.log('提取到内容中的图片', imageReplys);
            replys = replys.concat(imageReplys);
        }
        return replys;
    }
    catch (e) {
        console.log("gpt4v报错:" + e);
        return [{ type: 1, content: '图像识别失败,请确保你的账号有GPT-4V权限' }];
    }
}
|
|
40
|
+
/**
 * Image-understanding via Dify: upload each image as a local file, then send
 * the question with the uploaded file ids attached. The answer is split into
 * <=1500-char { type: 1 } text replies; image links found in the answer and
 * any files returned by Dify are appended as extra replies.
 */
export async function getDify4vReply(images, question, config, userId) {
    try {
        const onWindows = await isWindowsPlatform();
        const lineEnding = await getPuppetEol();
        const uploadedFiles = [];
        const difyClient = new ChatClient({ apiKey: config.apiKey, baseUrl: config.proxyPass, debug: config.debug });
        console.log('进入Dify图像识别模式');
        // Uploads are intentionally sequential; a single failed upload is
        // logged and skipped rather than aborting the whole request.
        for (const file of images) {
            try {
                const base64 = await file.toBase64();
                const buffer = Buffer.from(base64, 'base64');
                const formData = new FormData();
                formData.append('file', buffer, { contentType: file.mediaType, filename: file.name });
                formData.append('user', userId);
                const uploadRes = await difyClient.fileUpload(formData);
                if (uploadRes.data.id) {
                    uploadedFiles.push({ type: 'image', transfer_method: 'local_file', upload_file_id: uploadRes.data.id });
                }
            }
            catch (e) {
                console.log(`Dify 上传文件失败:${e}`);
            }
        }
        const { text, files } = await difyClient.sendMessage(question, { user: userId, timeoutMs: config.timeoutMs * 1000, files: uploadedFiles });
        let body = onWindows ? text.replaceAll("\n", lineEnding) : text;
        const imageReplys = extractImageLinks(body);
        // Chop long answers into <=1500-character text messages.
        const chunks = [];
        while (body.length > 1500) {
            chunks.push(body.slice(0, 1500));
            body = body.slice(1500);
        }
        chunks.push(body);
        let replys = chunks.map(chunk => ({ type: 1, content: chunk.trim() }));
        if (imageReplys.length) {
            console.log('提取到内容中的图片', imageReplys);
            replys = replys.concat(imageReplys);
        }
        if (files.length) {
            console.log('回复内容带文件', files);
            for (const fileUrl of files) {
                replys.push({ type: 2, url: fileUrl });
            }
        }
        return replys;
    }
    catch (e) {
        console.log("Dify 图像识别报错:" + e);
        return [{ type: 1, content: '图像识别失败,请确保你的账号有GPT-4V权限' }];
    }
}
|
|
95
|
+
//# sourceMappingURL=gpt4v.js.map
|
|
@@ -0,0 +1,186 @@
|
|
|
1
|
+
import { ChatGPTAPI } from "../bot/chatgpt/index.js";
|
|
2
|
+
import { addAichatRecord } from "../db/aichatDb.js";
|
|
3
|
+
import { getPromotInfo } from "../proxy/aibotk.js";
|
|
4
|
+
import { ContentCensor } from "../lib/contentCensor.js";
|
|
5
|
+
import { getPuppetEol, isWindowsPlatform } from '../const/puppet-type.js';
|
|
6
|
+
import { v4 as uuidv4 } from "uuid";
|
|
7
|
+
import dayjs from "dayjs";
|
|
8
|
+
import { extractImageLinks } from '../lib/index.js';
|
|
9
|
+
import { getText2Speech } from "../proxy/multimodal.js";
|
|
10
|
+
// NOTE(review): this module-level holder appears unused — the class below
// keeps its own `this.chatGPT` instance. Presumably leftover from an older
// singleton implementation; TODO confirm no external mutation and remove.
let chatGPT = null;
|
|
11
|
+
/**
 * Wrapper around the bundled ChatGPTAPI client. Keeps per-user conversation
 * state, applies optional content censoring on both question and answer,
 * optionally records the exchange and/or converts it to speech, and splits
 * long answers into <=1500-char panel replies.
 */
class OfficialOpenAi {
    /**
     * @param {object} config - bot configuration. Notable fields: token (API
     *   key), proxyPass (reverse-proxy base url), model, timeoutMs (seconds),
     *   showQuestion (echo the question in the reply), promotId (remote prompt
     *   preset id), systemMessage (prompt preset text).
     */
    constructor(config = {
        temperature: 0.8,
        top_p: 1,
        presence_penalty: 1,
        token: '', // api key
        debug: 0, // enable debug logging
        proxyPass: '', // reverse proxy address
        proxyUrl: '', // proxy address
        showQuestion: true, // echo the original question
        timeoutMs: 60, // timeout in seconds
        model: '', // model name
        promotId: '',
        systemMessage: '', // preset prompt
    }) {
        this.chatGPT = null;
        this.config = config;
        this.contentCensor = null;
        this.chatOption = {}; // per-user conversation state, keyed by uid
        this.eol = '\n';
        this.iswindows = false;
    }
    /**
     * Lazily build the ChatGPTAPI client: resolve platform EOL, load the
     * remote prompt preset, and derive token limits from the model name.
     */
    async init() {
        this.eol = await getPuppetEol();
        this.iswindows = await isWindowsPlatform();
        if (this.config.promotId) {
            const promotInfo = await getPromotInfo(this.config.promotId);
            if (promotInfo) {
                this.config.systemMessage = promotInfo.promot;
            }
        }
        if (this.config.filter) {
            this.contentCensor = new ContentCensor(this.config.filterConfig);
        }
        const baseOptions = {
            apiKey: this.config.token,
            completionParams: { model: this.config.model, temperature: this.config?.temperature || 0.8, top_p: this.config?.top_p || 1, presence_penalty: this.config?.presence_penalty || 1 },
            debug: this.config.debug,
            systemMessage: this.config.systemMessage || '',
        };
        // Hoisted: the lowercased model name is inspected many times below.
        const model = this.config.model.toLowerCase();
        // increase max token limit if use gpt-4
        if (model.includes('gpt-4')) {
            // if use 32k model
            if (model.includes('32k')) {
                baseOptions.maxModelTokens = 32768;
                baseOptions.maxResponseTokens = 8192;
            } // if use GPT-4 Turbo preview 4o
            else if (model.includes('-preview') || model.includes('-turbo') || model.includes('gpt-4o')) {
                baseOptions.maxModelTokens = 128000;
                baseOptions.maxResponseTokens = 4096;
            }
            else {
                baseOptions.maxModelTokens = 8192;
                baseOptions.maxResponseTokens = 4096;
            }
        }
        if (model.includes('gpt-3.5')) {
            if (model === 'gpt-3.5-turbo' || model.includes('16k') || model.includes('turbo-1106') || model.includes('turbo-0125')) {
                baseOptions.maxModelTokens = 16385;
                baseOptions.maxResponseTokens = 4096;
            }
            else {
                baseOptions.maxModelTokens = 4096;
                baseOptions.maxResponseTokens = 1024;
            }
        }
        if (model.includes('deepseek')) {
            baseOptions.maxModelTokens = 65792;
            baseOptions.maxResponseTokens = 8192;
        }
        // Explicit config overrides win over the model-derived defaults.
        if (this.config?.modelMaxToken) {
            baseOptions.maxModelTokens = this.config.modelMaxToken;
        }
        if (this.config?.maxToken) {
            baseOptions.maxResponseTokens = this.config.maxToken;
        }
        console.log(`请求地址:${this.config.proxyPass}`);
        this.chatGPT = new ChatGPTAPI({
            ...baseOptions,
            apiBaseUrl: this.config.proxyPass,
        });
    }
    /**
     * 重置apikey — drop the cached client so the next call re-initializes.
     * @return {Promise<void>}
     */
    reset() {
        this.chatGPT = null;
    }
    /**
     * Answer one message for one user.
     * @param {object} args
     * @param {string} args.content - the user's question
     * @param {string} args.uid - conversation key (one context per uid)
     * @param {string} [args.adminId] - admin id for chat recording
     * @param {string} [args.systemMessage] - per-call prompt override; also
     *   resets the uid's conversation context
     * @param {*} [args.file] - optional attachment forwarded to the API
     * @returns {Promise<Array>} panel reply objects; [] on error
     */
    async getReply({ content, uid, adminId = '', systemMessage = '', file }) {
        try {
            if (!this.chatGPT) {
                console.log('看到此消息说明已启用ChatGPT');
                await this.init();
            }
            // Inbound censorship of the question.
            if (this.config.filter) {
                const censor = await this.contentCensor.checkText(content);
                if (!censor) {
                    console.log(`问题:${content},包含违规词,已拦截`);
                    return [{ type: 1, content: '这个话题不适合讨论,换个话题吧。' }];
                }
            }
            const resetWord = ['reset', '重置', '重置对话', '忽略上下文', '重置上下文', '重新开始', '清除对话', '清除上下文'];
            if (systemMessage || resetWord.includes(content)) {
                console.log('重新更新上下文对话');
                this.chatOption[uid] = null;
                // Only the two short commands answer with a confirmation; the
                // other reset words fall through and are sent as a question.
                if (content === 'reset' || content === '重置') {
                    return [{ type: 1, content: '上下文已重置' }];
                }
            }
            const sendParams = { ...this.chatOption[uid], timeoutMs: this.config.timeoutMs * 1000 || 80 * 1000 };
            if (systemMessage) {
                sendParams.systemMessage = systemMessage;
            }
            console.log('sendParams---', sendParams);
            const { chatId, text } = await this.chatGPT.sendMessage({ message: content, file }, sendParams);
            // Outbound censorship of the answer.
            if (this.config.filter) {
                const censor = await this.contentCensor.checkText(text);
                if (!censor) {
                    console.log(`回复: ${text},包含违规词,已拦截`);
                    return [{ type: 1, content: '这个话题不适合讨论,换个话题吧。' }];
                }
            }
            if (this.config.record) {
                // Fire-and-forget persistence of the exchange.
                void addAichatRecord({ contactId: uid, adminId, input: content, output: text, time: dayjs().format('YYYY-MM-DD HH:mm:ss') });
            }
            this.chatOption[uid] = { chatId };
            let replys = [];
            if (this.config?.openTTS) {
                // Prefer a voice reply; fall back to text when TTS yields nothing.
                replys = await getText2Speech(text, this.config.ttsConfig);
                if (replys.length) {
                    return replys;
                }
            }
            let message;
            if (this.config.showQuestion) {
                message = `${content}${this.eol}-----------${this.eol}` + (this.iswindows ? text.replaceAll('\n', this.eol) : text);
            }
            else {
                message = this.iswindows ? text.replaceAll('\n', this.eol) : text;
            }
            let imgs = [];
            if (this.config.filterLinkContent) {
                // Strip image links out of the text and reply with them separately.
                const filterRes = extractImageLinks(message, true);
                imgs = filterRes.imageReplys;
                message = filterRes.content;
            }
            else {
                imgs = extractImageLinks(message);
            }
            console.log('imgs', imgs);
            // Chop long answers into <=1500-character text messages.
            while (message.length > 1500) {
                replys.push(message.slice(0, 1500));
                message = message.slice(1500);
            }
            replys.push(message);
            replys = replys.map(item => {
                return {
                    type: 1,
                    content: item.trim()
                };
            });
            if (imgs.length) {
                console.log('提取到内容中的图片', imgs);
                replys = replys.concat(imgs);
            }
            return replys;
        }
        catch (e) {
            console.log('chat gpt报错:' + e);
            return [];
        }
    }
}
export default OfficialOpenAi;
|
|
186
|
+
//# sourceMappingURL=officialOpenAi.js.map
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
import { QAnyApi } from './sdk/qanything.js';
|
|
2
|
+
import { addAichatRecord } from '../db/aichatDb.js';
|
|
3
|
+
import { ContentCensor } from '../lib/contentCensor.js';
|
|
4
|
+
import { getPuppetEol, isWindowsPlatform } from '../const/puppet-type.js';
|
|
5
|
+
import dayjs from 'dayjs';
|
|
6
|
+
import { extractImageLinks } from '../lib/index.js';
|
|
7
|
+
import { getText2Speech } from "../proxy/multimodal.js";
|
|
8
|
+
/**
 * QAnything chat adapter. Keeps a per-user "send history" flag, applies
 * optional content censoring, optionally records chats or converts the
 * answer to speech, and splits long answers into <=1500-char panel replies.
 */
class QAnyAi {
    /**
     * @param {object} config - bot configuration. Notable fields: token (API
     *   secret), botId, proxyPass (request base url), timeoutMs (seconds),
     *   showQuestion (echo the question in the reply).
     */
    constructor(config = {
        token: '', // api secret key
        botId: '', // botId
        proxyPass: '', // request base url
        showQuestion: true, // echo the original question
        showSuggestions: false, // show suggested follow-up questions
        timeoutMs: 180, // timeout in seconds
        promotId: '',
        systemMessage: '' // preset prompt
    }) {
        this.qanyChat = null;
        this.config = { ...config };
        this.contentCensor = null;
        this.chatOption = {}; // per-user state: whether to send chat history
        this.eol = '\n';
        this.iswindows = false;
    }
    /** Lazily create the QAnything API client. */
    async init() {
        this.eol = await getPuppetEol();
        this.iswindows = await isWindowsPlatform();
        if (this.config.filter) {
            this.contentCensor = new ContentCensor(this.config.filterConfig);
        }
        const baseOptions = {
            apiKey: this.config.token,
            apiBaseUrl: this.config.proxyPass,
            debug: !!this.config.debug,
            botId: this.config.botId,
        };
        console.log(`api请求地址:${this.config.proxyPass}`);
        this.qanyChat = new QAnyApi({
            ...baseOptions,
        });
    }
    /**
     * 重置apikey — drop the cached client so the next call re-initializes.
     * @return {Promise<void>}
     */
    reset() {
        this.qanyChat = null;
    }
    /**
     * Answer one message for one user.
     * @param {string} content - the user's question
     * @param {string} uid - conversation key
     * @param {string} [adminId] - admin id for chat recording
     * @param {string} [systemMessage] - unused by this adapter (kept for
     *   signature parity with the other bot adapters)
     * @returns {Promise<Array>} panel reply objects; [] on error
     */
    async getReply(content, uid, adminId = '', systemMessage = '') {
        try {
            if (!this.qanyChat) {
                console.log('启用QAnything对话平台');
                await this.init();
            }
            // Inbound censorship of the question.
            if (this.config.filter) {
                const censor = await this.contentCensor.checkText(content);
                if (!censor) {
                    console.log(`问题:${content},包含违规词,已拦截`);
                    return [{ type: 1, content: '这个话题不适合讨论,换个话题吧。' }];
                }
            }
            if (content === 'reset' || content === '重置') {
                console.log('重新更新上下文对话');
                // Next question starts a fresh context: don't send history.
                this.chatOption[uid] = {
                    needHistory: false
                };
                // The original nested duplicate of this exact condition was a
                // no-op; always confirm the reset.
                return [{ type: 1, content: '上下文已重置' }];
            }
            const { text } = await this.qanyChat.sendMessage(content, {
                ...this.chatOption[uid],
                timeoutMs: this.config.timeoutMs * 1000 || 180 * 1000,
                user: uid
            });
            // Outbound censorship of the answer.
            if (this.config.filter) {
                const censor = await this.contentCensor.checkText(text);
                if (!censor) {
                    console.log(`回复: ${text},包含违规词,已拦截`);
                    return [{ type: 1, content: '这个话题不适合讨论,换个话题吧。' }];
                }
            }
            if (this.config.record) {
                // Fire-and-forget persistence of the exchange.
                void addAichatRecord({
                    contactId: uid,
                    adminId,
                    input: content,
                    output: text,
                    time: dayjs().format('YYYY-MM-DD HH:mm:ss')
                });
            }
            // 保存对话id 对于同一个用户的对话不更新conversationId
            this.chatOption[uid] = {
                needHistory: true
            };
            let replys = [];
            if (this.config?.openTTS) {
                // Prefer a voice reply; fall back to text when TTS yields nothing.
                replys = await getText2Speech(text, this.config.ttsConfig);
                if (replys.length) {
                    return replys;
                }
            }
            let message;
            if (this.config.showQuestion) {
                message = `${content}${this.eol}-----------${this.eol}` + (this.iswindows ? text.replaceAll('\n', this.eol) : text);
            }
            else {
                message = this.iswindows ? text.replaceAll('\n', this.eol) : text;
            }
            let imgs = [];
            if (this.config.filterLinkContent) {
                // Strip image links out of the text and reply with them separately.
                const filterRes = extractImageLinks(message, true);
                imgs = filterRes.imageReplys;
                message = filterRes.content;
            }
            else {
                imgs = extractImageLinks(message);
            }
            // Chop long answers into <=1500-character text messages.
            while (message.length > 1500) {
                replys.push(message.slice(0, 1500));
                message = message.slice(1500);
            }
            replys.push(message);
            replys = replys.map(item => {
                return {
                    type: 1,
                    content: item.trim()
                };
            });
            if (imgs.length) {
                console.log('提取到内容中的图片', imgs);
                replys = replys.concat(imgs);
            }
            return replys;
        }
        catch (e) {
            console.log('QAnything请求报错:' + e);
            return [];
        }
    }
}
export default QAnyAi;
|
|
144
|
+
//# sourceMappingURL=qany.js.map
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
import axios from "axios";
|
|
2
|
+
// Default OpenAI API base url; overridden per call via config.baseUrl.
export const BASE_URL = "https://api.openai.com/v1";
/**
 * Domain error for GPT-4V requests. Callers additionally attach
 * statusCode/statusText before raising it.
 */
const GPT4VError = class extends Error {
    constructor(message, options) {
        super(message, options);
        // Bug fix: without this, instances reported name "Error" in logs.
        this.name = 'GPT4VError';
    }
};
/** Route table for the OpenAI endpoints this SDK touches. */
export const routes = {
    createVisionPreviewMessage: {
        method: "POST",
        url: () => '/chat/completions'
    },
    createDellImage: {
        method: "POST",
        url: () => '/images/generations'
    },
};
|
|
15
|
+
/**
 * Issue one HTTP request against the OpenAI-compatible API.
 * @param {object} opts
 * @param {string} opts.method - HTTP verb
 * @param {string} opts.endpoint - path appended to baseUrl
 * @param {object} [opts.data] - JSON request body
 * @param {object} [opts.params] - query parameters
 * @param {boolean} [opts.stream=false] - request a streamed response
 * @param {number} [opts.timeoutMs=180000] - request timeout in milliseconds
 * @param {string} opts.apiKey - bearer token
 * @param {string} opts.baseUrl - API base url
 * @param {boolean|number} [opts.debug] - log the outgoing request
 * @returns {Promise<object>} the raw axios response (axios rejects on
 *   network errors / non-2xx statuses by default)
 */
async function sendRequest({ method, endpoint, data, params, stream = false, timeoutMs = 180 * 1000, apiKey, baseUrl, debug }) {
    const headers = {
        "Authorization": `Bearer ${apiKey}`, "Content-Type": "application/json"
    };
    const url = `${baseUrl}${endpoint}`;
    if (debug) {
        console.log("gpt4v request", url, { data, headers, params });
    }
    // Collapsed `let response; response = await ...` into a single const.
    const response = await axios({
        method,
        url,
        data: data || null,
        params: params || null,
        headers,
        timeout: timeoutMs,
        responseType: stream ? "stream" : "json"
    });
    return response;
}
|
|
35
|
+
/**
 * Describe one or more images with the GPT-4 Vision endpoint.
 * @param {string[]} images - image urls (attached as image_url parts)
 * @param {string} question - prompt text; falls back to a generic
 *   describe-in-Chinese prompt when empty
 * @param {object} config - { apiKey, baseUrl, debug, timeoutMs (seconds),
 *   completionParams } — completionParams is spread over the request body
 *   and may override model/max_tokens
 * @returns {Promise<{text: string}>} the model's answer text
 * @throws {GPT4VError} when the API answers with a non-200 status
 */
export async function getImageVision(images, question, config) {
    const data = {
        model: "gpt-4-vision-preview",
        messages: [
            {
                role: "user",
                content: [
                    {
                        type: "text",
                        text: question || "Please describe the content of the image in Chinese. If it contains any prohibited content, please refrain from describing it."
                    }
                ]
            }
        ],
        max_tokens: 300,
        ...config.completionParams
    };
    // Attach every image as an image_url content part of the same message.
    for (let item of images) {
        data.messages[0].content.push({
            type: "image_url",
            image_url: {
                url: item
            }
        });
    }
    if (config.debug) {
        console.log("request data", data);
    }
    const res = await sendRequest({
        apiKey: config.apiKey,
        debug: config.debug,
        baseUrl: config.baseUrl || BASE_URL,
        method: routes.createVisionPreviewMessage.method,
        endpoint: routes.createVisionPreviewMessage.url(),
        data,
        stream: false,
        timeoutMs: config.timeoutMs * 1000
    });
    if (res.status !== 200) {
        if (config.debug) {
            console.log("gpt4v request error", res.data);
        }
        const reason = JSON.stringify(res.data);
        const msg = `GPT4V error ${res.status}: ${reason}`;
        const error = new GPT4VError(msg, { cause: res });
        error.statusCode = res.status;
        error.statusText = JSON.stringify(res.data);
        // Bug fix: previously rejected with `res.message` (always undefined
        // on an axios response), discarding the error built above.
        return Promise.reject(error);
    }
    const response = res.data;
    return {
        text: response.choices[0].message.content
    };
}
|
|
89
|
+
//# sourceMappingURL=chatGPT4V.js.map
|