wechaty-web-panel 1.6.112 → 1.6.113
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bot/chatgpt/index.js +235 -0
- package/dist/bot/coze/sdk/index.js +110 -0
- package/dist/bot/dify/sdk/index.js +461 -0
- package/dist/bot/dify/sdk/office.js +319 -0
- package/dist/bot/fastgpt/index.js +98 -0
- package/dist/bot/qanything/index.js +136 -0
- package/dist/botInstance/coze.js +167 -0
- package/dist/botInstance/cozev3.js +157 -0
- package/dist/botInstance/dify.js +160 -0
- package/dist/botInstance/fastgpt.js +130 -0
- package/dist/botInstance/gpt4v.js +95 -0
- package/dist/botInstance/officialOpenAi.js +186 -0
- package/dist/botInstance/qany.js +144 -0
- package/dist/botInstance/sdk/chatGPT4V.js +89 -0
- package/dist/botInstance/sdk/coze.js +200 -0
- package/dist/botInstance/sdk/difyClient.js +354 -0
- package/dist/botInstance/sdk/pTimeout.js +97 -0
- package/dist/botInstance/sdk/qanything.js +137 -0
- package/dist/botInstance/sdk/quick-lru.js +237 -0
- package/dist/common/hook.js +66 -0
- package/dist/common/index.js +513 -0
- package/dist/common/multiReply.js +158 -0
- package/dist/common/reply.js +23 -0
- package/dist/const/puppet-type.js +71 -0
- package/dist/db/aiDb.js +27 -0
- package/dist/db/aichatDb.js +84 -0
- package/dist/db/chatHistory.js +137 -0
- package/dist/db/configDb.js +97 -0
- package/dist/db/global.js +62 -0
- package/dist/db/gptConfig.js +85 -0
- package/dist/db/nedb.js +88 -0
- package/dist/db/puppetDb.js +58 -0
- package/dist/db/roomDb.js +83 -0
- package/dist/db/rssConfig.js +82 -0
- package/dist/db/rssHistory.js +88 -0
- package/dist/db/userDb.js +27 -0
- package/dist/handlers/on-callback-message.js +183 -0
- package/dist/handlers/on-error.js +5 -0
- package/dist/handlers/on-friend.js +62 -0
- package/dist/handlers/on-heartbeat.js +20 -0
- package/dist/handlers/on-login.js +58 -0
- package/dist/handlers/on-logout.js +17 -0
- package/dist/handlers/on-message.js +644 -0
- package/dist/handlers/on-ready.js +36 -0
- package/dist/handlers/on-record-message.js +56 -0
- package/dist/handlers/on-roomjoin.js +42 -0
- package/dist/handlers/on-roomleave.js +12 -0
- package/dist/handlers/on-roomtopic.js +16 -0
- package/dist/handlers/on-scan.js +64 -0
- package/dist/handlers/on-verifycode.js +42 -0
- package/dist/index.js +81 -69306
- package/dist/lib/contentCensor.js +23 -0
- package/dist/lib/index.js +562 -0
- package/dist/lib/oss.js +43 -0
- package/dist/lib/s3oss.js +33 -0
- package/dist/mcp/mcp-server.js +26 -0
- package/dist/mcp/src/config/database.js +51 -0
- package/dist/mcp/src/index.js +238 -0
- package/dist/mcp/src/mcp/schemas.js +178 -0
- package/dist/mcp/src/mcp/server.js +421 -0
- package/dist/mcp/src/mcp/streamable-server.js +690 -0
- package/dist/mcp/src/models/ChatMessage.js +151 -0
- package/dist/mcp/src/models/Friend.js +64 -0
- package/dist/mcp/src/models/Group.js +55 -0
- package/dist/mcp/src/models/GroupMember.js +67 -0
- package/dist/mcp/src/models/index.js +27 -0
- package/dist/mcp/src/scripts/migrate.js +21 -0
- package/dist/mcp/src/services/ChatDataService.js +284 -0
- package/dist/mcp/src/services/McpService.js +521 -0
- package/dist/mcp/src/services/McpTools.js +504 -0
- package/dist/mcp/streamable-examples.js +283 -0
- package/dist/mcp/streamable-server.js +79 -0
- package/dist/mcp/test-mcp.js +64 -0
- package/dist/mcp/test-streamable-server.js +86 -0
- package/dist/package-json.js +89 -0
- package/dist/proxy/aibotk.js +829 -0
- package/dist/proxy/api.js +431 -0
- package/dist/proxy/apib.js +587 -0
- package/dist/proxy/bot/chatgpt.js +38 -0
- package/dist/proxy/bot/coze.js +38 -0
- package/dist/proxy/bot/cozev3.js +38 -0
- package/dist/proxy/bot/dify.js +38 -0
- package/dist/proxy/bot/dispatch.js +81 -0
- package/dist/proxy/bot/fastgpt.js +27 -0
- package/dist/proxy/bot/qany.js +27 -0
- package/dist/proxy/config.js +14 -0
- package/dist/proxy/cozeAi.js +60 -0
- package/dist/proxy/cozeV3Ai.js +60 -0
- package/dist/proxy/difyAi.js +58 -0
- package/dist/proxy/fastgpt.js +55 -0
- package/dist/proxy/mqtt.js +275 -0
- package/dist/proxy/multimodal.js +122 -0
- package/dist/proxy/openAi.js +63 -0
- package/dist/proxy/outapi.js +62 -0
- package/dist/proxy/qAnyAi.js +57 -0
- package/dist/proxy/superagent.js +200 -0
- package/dist/proxy/tencent-open.js +255 -0
- package/dist/service/event-dispatch-service.js +309 -0
- package/dist/service/gpt4vService.js +45 -0
- package/dist/service/msg-filter-service.js +121 -0
- package/dist/service/msg-filters.js +645 -0
- package/dist/service/room-async-service.js +455 -0
- package/dist/task/index.js +535 -0
- package/dist/task/rss.js +174 -0
- package/package.json +2 -2
- package/src/package-json.js +2 -2
- package/tsconfig.json +3 -12
- package/dist/index.d.ts +0 -9
- package/tsconfig.cjs.json +0 -12
@@ -0,0 +1,235 @@
+import { ChatOpenAI } from "@langchain/openai";
+import { ConversationChain } from "langchain/chains";
+import { BufferMemory, BufferWindowMemory } from "langchain/memory";
+import { SystemMessagePromptTemplate, HumanMessagePromptTemplate, ChatPromptTemplate, MessagesPlaceholder } from "@langchain/core/prompts";
+import { StringOutputParser } from "@langchain/core/output_parsers";
+import { RedisVectorStore } from "@langchain/redis";
+import { OpenAIEmbeddings } from "@langchain/openai";
+import { v4 as uuidv4 } from "uuid";
+export class ChatGPTAPI {
+    constructor({ apiKey, apiBaseUrl = "https://api.openai.com/v1", completionParams = {}, debug = false, systemMessage = "", maxModelTokens = 4096, maxResponseTokens = 1024, memoryConfig = {
+        type: "memory", // "memory" or "redis"
+        redisConfig: null, // Only used if type is "redis"
+        expireTime: null, // Time in seconds, null means no expiration
+    }, streaming = true, // Enable streaming mode by default
+    redis }) {
+        this.apiKey = apiKey;
+        this.apiBaseUrl = apiBaseUrl;
+        this.completionParams = completionParams;
+        this.debug = debug;
+        this.systemMessage = systemMessage;
+        this.maxModelTokens = maxModelTokens;
+        this.maxResponseTokens = maxResponseTokens;
+        this.memoryConfig = memoryConfig;
+        this.conversations = new Map();
+        this.streaming = streaming;
+        // Initialize Redis if needed
+        if (memoryConfig.type === "redis" && memoryConfig.redisConfig) {
+            this.redisClient = redis;
+        }
+    }
+    /**
+     * Create a new conversation chain or retrieve an existing one
+     * @param {string} chatId - Unique identifier for the conversation
+     * @param {string} systemMessage - Optional system message to override the default
+     * @param {boolean} streaming - Whether to enable streaming for this conversation
+     * @returns {Object} - Conversation object with chain and metadata
+     */
+    async getOrCreateConversation(chatId, { systemMessage = null, streaming = null, timeoutMs = 60000 }) {
+        // If conversation exists and no specific system message is requested, return it
+        if (this.conversations.has(chatId) && !systemMessage) {
+            return this.conversations.get(chatId);
+        }
+        // Calculate context window size
+        const contextWindowSize = this.maxModelTokens - this.maxResponseTokens - 500; // Buffer for metadata
+        // Create the appropriate memory system
+        let memory;
+        if (this.memoryConfig.type === "redis") {
+            // Setup Redis vector store for memory
+            const vectorStore = new RedisVectorStore(new OpenAIEmbeddings({
+                openAIApiKey: this.apiKey,
+                configuration: {
+                    baseURL: this.apiBaseUrl
+                }
+            }), {
+                redisClient: this.redisClient,
+                indexName: `chat:${chatId}`,
+            });
+            memory = new BufferMemory({
+                memoryKey: "chat_history",
+                chatHistory: vectorStore,
+                returnMessages: true,
+                outputKey: "output",
+                inputKey: "input",
+            });
+            // Set expiration if configured
+            if (this.memoryConfig.expireTime) {
+                const expireTimeSeconds = this.memoryConfig.expireTime;
+                this.redisClient.expire(`chat:${chatId}`, expireTimeSeconds);
+            }
+        }
+        else {
+            // Use in-memory storage
+            memory = new BufferWindowMemory({
+                memoryKey: "chat_history",
+                // k: 10, // Keep last 10 interactions by default, adjust based on token size
+                returnMessages: true,
+                outputKey: "output",
+                inputKey: "input",
+            });
+        }
+        // Determine if streaming should be enabled
+        const useStreaming = streaming !== null ? streaming : this.streaming;
+        // Create chat model
+        const chatModel = new ChatOpenAI({
+            openAIApiKey: this.apiKey,
+            modelName: this.completionParams.model,
+            temperature: this.completionParams.temperature || 0.7,
+            topP: this.completionParams.top_p || 1,
+            presencePenalty: this.completionParams.presence_penalty || 0,
+            frequencyPenalty: this.completionParams.frequency_penalty || 0,
+            maxTokens: this.maxResponseTokens,
+            timeout: timeoutMs || 60000, // 60 seconds default timeout
+            streaming: useStreaming,
+            verbose: this.debug,
+            configuration: {
+                baseURL: this.apiBaseUrl
+            }
+        });
+        // Create prompt template with system message
+        const finalSystemMessage = systemMessage || this.systemMessage || "You are a helpful assistant.";
+        const prompt = ChatPromptTemplate.fromMessages([
+            SystemMessagePromptTemplate.fromTemplate(finalSystemMessage),
+            new MessagesPlaceholder("chat_history"),
+            HumanMessagePromptTemplate.fromTemplate("{input}"),
+        ]);
+        // Create conversation chain using the new Runnable API in v0.3
+        const chain = new ConversationChain({
+            memory,
+            llm: chatModel,
+            prompt,
+            verbose: this.debug,
+            outputParser: new StringOutputParser(),
+            inputKey: "input",
+            outputKey: "output"
+        });
+        // Store the conversation
+        this.conversations.set(chatId, {
+            chain,
+            createdAt: Date.now(),
+            model: this.completionParams.model,
+            contextWindowSize,
+            streaming: useStreaming
+        });
+        return this.conversations.get(chatId);
+    }
+    /**
+     * Send a message to the model and get a response
+     * @param {string} message - The message to send
+     * @param {Object} options - Additional options
+     * @returns {Promise<Object>} - The response
+     */
+    async sendMessage({ message, file }, options = {}) {
+        const chatId = options.chatId || uuidv4();
+        const systemMsg = options.systemMessage || this.systemMessage;
+        const timeoutMs = options.timeoutMs || 60000;
+        const streaming = options.streaming !== undefined ? options.streaming : this.streaming;
+        try {
+            if (this.debug) {
+                console.log(`Sending message to ${this.completionParams.model} with chatId: ${chatId}`);
+                console.log(`Message: ${message}`);
+                console.log(`Streaming: ${streaming}`);
+            }
+            // Get or create conversation
+            const conversation = await this.getOrCreateConversation(chatId, { systemMessage: systemMsg, streaming, timeoutMs: timeoutMs });
+            let responseText = "";
+            const inputMessage = file && file.fileUrl ? {
+                input: {
+                    text: message,
+                    image_url: file.fileUrl
+                }
+            } : {
+                input: message
+            };
+            // Handle streaming or non-streaming response using v0.3 API
+            if (streaming) {
+                const responsePromise = new Promise(async (resolve, reject) => {
+                    try {
+                        responseText = "";
+                        // Using v0.3 streaming API
+                        const stream = await conversation.chain.stream(inputMessage);
+                        for await (const chunk of stream) {
+                            if (chunk.output) {
+                                responseText += chunk.output;
+                            }
+                        }
+                        resolve(responseText);
+                    }
+                    catch (error) {
+                        reject(error);
+                    }
+                });
+                // Await the accumulated streamed text (the timeout is enforced by the model's `timeout` option)
+                await responsePromise;
+            }
+            else {
+                // Non-streaming request using v0.3 API
+                const responsePromise = conversation.chain.invoke(inputMessage);
+                // Await the response (the timeout is enforced by the model's `timeout` option)
+                const response = await responsePromise;
+                responseText = response.output;
+            }
+            // Return formatted response (same format for both streaming and non-streaming)
+            return {
+                chatId: chatId,
+                text: responseText,
+                id: uuidv4()
+            };
+        }
+        catch (error) {
+            if (this.debug) {
+                console.error("Error in sendMessage:", error);
+            }
+            throw error;
+        }
+    }
+    /**
+     * Clear a specific conversation or all conversations
+     * @param {string} chatId - Optional chat ID to clear a specific conversation
+     */
+    async clearConversations(chatId = null) {
+        if (chatId) {
+            // Clear a specific conversation
+            this.conversations.delete(chatId);
+            // Clear from Redis if using Redis
+            if (this.memoryConfig.type === "redis" && this.redisClient) {
+                await this.redisClient.del(`chat:${chatId}`);
+            }
+        }
+        else {
+            // Clear all conversations
+            this.conversations.clear();
+            // Would need to implement a more complex Redis cleanup if needed
+        }
+    }
+    /**
+     * Set expiration time for a conversation
+     * @param {string} chatId - The chat ID
+     * @param {number} seconds - Time in seconds until expiration
+     */
+    async setExpiration(chatId, seconds) {
+        if (!this.conversations.has(chatId)) {
+            throw new Error(`Conversation with ID ${chatId} not found`);
+        }
+        if (this.memoryConfig.type === "redis" && this.redisClient) {
+            await this.redisClient.expire(`chat:${chatId}`, seconds);
+        }
+        else {
+            // For in-memory, we can set a timer to delete the conversation
+            setTimeout(() => {
+                this.conversations.delete(chatId);
+            }, seconds * 1000);
+        }
+    }
+}
+//# sourceMappingURL=index.js.map
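For orientation, here is a minimal usage sketch of the `ChatGPTAPI` class added above (the first hunk corresponds to `package/dist/bot/chatgpt/index.js` in the file list). It is inferred only from the constructor and `sendMessage` signatures visible in the hunk; the deep import path, API key, and model name are placeholder assumptions, not something this package documents.

```js
// Hypothetical usage of the ChatGPTAPI wrapper added in dist/bot/chatgpt/index.js.
// The import path and all config values below are illustrative assumptions.
import { ChatGPTAPI } from 'wechaty-web-panel/dist/bot/chatgpt/index.js';

const api = new ChatGPTAPI({
  apiKey: process.env.OPENAI_API_KEY,
  completionParams: { model: 'gpt-4o-mini', temperature: 0.7 },
  systemMessage: 'You are a helpful assistant.',
  // In-memory history; per the constructor, type: "redis" plus a redisConfig
  // and a `redis` client would route history through a RedisVectorStore instead.
  memoryConfig: { type: 'memory', redisConfig: null, expireTime: null },
  streaming: true,
});

// Each chatId keys its own ConversationChain in the internal Map, so a second
// call with the same chatId continues the same buffered history.
const first = await api.sendMessage({ message: 'Hi there!' }, { chatId: 'room-42' });
const followUp = await api.sendMessage(
  { message: 'Summarize what I said so far.' },
  { chatId: first.chatId },
);
console.log(followUp.text);
```

`sendMessage` returns `{ chatId, text, id }`, so callers thread the returned `chatId` through subsequent calls to keep context.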
@@ -0,0 +1,110 @@
+import { CozeAPI, ChatEventType, ChatStatus, COZE_CN_BASE_URL, RoleType } from '@coze/api';
+// import { FileBox } from 'file-box';
+// import fs from 'fs';
+class CozeClient {
+    constructor({ apiKey, baseUrl = COZE_CN_BASE_URL, debug = false, systemMessage = null, stream = false, botId }) {
+        this.apiKey = apiKey;
+        this.baseUrl = baseUrl;
+        this.debug = debug;
+        this.stream = stream;
+        this.botId = botId;
+        this.systemMessage = systemMessage;
+    }
+    updateApiKey(apiKey) {
+        this.apiKey = apiKey;
+    }
+}
+class CozeV3Api extends CozeClient {
+    async sendMessage({ query, file }, { systemMessage, user, needConversation = true, conversationId = null, timeoutMs = 100 * 1000, files = null, variables }) {
+        try {
+            const client = new CozeAPI({ baseURL: this.baseUrl, token: this.apiKey });
+            let messages = [
+                {
+                    role: 'user',
+                    content: query,
+                    content_type: 'text',
+                },
+            ];
+            if (file && file.fileUrl) {
+                const mediaMessage = [
+                    {
+                        type: 'text',
+                        text: query,
+                    },
+                    {
+                        type: file.fileType === '图片' ? 'image' : 'file',
+                        file_url: file.fileUrl,
+                    },
+                ];
+                messages = [
+                    {
+                        role: 'user',
+                        content: JSON.stringify(mediaMessage),
+                        content_type: 'object_string',
+                    },
+                ];
+            }
+            // else if(file) {
+            //     const fileBuffer = await file.toStream()
+            //     const fileRes = await client.files.upload({file: fileBuffer})
+            //     const mediaMessage = [{
+            //         type: 'text',
+            //         text: query,
+            //     }, {
+            //         type: fileType,
+            //         file_id: fileRes.id
+            //     }]
+            //     messages = [{
+            //         role: 'user',
+            //         content: JSON.stringify(mediaMessage),
+            //         content_type: 'object_string',
+            //     }]
+            // }
+            if (!conversationId && needConversation) {
+                const res = await client.conversations.create({
+                    messages,
+                    timeoutMs,
+                });
+                console.log('conversation', res);
+                conversationId = res?.id;
+                messages = [];
+            }
+            const data = {
+                bot_id: this.botId,
+                custom_variables: {
+                    ...variables,
+                },
+                additional_messages: messages,
+                user_id: user,
+                auto_save_history: true,
+            };
+            if (systemMessage || this.systemMessage) {
+                data.custom_variables.systemMessage = systemMessage || this.systemMessage;
+            }
+            if (conversationId && needConversation) {
+                data.conversation_id = conversationId;
+            }
+            const stream = await client.chat.stream(data);
+            let answers = [];
+            let lastMessageId = '';
+            for await (const part of stream) {
+                if (part.event === ChatEventType.CONVERSATION_CHAT_FAILED || part.event === ChatEventType.ERROR) {
+                    console.log('Coze request failed; check the failure reason to troubleshoot:', JSON.stringify(part.data));
+                    return { text: answers[0] || '', conversationId, id: lastMessageId, files: [], messages: answers };
+                }
+                if (part.event === ChatEventType.CONVERSATION_MESSAGE_COMPLETED && part.data.type === 'answer' && part.data.content_type === 'text') {
+                    answers.push(part.data.content);
+                    lastMessageId = part.data.id;
+                }
+            }
+            console.log('messages', answers);
+            return { text: answers[0], conversationId, id: lastMessageId, files: [], messages: answers };
+        }
+        catch (error) {
+            console.log('Coze request failed; check the failure reason to troubleshoot:', JSON.stringify(error));
+            return { text: '', conversationId: conversationId, id: '', files: [], messages: [] };
+        }
+    }
+}
+export { CozeV3Api };
+//# sourceMappingURL=index.js.map
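Similarly, a minimal sketch of driving the `CozeV3Api` client above (this hunk corresponds to `package/dist/bot/coze/sdk/index.js` in the file list), inferred from the signatures visible in the hunk. The import path, bot ID, and token are placeholder assumptions.

```js
// Hypothetical usage of the CozeV3Api client added in dist/bot/coze/sdk/index.js.
// botId and the token are placeholders; the deep import path is an assumption.
import { CozeV3Api } from 'wechaty-web-panel/dist/bot/coze/sdk/index.js';

const coze = new CozeV3Api({
  apiKey: process.env.COZE_API_TOKEN,
  botId: '<your-bot-id>',
});

// With needConversation left at its default (true) and no conversationId,
// sendMessage first creates a conversation, then streams the chat and
// collects completed 'answer' messages.
const res = await coze.sendMessage(
  { query: 'Hello, what can you do?' },
  { user: 'wechaty-user-1' },
);
console.log(res.text);

// Reuse the returned conversationId to keep multi-turn context.
const next = await coze.sendMessage(
  { query: 'Give me an example.' },
  { user: 'wechaty-user-1', conversationId: res.conversationId },
);
console.log(next.text);
```

Note that on failure the client logs and returns an empty-text result rather than throwing, so callers should check `res.text` before replying.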