langchain 0.0.137 → 0.0.138
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/chat_models/minimax.cjs +1 -0
- package/chat_models/minimax.d.ts +1 -0
- package/chat_models/minimax.js +1 -0
- package/dist/callbacks/handlers/tracer.cjs +17 -17
- package/dist/callbacks/handlers/tracer.d.ts +1 -1
- package/dist/callbacks/handlers/tracer.js +17 -17
- package/dist/callbacks/manager.cjs +25 -10
- package/dist/callbacks/manager.d.ts +3 -2
- package/dist/callbacks/manager.js +25 -10
- package/dist/chat_models/minimax.cjs +547 -0
- package/dist/chat_models/minimax.d.ts +364 -0
- package/dist/chat_models/minimax.js +543 -0
- package/dist/chat_models/ollama.cjs +136 -0
- package/dist/chat_models/ollama.d.ts +34 -0
- package/dist/chat_models/ollama.js +136 -0
- package/dist/embeddings/minimax.cjs +152 -0
- package/dist/embeddings/minimax.d.ts +104 -0
- package/dist/embeddings/minimax.js +148 -0
- package/dist/llms/ollama.cjs +136 -0
- package/dist/llms/ollama.d.ts +34 -0
- package/dist/llms/ollama.js +136 -0
- package/dist/load/import_map.cjs +4 -2
- package/dist/load/import_map.d.ts +2 -0
- package/dist/load/import_map.js +2 -0
- package/dist/util/ollama.d.ts +34 -0
- package/dist/vectorstores/redis.cjs +17 -2
- package/dist/vectorstores/redis.d.ts +10 -1
- package/dist/vectorstores/redis.js +17 -2
- package/dist/vectorstores/zep.cjs +2 -1
- package/dist/vectorstores/zep.js +3 -2
- package/embeddings/minimax.cjs +1 -0
- package/embeddings/minimax.d.ts +1 -0
- package/embeddings/minimax.js +1 -0
- package/package.json +19 -3
|
@@ -0,0 +1,543 @@
|
|
|
1
|
+
import { BaseChatModel } from "./base.js";
|
|
2
|
+
import { AIMessage, ChatMessage, HumanMessage, } from "../schema/index.js";
|
|
3
|
+
import { getEnvironmentVariable } from "../util/env.js";
|
|
4
|
+
import { formatToOpenAIFunction } from "../tools/convert_to_openai.js";
|
|
5
|
+
/**
 * Maps the custom role of a generic chat message onto a Minimax sender_type.
 * Roles other than "ai"/"user" produce a console warning and are passed
 * through unchanged.
 * @param message Chat message whose custom role should be mapped.
 * @returns "BOT" for "ai", "USER" for "user", otherwise the original role.
 */
function extractGenericMessageCustomRole(message) {
    const { role } = message;
    if (role === "ai") {
        return "BOT";
    }
    if (role === "user") {
        return "USER";
    }
    // Unrecognized role: warn, then forward it verbatim.
    console.warn(`Unknown message role: ${role}`);
    return role;
}
|
|
22
|
+
/**
 * Converts a base message into the Minimax sender_type of its author.
 * @param message Base message to convert.
 * @returns The Minimax message sender_type ("BOT", "USER", "FUNCTION",
 *   or a custom role for generic messages).
 * @throws If the message is a system message, an invalid generic message,
 *   or of an unknown type.
 */
function messageToMinimaxRole(message) {
    const type = message._getType();
    if (type === "ai") {
        return "BOT";
    }
    if (type === "human") {
        return "USER";
    }
    if (type === "function") {
        return "FUNCTION";
    }
    if (type === "system") {
        // The Minimax message list has no system slot; system prompts are
        // routed through bot_setting instead.
        throw new Error("System messages not supported");
    }
    if (type === "generic") {
        if (!ChatMessage.isInstance(message)) {
            throw new Error("Invalid generic chat message");
        }
        return extractGenericMessageCustomRole(message);
    }
    throw new Error(`Unknown message type: ${type}`);
}
|
|
47
|
+
/**
 * Wrapper around Minimax large language models that use the Chat endpoint.
 *
 * To use you should have the `MINIMAX_GROUP_ID` and `MINIMAX_API_KEY`
 * environment variable set.
 */
export class ChatMinimax extends BaseChatModel {
    static lc_name() {
        return "ChatMinimax";
    }
    // Call-time option keys accepted in addition to the base model's.
    get callKeys() {
        return [
            ...super.callKeys,
            "functions",
            "tools",
            "defaultBotName",
            "defaultUserName",
            "plugins",
            "replyConstraints",
            "botSetting",
            "sampleMessages",
        ];
    }
    // Maps secret-bearing fields to the environment variables they load from,
    // so serialization can redact them.
    get lc_secrets() {
        return {
            minimaxApiKey: "MINIMAX_API_KEY",
            minimaxGroupId: "MINIMAX_GROUP_ID",
        };
    }
    constructor(fields) {
        super(fields ?? {});
        // The defineProperty boilerplate below is compiled class-field output;
        // each call declares one instance field with its default value.
        Object.defineProperty(this, "lc_serializable", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: true
        });
        Object.defineProperty(this, "minimaxGroupId", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "minimaxApiKey", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "streaming", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: false
        });
        Object.defineProperty(this, "prompt", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "modelName", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: "abab5.5-chat"
        });
        Object.defineProperty(this, "defaultBotName", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: "Assistant"
        });
        Object.defineProperty(this, "defaultUserName", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: "I"
        });
        Object.defineProperty(this, "prefixMessages", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "apiUrl", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "basePath", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: "https://api.minimax.chat/v1"
        });
        Object.defineProperty(this, "headers", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "temperature", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: 0.9
        });
        Object.defineProperty(this, "topP", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: 0.8
        });
        Object.defineProperty(this, "tokensToGenerate", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "skipInfoMask", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "proVersion", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: true
        });
        Object.defineProperty(this, "beamWidth", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "botSetting", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "continueLastMessage", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "maskSensitiveInfo", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "roleMeta", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "useStandardSse", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "replyConstraints", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        // Credentials fall back to environment variables; both are required.
        this.minimaxGroupId =
            fields?.minimaxGroupId ?? getEnvironmentVariable("MINIMAX_GROUP_ID");
        if (!this.minimaxGroupId) {
            throw new Error("Minimax GroupID not found");
        }
        this.minimaxApiKey =
            fields?.minimaxApiKey ?? getEnvironmentVariable("MINIMAX_API_KEY");
        if (!this.minimaxApiKey) {
            throw new Error("Minimax ApiKey not found");
        }
        // Copy caller-supplied fields over the defaults declared above.
        // NOTE(review): fields?.defaultUserName is never read here, so the
        // constructor cannot override the "I" default — confirm if intended.
        this.streaming = fields?.streaming ?? this.streaming;
        this.prompt = fields?.prompt ?? this.prompt;
        this.temperature = fields?.temperature ?? this.temperature;
        this.topP = fields?.topP ?? this.topP;
        this.skipInfoMask = fields?.skipInfoMask ?? this.skipInfoMask;
        this.prefixMessages = fields?.prefixMessages ?? this.prefixMessages;
        this.maskSensitiveInfo =
            fields?.maskSensitiveInfo ?? this.maskSensitiveInfo;
        this.beamWidth = fields?.beamWidth ?? this.beamWidth;
        this.continueLastMessage =
            fields?.continueLastMessage ?? this.continueLastMessage;
        this.tokensToGenerate = fields?.tokensToGenerate ?? this.tokensToGenerate;
        this.roleMeta = fields?.roleMeta ?? this.roleMeta;
        this.botSetting = fields?.botSetting ?? this.botSetting;
        this.useStandardSse = fields?.useStandardSse ?? this.useStandardSse;
        this.replyConstraints = fields?.replyConstraints ?? this.replyConstraints;
        this.defaultBotName = fields?.defaultBotName ?? this.defaultBotName;
        this.modelName = fields?.modelName ?? this.modelName;
        this.basePath = fields?.configuration?.basePath ?? this.basePath;
        this.headers = fields?.configuration?.headers ?? this.headers;
        this.proVersion = fields?.proVersion ?? this.proVersion;
        // Pro and non-pro versions hit different completion endpoints.
        const modelCompletion = this.proVersion
            ? "chatcompletion_pro"
            : "chatcompletion";
        this.apiUrl = `${this.basePath}/text/${modelCompletion}`;
    }
    /**
     * Resolves the bot name to use when a message carries none:
     * the first bot_setting entry wins, then call options, then the default.
     */
    fallbackBotName(options) {
        let botName = options?.defaultBotName ?? this.defaultBotName ?? "Assistant";
        if (this.botSetting) {
            botName = this.botSetting[0].bot_name;
        }
        return botName;
    }
    /**
     * Returns the reply constraints to send: explicit constraints from call
     * options or instance config, else a BOT constraint with the resolved
     * bot name.
     */
    defaultReplyConstraints(options) {
        const constraints = options?.replyConstraints ?? this.replyConstraints;
        if (!constraints) {
            let botName = options?.defaultBotName ?? this.defaultBotName ?? "Assistant";
            if (this.botSetting) {
                botName = this.botSetting[0].bot_name;
            }
            return {
                sender_type: "BOT",
                sender_name: botName,
            };
        }
        return constraints;
    }
    /**
     * Get the parameters used to invoke the model
     */
    invocationParams(options) {
        return {
            model: this.modelName,
            stream: this.streaming,
            prompt: this.prompt,
            temperature: this.temperature,
            top_p: this.topP,
            tokens_to_generate: this.tokensToGenerate,
            skip_info_mask: this.skipInfoMask,
            mask_sensitive_info: this.maskSensitiveInfo,
            beam_width: this.beamWidth,
            use_standard_sse: this.useStandardSse,
            role_meta: this.roleMeta,
            bot_setting: options?.botSetting ?? this.botSetting,
            reply_constraints: this.defaultReplyConstraints(options),
            sample_messages: this.messageToMinimaxMessage(options?.sampleMessages, options),
            // Explicit functions take precedence; otherwise tools are converted
            // to the OpenAI function schema Minimax accepts.
            functions: options?.functions ??
                (options?.tools
                    ? options?.tools.map(formatToOpenAIFunction)
                    : undefined),
            plugins: options?.plugins,
        };
    }
    /**
     * Get the identifying parameters for the model
     */
    identifyingParams() {
        return {
            ...this.invocationParams(),
        };
    }
    /**
     * Convert a list of messages to the format expected by the model.
     * System messages are dropped (they are handled via bot_setting);
     * each remaining message becomes { sender_type, text, sender_name }.
     * @param messages
     * @param options
     */
    messageToMinimaxMessage(messages, options) {
        return messages
            ?.filter((message) => {
            if (ChatMessage.isInstance(message)) {
                return message.role !== "system";
            }
            return message._getType() !== "system";
        })
            ?.map((message) => {
            const sender_type = messageToMinimaxRole(message);
            return {
                sender_type,
                text: message.content,
                // NOTE(review): fallbackBotName() is called without options
                // here, so a call-time defaultBotName is ignored for BOT
                // messages unless botSetting is set — confirm if intended.
                sender_name: message.name ??
                    (sender_type === "BOT"
                        ? this.fallbackBotName()
                        : options?.defaultUserName ?? this.defaultUserName),
            };
        });
    }
    /** @ignore */
    async _generate(messages, options, runManager) {
        const tokenUsage = { totalTokens: 0 };
        // Derive bot_setting from the last system message if none was given.
        this.botSettingFallback(options, messages);
        const params = this.invocationParams(options);
        // NOTE(review): prefixMessages are appended AFTER the conversation
        // here rather than prepended — confirm against the Minimax API's
        // expected ordering.
        const messagesMapped = [
            ...(this.messageToMinimaxMessage(messages, options) ?? []),
            ...(this.prefixMessages ?? []),
        ];
        const data = params.stream
            ? await new Promise((resolve, reject) => {
                let response;
                // Guard flags so the promise settles exactly once even if
                // further SSE events (or a late error) arrive.
                let rejected = false;
                let resolved = false;
                this.completionWithRetry({
                    ...params,
                    messages: messagesMapped,
                }, true, options?.signal, (event) => {
                    const data = JSON.parse(event.data);
                    if (data?.error_code) {
                        if (rejected) {
                            return;
                        }
                        rejected = true;
                        reject(data);
                        return;
                    }
                    const message = data;
                    // No finish_reason => intermediate chunk: forward its text
                    // to the token callback and keep reading.
                    if (!message.choices[0].finish_reason) {
                        let streamText;
                        if (this.proVersion) {
                            const messages = message.choices[0].messages ?? [];
                            streamText = messages[0].text;
                        }
                        else {
                            streamText = message.choices[0].delta;
                        }
                        // TODO this should pass part.index to the callback
                        // when that's supported there
                        // eslint-disable-next-line no-void
                        void runManager?.handleLLMNewToken(streamText ?? "");
                        return;
                    }
                    // Final chunk: resolve with the full message.
                    response = message;
                    if (!this.proVersion) {
                        // Non-pro final events carry the full text in `reply`.
                        response.choices[0].text = message.reply;
                    }
                    if (resolved || rejected) {
                        return;
                    }
                    resolved = true;
                    resolve(response);
                }).catch((error) => {
                    if (!rejected) {
                        rejected = true;
                        reject(error);
                    }
                });
            })
            : await this.completionWithRetry({
                ...params,
                messages: messagesMapped,
            }, false, options?.signal);
        const { total_tokens: totalTokens } = data.usage ?? {};
        if (totalTokens) {
            tokenUsage.totalTokens = totalTokens;
        }
        // A non-zero status_code signals an API-level failure.
        if (data.base_resp?.status_code !== 0) {
            throw new Error(`Minimax API error: ${data.base_resp?.status_msg}`);
        }
        const generations = [];
        if (this.proVersion) {
            for (const choice of data.choices) {
                const messages = choice.messages ?? [];
                // take the last message of the choice
                if (messages) {
                    const message = messages[messages.length - 1];
                    const text = message?.text ?? "";
                    generations.push({
                        text,
                        message: minimaxResponseToChatMessage(message),
                    });
                }
            }
        }
        else {
            // Non-pro responses put the text directly on the choice; wrap it
            // in a synthetic BOT message.
            for (const choice of data.choices) {
                const text = choice?.text ?? "";
                generations.push({
                    text,
                    message: minimaxResponseToChatMessage({
                        sender_type: "BOT",
                        sender_name: options?.defaultBotName ?? this.defaultBotName ?? "Assistant",
                        text,
                    }),
                });
            }
        }
        return {
            generations,
            llmOutput: { tokenUsage },
        };
    }
    /**
     * POSTs the request to the Minimax endpoint through the retry caller.
     * In streaming mode the response body is read as SSE: each `data:` line
     * is dispatched to `onmessage` and an empty object is returned (the
     * caller assembles the result from the events).
     * @ignore
     */
    async completionWithRetry(request, stream, signal, onmessage) {
        // The first run will get the accessToken
        const makeCompletionRequest = async () => {
            const url = `${this.apiUrl}?GroupId=${this.minimaxGroupId}`;
            const response = await fetch(url, {
                method: "POST",
                headers: {
                    "Content-Type": "application/json",
                    Authorization: `Bearer ${this.minimaxApiKey}`,
                    ...this.headers,
                },
                body: JSON.stringify(request),
                signal,
            });
            if (!stream) {
                const json = await response.json();
                return json;
            }
            else {
                if (response.body) {
                    const reader = response.body.getReader();
                    const decoder = new TextDecoder("utf-8");
                    // Accumulate decoded bytes and peel off complete lines;
                    // a partial line is kept until more data arrives.
                    let data = "";
                    let continueReading = true;
                    while (continueReading) {
                        const { done, value } = await reader.read();
                        if (done) {
                            continueReading = false;
                            break;
                        }
                        data += decoder.decode(value);
                        let continueProcessing = true;
                        while (continueProcessing) {
                            const newlineIndex = data.indexOf("\n");
                            if (newlineIndex === -1) {
                                continueProcessing = false;
                                break;
                            }
                            const line = data.slice(0, newlineIndex);
                            data = data.slice(newlineIndex + 1);
                            if (line.startsWith("data:")) {
                                const event = new MessageEvent("message", {
                                    data: line.slice("data:".length).trim(),
                                });
                                onmessage?.(event);
                            }
                        }
                    }
                    return {};
                }
                return {};
            }
        };
        return this.caller.call(makeCompletionRequest);
    }
    _llmType() {
        return "minimax";
    }
    /** @ignore */
    _combineLLMOutput() {
        return [];
    }
    /**
     * If no bot_setting was configured, synthesizes one from the last system
     * message in the conversation (system messages cannot be sent directly).
     */
    botSettingFallback(options, messages) {
        const botSettings = options?.botSetting ?? this.botSetting;
        if (!botSettings) {
            const systemMessages = messages?.filter((message) => {
                if (ChatMessage.isInstance(message)) {
                    return message.role === "system";
                }
                return message._getType() === "system";
            });
            // get the last system message
            if (!systemMessages?.length) {
                return;
            }
            const lastSystemMessage = systemMessages[systemMessages.length - 1];
            // setting the default botSetting.
            this.botSetting = [
                {
                    content: lastSystemMessage.content,
                    bot_name: options?.defaultBotName ?? this.defaultBotName ?? "Assistant",
                },
            ];
        }
    }
}
|
|
530
|
+
/**
 * Converts a raw Minimax response message into the matching LangChain
 * message class.
 * @param message Minimax message ({ sender_type, text, function_call? }).
 * @returns HumanMessage for "USER", AIMessage for "BOT" (carrying any
 *   function_call) and "FUNCTION", otherwise a generic ChatMessage tagged
 *   with the sender_type (or "unknown" when absent).
 */
function minimaxResponseToChatMessage(message) {
    const text = message.text || "";
    if (message.sender_type === "USER") {
        return new HumanMessage(text);
    }
    if (message.sender_type === "BOT") {
        return new AIMessage(text, {
            function_call: message.function_call,
        });
    }
    if (message.sender_type === "FUNCTION") {
        return new AIMessage(text);
    }
    return new ChatMessage(text, message.sender_type ?? "unknown");
}
|