langchain 0.0.176 → 0.0.178
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/chat_models/iflytek_xinghuo/web.cjs +1 -0
- package/chat_models/iflytek_xinghuo/web.d.ts +1 -0
- package/chat_models/iflytek_xinghuo/web.js +1 -0
- package/chat_models/iflytek_xinghuo.cjs +1 -0
- package/chat_models/iflytek_xinghuo.d.ts +1 -0
- package/chat_models/iflytek_xinghuo.js +1 -0
- package/dist/chat_models/bedrock.cjs +25 -4
- package/dist/chat_models/bedrock.d.ts +2 -1
- package/dist/chat_models/bedrock.js +25 -4
- package/dist/chat_models/cloudflare_workersai.cjs +70 -24
- package/dist/chat_models/cloudflare_workersai.d.ts +6 -2
- package/dist/chat_models/cloudflare_workersai.js +71 -25
- package/dist/chat_models/iflytek_xinghuo/common.cjs +335 -0
- package/dist/chat_models/iflytek_xinghuo/common.d.ts +165 -0
- package/dist/chat_models/iflytek_xinghuo/common.js +331 -0
- package/dist/chat_models/iflytek_xinghuo/index.cjs +35 -0
- package/dist/chat_models/iflytek_xinghuo/index.d.ts +5 -0
- package/dist/chat_models/iflytek_xinghuo/index.js +28 -0
- package/dist/chat_models/iflytek_xinghuo/web.cjs +30 -0
- package/dist/chat_models/iflytek_xinghuo/web.d.ts +5 -0
- package/dist/chat_models/iflytek_xinghuo/web.js +26 -0
- package/dist/chat_models/llama_cpp.cjs +31 -79
- package/dist/chat_models/llama_cpp.d.ts +15 -58
- package/dist/chat_models/llama_cpp.js +32 -80
- package/dist/chat_models/openai.cjs +91 -6
- package/dist/chat_models/openai.d.ts +10 -0
- package/dist/chat_models/openai.js +91 -6
- package/dist/embeddings/hf.cjs +10 -1
- package/dist/embeddings/hf.d.ts +4 -2
- package/dist/embeddings/hf.js +10 -1
- package/dist/embeddings/llama_cpp.cjs +67 -0
- package/dist/embeddings/llama_cpp.d.ts +26 -0
- package/dist/embeddings/llama_cpp.js +63 -0
- package/dist/embeddings/ollama.cjs +7 -1
- package/dist/embeddings/ollama.js +7 -1
- package/dist/graphs/neo4j_graph.cjs +36 -5
- package/dist/graphs/neo4j_graph.js +14 -3
- package/dist/llms/bedrock.cjs +25 -3
- package/dist/llms/bedrock.d.ts +2 -1
- package/dist/llms/bedrock.js +25 -3
- package/dist/llms/cloudflare_workersai.cjs +59 -13
- package/dist/llms/cloudflare_workersai.d.ts +9 -3
- package/dist/llms/cloudflare_workersai.js +59 -13
- package/dist/llms/hf.cjs +10 -1
- package/dist/llms/hf.d.ts +3 -0
- package/dist/llms/hf.js +10 -1
- package/dist/llms/llama_cpp.cjs +25 -65
- package/dist/llms/llama_cpp.d.ts +7 -43
- package/dist/llms/llama_cpp.js +25 -65
- package/dist/load/import_constants.cjs +3 -0
- package/dist/load/import_constants.js +3 -0
- package/dist/prompts/chat.cjs +8 -0
- package/dist/prompts/chat.d.ts +5 -0
- package/dist/prompts/chat.js +8 -0
- package/dist/prompts/few_shot.cjs +162 -1
- package/dist/prompts/few_shot.d.ts +90 -2
- package/dist/prompts/few_shot.js +160 -0
- package/dist/prompts/index.cjs +2 -1
- package/dist/prompts/index.d.ts +1 -1
- package/dist/prompts/index.js +1 -1
- package/dist/retrievers/zep.cjs +26 -3
- package/dist/retrievers/zep.d.ts +11 -2
- package/dist/retrievers/zep.js +26 -3
- package/dist/util/bedrock.d.ts +2 -0
- package/dist/util/event-source-parse.cjs +20 -1
- package/dist/util/event-source-parse.d.ts +2 -0
- package/dist/util/event-source-parse.js +18 -0
- package/dist/util/iflytek_websocket_stream.cjs +81 -0
- package/dist/util/iflytek_websocket_stream.d.ts +27 -0
- package/dist/util/iflytek_websocket_stream.js +77 -0
- package/dist/util/llama_cpp.cjs +34 -0
- package/dist/util/llama_cpp.d.ts +46 -0
- package/dist/util/llama_cpp.js +28 -0
- package/dist/util/openai-format-fndef.cjs +81 -0
- package/dist/util/openai-format-fndef.d.ts +44 -0
- package/dist/util/openai-format-fndef.js +77 -0
- package/dist/util/openapi.d.ts +2 -2
- package/dist/vectorstores/pinecone.cjs +5 -5
- package/dist/vectorstores/pinecone.d.ts +2 -2
- package/dist/vectorstores/pinecone.js +5 -5
- package/embeddings/llama_cpp.cjs +1 -0
- package/embeddings/llama_cpp.d.ts +1 -0
- package/embeddings/llama_cpp.js +1 -0
- package/package.json +34 -5
package/dist/chat_models/iflytek_xinghuo/common.cjs
@@ -0,0 +1,335 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BaseChatIflytekXinghuo = void 0;
+const index_js_1 = require("../../schema/index.cjs");
+const env_js_1 = require("../../util/env.cjs");
+const stream_js_1 = require("../../util/stream.cjs");
+const base_js_1 = require("../base.cjs");
+/**
+ * Function that extracts the custom role of a generic chat message.
+ * @param message Chat message from which to extract the custom role.
+ * @returns The custom role of the chat message.
+ */
+function extractGenericMessageCustomRole(message) {
+    if (message.role !== "assistant" && message.role !== "user") {
+        console.warn(`Unknown message role: ${message.role}`);
+    }
+    return message.role;
+}
+/**
+ * Function that converts a base message to a Xinghuo message role.
+ * @param message Base message to convert.
+ * @returns The Xinghuo message role.
+ */
+function messageToXinghuoRole(message) {
+    const type = message._getType();
+    switch (type) {
+        case "ai":
+            return "assistant";
+        case "human":
+            return "user";
+        case "system":
+            throw new Error("System messages should not be here");
+        case "function":
+            throw new Error("Function messages not supported");
+        case "generic": {
+            if (!index_js_1.ChatMessage.isInstance(message))
+                throw new Error("Invalid generic chat message");
+            return extractGenericMessageCustomRole(message);
+        }
+        default:
+            throw new Error(`Unknown message type: ${type}`);
+    }
+}
+/**
+ * Wrapper around IflytekXingHuo large language models that use the Chat endpoint.
+ *
+ * To use you should have the `IFLYTEK_API_KEY` and `IFLYTEK_API_SECRET` and `IFLYTEK_APPID`
+ * environment variable set.
+ *
+ * @augments BaseChatModel
+ * @augments IflytekXinghuoChatInput
+ */
+class BaseChatIflytekXinghuo extends base_js_1.BaseChatModel {
+    static lc_name() {
+        return "ChatIflytekXinghuo";
+    }
+    get callKeys() {
+        return ["stop", "signal", "options"];
+    }
+    get lc_secrets() {
+        return {
+            iflytekApiKey: "IFLYTEK_API_KEY",
+            iflytekApiSecret: "IFLYTEK_API_SECRET",
+        };
+    }
+    get lc_aliases() {
+        return undefined;
+    }
+    constructor(fields) {
+        super(fields ?? {});
+        Object.defineProperty(this, "lc_serializable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: true
+        });
+        Object.defineProperty(this, "version", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "v2.1"
+        });
+        Object.defineProperty(this, "iflytekAppid", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "iflytekApiKey", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "iflytekApiSecret", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "userId", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "apiUrl", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "domain", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "temperature", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 0.5
+        });
+        Object.defineProperty(this, "max_tokens", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 2048
+        });
+        Object.defineProperty(this, "top_k", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 4
+        });
+        Object.defineProperty(this, "streaming", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
+        const iflytekAppid = fields?.iflytekAppid ?? (0, env_js_1.getEnvironmentVariable)("IFLYTEK_APPID");
+        if (!iflytekAppid) {
+            throw new Error("Iflytek APPID not found");
+        }
+        else {
+            this.iflytekAppid = iflytekAppid;
+        }
+        const iflytekApiKey = fields?.iflytekApiKey ?? (0, env_js_1.getEnvironmentVariable)("IFLYTEK_API_KEY");
+        if (!iflytekApiKey) {
+            throw new Error("Iflytek API key not found");
+        }
+        else {
+            this.iflytekApiKey = iflytekApiKey;
+        }
+        const iflytekApiSecret = fields?.iflytekApiSecret ?? (0, env_js_1.getEnvironmentVariable)("IFLYTEK_API_SECRET");
+        if (!iflytekApiSecret) {
+            throw new Error("Iflytek API secret not found");
+        }
+        else {
+            this.iflytekApiSecret = iflytekApiSecret;
+        }
+        this.userId = fields?.userId ?? this.userId;
+        this.streaming = fields?.streaming ?? this.streaming;
+        this.temperature = fields?.temperature ?? this.temperature;
+        this.max_tokens = fields?.max_tokens ?? this.max_tokens;
+        this.top_k = fields?.top_k ?? this.top_k;
+        this.version = fields?.version ?? this.version;
+        if (["v1.1", "v2.1", "v3.1"].includes(this.version)) {
+            switch (this.version) {
+                case "v1.1":
+                    this.domain = "general";
+                    break;
+                case "v2.1":
+                    this.domain = "generalv2";
+                    break;
+                case "v3.1":
+                    this.domain = "generalv3";
+                    break;
+                default:
+                    this.domain = "generalv2";
+            }
+            this.apiUrl = `wss://spark-api.xf-yun.com/${this.version}/chat`;
+        }
+        else {
+            throw new Error(`Invalid model version: ${this.version}`);
+        }
+    }
+    /**
+     * Get the identifying parameters for the model
+     */
+    identifyingParams() {
+        return {
+            version: this.version,
+            ...this.invocationParams(),
+        };
+    }
+    /**
+     * Get the parameters used to invoke the model
+     */
+    invocationParams() {
+        return {
+            streaming: this.streaming,
+            temperature: this.temperature,
+            top_k: this.top_k,
+        };
+    }
+    async completion(request, stream, signal) {
+        const webSocketStream = await this.openWebSocketStream({
+            signal,
+        });
+        const connection = await webSocketStream.connection;
+        const header = {
+            app_id: this.iflytekAppid,
+            uid: this.userId,
+        };
+        const parameter = {
+            chat: {
+                domain: this.domain,
+                temperature: request.temperature ?? this.temperature,
+                max_tokens: request.max_tokens ?? this.max_tokens,
+                top_k: request.top_k ?? this.top_k,
+            },
+        };
+        const payload = {
+            message: {
+                text: request.messages,
+            },
+        };
+        const message = JSON.stringify({
+            header,
+            parameter,
+            payload,
+        });
+        const { writable, readable } = connection;
+        const writer = writable.getWriter();
+        await writer.write(message);
+        const streams = stream_js_1.IterableReadableStream.fromReadableStream(readable);
+        if (stream) {
+            return streams;
+        }
+        else {
+            let response = { result: "" };
+            for await (const chunk of streams) {
+                const data = JSON.parse(chunk);
+                const { header, payload } = data;
+                if (header.code === 0) {
+                    if (header.status === 0) {
+                        response.result = payload.choices?.text[0]?.content ?? "";
+                    }
+                    else if (header.status === 1) {
+                        response.result += payload.choices?.text[0]?.content ?? "";
+                    }
+                    else if (header.status === 2) {
+                        response = { ...response, usage: payload.usage?.text };
+                        break;
+                    }
+                }
+                else {
+                    break;
+                }
+            }
+            void streams.cancel();
+            void webSocketStream.close();
+            return response;
+        }
+    }
+    async _generate(messages, options, runManager) {
+        const tokenUsage = {};
+        const params = this.invocationParams();
+        const messagesMapped = messages.map((message) => ({
+            role: messageToXinghuoRole(message),
+            content: message.content,
+        }));
+        const data = params.streaming
+            ? await (async () => {
+                const streams = await this.completion({ messages: messagesMapped, ...params }, true, options.signal);
+                let response = { result: "" };
+                for await (const chunk of streams) {
+                    const data = JSON.parse(chunk);
+                    const { header, payload } = data;
+                    if (header.code === 0) {
+                        if (header.status === 0) {
+                            response.result = payload.choices?.text[0]?.content ?? "";
+                        }
+                        else if (header.status === 1) {
+                            response.result += payload.choices?.text[0]?.content ?? "";
+                        }
+                        else if (header.status === 2) {
+                            response = { ...response, usage: payload.usage?.text };
+                            break;
+                        }
+                        void runManager?.handleLLMNewToken(payload.choices?.text[0]?.content);
+                    }
+                    else {
+                        break;
+                    }
+                }
+                void streams.cancel();
+                return response;
+            })()
+            : await this.completion({ messages: messagesMapped, ...params }, false, options.signal);
+        const { completion_tokens: completionTokens, prompt_tokens: promptTokens, total_tokens: totalTokens, } = data.usage ?? {};
+        if (completionTokens) {
+            tokenUsage.completionTokens =
+                (tokenUsage.completionTokens ?? 0) + completionTokens;
+        }
+        if (promptTokens) {
+            tokenUsage.promptTokens = (tokenUsage.promptTokens ?? 0) + promptTokens;
+        }
+        if (totalTokens) {
+            tokenUsage.totalTokens = (tokenUsage.totalTokens ?? 0) + totalTokens;
+        }
+        const generations = [];
+        const text = data.result ?? "";
+        generations.push({
+            text,
+            message: new index_js_1.AIMessage(text),
+        });
+        return {
+            generations,
+            llmOutput: { tokenUsage },
+        };
+    }
+    /** @ignore */
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    _combineLLMOutput() {
+        return [];
+    }
+    _llmType() {
+        return "iflytek_xinghuo";
+    }
+}
+exports.BaseChatIflytekXinghuo = BaseChatIflytekXinghuo;
package/dist/chat_models/iflytek_xinghuo/common.d.ts
@@ -0,0 +1,165 @@
+import { CallbackManagerForLLMRun } from "../../callbacks/manager.js";
+import { BaseMessage, ChatResult } from "../../schema/index.js";
+import { IterableReadableStream } from "../../util/stream.js";
+import { BaseChatModel, BaseChatModelParams } from "../base.js";
+import { BaseWebSocketStream, WebSocketStreamOptions } from "../../util/iflytek_websocket_stream.js";
+/**
+ * Type representing the role of a message in the Xinghuo chat model.
+ */
+export type XinghuoMessageRole = "assistant" | "user";
+/**
+ * Interface representing a message in the Xinghuo chat model.
+ */
+interface XinghuoMessage {
+    role: XinghuoMessageRole;
+    content: string;
+}
+/**
+ * Interface representing a request for a chat completion.
+ */
+interface ChatCompletionRequest {
+    messages: XinghuoMessage[];
+    temperature?: number;
+    max_tokens?: number;
+    top_k?: number;
+    chat_id?: string;
+}
+export interface ChatCompletionChunk {
+    header: {
+        code: number;
+        message: string;
+        sid: string;
+        status: number;
+    };
+    payload: {
+        choices: {
+            status: number;
+            seq: number;
+            text: {
+                content: string;
+                role: XinghuoMessageRole;
+                index: number;
+            }[];
+        };
+        usage?: {
+            text: {
+                question_tokens: number;
+                prompt_tokens: number;
+                completion_tokens: number;
+                total_tokens: number;
+            };
+        };
+    };
+}
+/**
+ * Interface representing a response from a chat completion.
+ */
+interface ChatCompletionResponse {
+    result: string;
+    usage?: {
+        completion_tokens: number;
+        prompt_tokens: number;
+        total_tokens: number;
+    };
+}
+declare interface IflytekXinghuoChatInput {
+    /** Model version to use. Available options are: v1.1, v2.1, v3.1
+     * @default "v2.1"
+     */
+    version: string;
+    /**
+     * ID of the end-user who made requests.
+     */
+    userId?: string;
+    /**
+     * APPID to use when making requests. Defaults to the value of
+     * `IFLYTEK_APPID` environment variable.
+     */
+    iflytekAppid?: string;
+    /**
+     * API key to use when making requests. Defaults to the value of
+     * `IFLYTEK_API_KEY` environment variable.
+     */
+    iflytekApiKey?: string;
+    /**
+     * API Secret to use when making requests. Defaults to the value of
+     * `IFLYTEK_API_SECRET` environment variable.
+     */
+    iflytekApiSecret?: string;
+    /** Amount of randomness injected into the response. Ranges
+     * from 0 to 1 (0 is not included). Use temp closer to 0 for analytical /
+     * multiple choice, and temp closer to 1 for creative
+     * and generative tasks. Defaults to 0.5.
+     */
+    temperature?: number;
+    max_tokens?: number;
+    top_k?: number;
+    streaming?: boolean;
+}
+/**
+ * Wrapper around IflytekXingHuo large language models that use the Chat endpoint.
+ *
+ * To use you should have the `IFLYTEK_API_KEY` and `IFLYTEK_API_SECRET` and `IFLYTEK_APPID`
+ * environment variable set.
+ *
+ * @augments BaseChatModel
+ * @augments IflytekXinghuoChatInput
+ */
+export declare abstract class BaseChatIflytekXinghuo extends BaseChatModel implements IflytekXinghuoChatInput {
+    static lc_name(): string;
+    get callKeys(): string[];
+    get lc_secrets(): {
+        [key: string]: string;
+    } | undefined;
+    get lc_aliases(): {
+        [key: string]: string;
+    } | undefined;
+    lc_serializable: boolean;
+    version: string;
+    iflytekAppid: string;
+    iflytekApiKey: string;
+    iflytekApiSecret: string;
+    userId?: string;
+    apiUrl: string;
+    domain: string;
+    temperature: number;
+    max_tokens: number;
+    top_k: number;
+    streaming: boolean;
+    constructor(fields?: Partial<IflytekXinghuoChatInput> & BaseChatModelParams);
+    /**
+     * Get the identifying parameters for the model
+     */
+    identifyingParams(): {
+        max_tokens?: number | undefined;
+        temperature?: number | undefined;
+        top_k?: number | undefined;
+        chat_id?: string | undefined;
+        streaming: boolean;
+        version: string;
+    };
+    /**
+     * Get the parameters used to invoke the model
+     */
+    invocationParams(): Omit<ChatCompletionRequest, "messages"> & {
+        streaming: boolean;
+    };
+    /**
+     * Method that retrieves the auth websocketStream for making requests to the Iflytek Xinghuo API.
+     * @returns The auth websocketStream for making requests to the Iflytek Xinghuo API.
+     */
+    abstract openWebSocketStream<T extends BaseWebSocketStream<string>>(options: WebSocketStreamOptions): Promise<T>;
+    /**
+     * Calls the Xinghuo API completion.
+     * @param request The request to send to the Xinghuo API.
+     * @param signal The signal for the API call.
+     * @returns The response from the Xinghuo API.
+     */
+    completion(request: ChatCompletionRequest, stream: true, signal?: AbortSignal): Promise<IterableReadableStream<string>>;
+    completion(request: ChatCompletionRequest, stream: false, signal?: AbortSignal): Promise<ChatCompletionResponse>;
+    _generate(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun | undefined): Promise<ChatResult>;
+    /** @ignore */
+    _combineLLMOutput(): Record<string, any> | undefined;
+    _llmType(): string;
+}
+export {};
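
For orientation, a minimal usage sketch of the chat model these new files add. This is an illustration rather than part of the diff: it assumes the new `langchain/chat_models/iflytek_xinghuo` entrypoint listed above, a concrete `ChatIflytekXinghuo` export built on `BaseChatIflytekXinghuo`, and that `IFLYTEK_APPID`, `IFLYTEK_API_KEY`, and `IFLYTEK_API_SECRET` are set in the environment.

// Hypothetical sketch; option names follow the IflytekXinghuoChatInput interface above.
import { ChatIflytekXinghuo } from "langchain/chat_models/iflytek_xinghuo";
import { HumanMessage } from "langchain/schema";

async function main() {
    // "v2.1" maps to the "generalv2" domain in the constructor shown in common.cjs
    const model = new ChatIflytekXinghuo({ version: "v2.1", temperature: 0.5, max_tokens: 2048 });
    const response = await model.call([new HumanMessage("Hello, Xinghuo!")]);
    console.log(response.content);
}

main().catch(console.error);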