langchain 0.0.21 → 0.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/chat_models.d.ts +1 -0
- package/chat_models.js +1 -0
- package/dist/cache.d.ts +1 -1
- package/dist/chains/chat_vector_db_chain.d.ts +4 -2
- package/dist/chains/chat_vector_db_chain.js +15 -1
- package/dist/chains/chat_vector_db_chain.js.map +1 -1
- package/dist/chains/llm_chain.d.ts +5 -4
- package/dist/chains/llm_chain.js +4 -5
- package/dist/chains/llm_chain.js.map +1 -1
- package/dist/chains/tests/chat_vector_db_qa_chain.int.test.js +18 -0
- package/dist/chains/tests/chat_vector_db_qa_chain.int.test.js.map +1 -1
- package/dist/chains/tests/combine_docs_chain.test.js.map +1 -1
- package/dist/chains/tests/llm_chain.int.test.js +14 -1
- package/dist/chains/tests/llm_chain.int.test.js.map +1 -1
- package/dist/chains/vector_db_qa.d.ts +2 -0
- package/dist/chains/vector_db_qa.js +14 -0
- package/dist/chains/vector_db_qa.js.map +1 -1
- package/dist/chat_models/base.d.ts +32 -0
- package/dist/chat_models/base.js +110 -0
- package/dist/chat_models/base.js.map +1 -0
- package/dist/chat_models/index.d.ts +1 -0
- package/dist/chat_models/index.js +2 -0
- package/dist/chat_models/index.js.map +1 -0
- package/dist/chat_models/openai.d.ts +150 -0
- package/dist/chat_models/openai.js +295 -0
- package/dist/chat_models/openai.js.map +1 -0
- package/dist/chat_models/tests/chatopenai.int.test.d.ts +1 -0
- package/dist/chat_models/tests/chatopenai.int.test.js +122 -0
- package/dist/chat_models/tests/chatopenai.int.test.js.map +1 -0
- package/dist/llms/base.d.ts +6 -2
- package/dist/llms/base.js +28 -5
- package/dist/llms/base.js.map +1 -1
- package/dist/llms/cohere.d.ts +2 -1
- package/dist/llms/cohere.js.map +1 -1
- package/dist/llms/hf.d.ts +20 -3
- package/dist/llms/hf.js +48 -4
- package/dist/llms/hf.js.map +1 -1
- package/dist/llms/index.d.ts +0 -35
- package/dist/llms/openai-chat.d.ts +3 -3
- package/dist/llms/openai.d.ts +3 -3
- package/dist/llms/openai.js.map +1 -1
- package/dist/llms/tests/huggingface_hub.int.test.js +1 -1
- package/dist/llms/tests/huggingface_hub.int.test.js.map +1 -1
- package/dist/llms/tests/openai.int.test.js +15 -0
- package/dist/llms/tests/openai.int.test.js.map +1 -1
- package/dist/prompts/base.d.ts +20 -3
- package/dist/prompts/base.js +24 -0
- package/dist/prompts/base.js.map +1 -1
- package/dist/prompts/chat.d.ts +74 -0
- package/dist/prompts/chat.js +179 -0
- package/dist/prompts/chat.js.map +1 -0
- package/dist/prompts/few_shot.d.ts +2 -2
- package/dist/prompts/few_shot.js +3 -3
- package/dist/prompts/few_shot.js.map +1 -1
- package/dist/prompts/index.d.ts +2 -1
- package/dist/prompts/index.js +2 -1
- package/dist/prompts/index.js.map +1 -1
- package/dist/prompts/prompt.d.ts +3 -3
- package/dist/prompts/prompt.js +4 -4
- package/dist/prompts/prompt.js.map +1 -1
- package/dist/prompts/template.d.ts +3 -0
- package/dist/prompts/template.js +5 -0
- package/dist/prompts/template.js.map +1 -1
- package/dist/prompts/tests/chat.test.d.ts +1 -0
- package/dist/prompts/tests/chat.test.js +101 -0
- package/dist/prompts/tests/chat.test.js.map +1 -0
- package/dist/prompts/tests/few_shot.test.js +19 -0
- package/dist/prompts/tests/few_shot.test.js.map +1 -1
- package/dist/schema/index.d.ts +79 -0
- package/dist/schema/index.js +53 -0
- package/dist/schema/index.js.map +1 -0
- package/dist/vectorstores/chroma.d.ts +3 -0
- package/dist/vectorstores/chroma.js +10 -14
- package/dist/vectorstores/chroma.js.map +1 -1
- package/dist/vectorstores/tests/chroma.test.d.ts +1 -0
- package/dist/vectorstores/tests/chroma.test.js +9 -0
- package/dist/vectorstores/tests/chroma.test.js.map +1 -0
- package/package.json +22 -8
- package/schema.d.ts +1 -0
- package/schema.js +1 -0
|
@@ -0,0 +1,295 @@
|
|
|
1
|
+
import { Configuration, OpenAIApi, } from "openai";
|
|
2
|
+
import { createParser } from "eventsource-parser";
|
|
3
|
+
import { backOff } from "exponential-backoff";
|
|
4
|
+
import fetchAdapter from "../util/axios-fetch-adapter.js";
|
|
5
|
+
import { BaseChatModel } from "./base.js";
|
|
6
|
+
import { AIChatMessage, ChatMessage, HumanChatMessage, SystemChatMessage, } from "../schema/index.js";
|
|
7
|
+
/**
 * Map an internal chat message type ("system" | "ai" | "human") to the role
 * string expected by the OpenAI chat completions API.
 * Throws for any other message type.
 */
function messageTypeToOpenAIRole(type) {
    const roleByType = {
        system: "system",
        ai: "assistant",
        human: "user",
    };
    const role = roleByType[type];
    if (role === undefined) {
        throw new Error(`Unknown message type: ${type}`);
    }
    return role;
}
|
|
19
|
+
/**
 * Convert a role/text pair from an OpenAI API response back into the
 * corresponding chat message class from the shared schema.
 * Unrecognized (or missing) roles fall back to a generic ChatMessage,
 * with "unknown" substituted for a nullish role.
 */
function openAIResponseToChatMessage(role, text) {
    if (role === "user") {
        return new HumanChatMessage(text);
    }
    if (role === "assistant") {
        return new AIChatMessage(text);
    }
    if (role === "system") {
        return new SystemChatMessage(text);
    }
    return new ChatMessage(text, role ?? "unknown");
}
|
|
31
|
+
/**
 * Wrapper around OpenAI large language models that use the Chat endpoint.
 *
 * To use you should have the `openai` package installed, with the
 * `OPENAI_API_KEY` environment variable set.
 *
 * @remarks
 * Any parameters that are valid to be passed to {@link
 * https://platform.openai.com/docs/api-reference/chat/create |
 * `openai.createChatCompletion`} can be passed through {@link modelKwargs},
 * even if not explicitly available on this class.
 *
 * @augments BaseChatModel
 * @augments OpenAIInput
 */
export class ChatOpenAI extends BaseChatModel {
    constructor(fields, configuration) {
        super(fields?.callbackManager, fields?.verbose);
        // Defaults mirror the OpenAI chat completions API defaults.
        this.temperature = 1;
        this.topP = 1;
        this.frequencyPenalty = 0;
        this.presencePenalty = 0;
        this.n = 1;
        this.logitBias = undefined;
        this.modelName = "gpt-3.5-turbo";
        this.modelKwargs = undefined;
        this.maxRetries = 6;
        this.stop = undefined;
        this.streaming = false;
        // NOTE(review): maxTokens is stored but never read from `fields` nor
        // forwarded by invocationParams() — confirm this is intentional.
        this.maxTokens = 256;
        // API clients are created lazily in completionWithRetry():
        // one for non-streaming (batch) requests, one for streaming requests.
        this.batchClient = undefined;
        this.streamingClient = undefined;
        this.clientConfig = undefined;
        const apiKey = fields?.openAIApiKey ?? process.env.OPENAI_API_KEY;
        if (!apiKey) {
            throw new Error("OpenAI API key not found");
        }
        this.modelName = fields?.modelName ?? this.modelName;
        this.modelKwargs = fields?.modelKwargs ?? {};
        this.maxRetries = fields?.maxRetries ?? this.maxRetries;
        this.temperature = fields?.temperature ?? this.temperature;
        this.topP = fields?.topP ?? this.topP;
        this.frequencyPenalty = fields?.frequencyPenalty ?? this.frequencyPenalty;
        this.presencePenalty = fields?.presencePenalty ?? this.presencePenalty;
        this.n = fields?.n ?? this.n;
        this.logitBias = fields?.logitBias;
        this.stop = fields?.stop;
        this.streaming = fields?.streaming ?? false;
        // Streaming only accumulates a single completion, so n > 1 is invalid.
        if (this.streaming && this.n > 1) {
            throw new Error("Cannot stream results when n > 1");
        }
        this.clientConfig = {
            apiKey,
            ...configuration,
        };
    }
    /**
     * Get the parameters used to invoke the model, keyed as the OpenAI
     * API expects them (snake_case). `modelKwargs` entries override the
     * explicit fields.
     */
    invocationParams() {
        return {
            model: this.modelName,
            temperature: this.temperature,
            top_p: this.topP,
            frequency_penalty: this.frequencyPenalty,
            presence_penalty: this.presencePenalty,
            n: this.n,
            logit_bias: this.logitBias,
            stop: this.stop,
            stream: this.streaming,
            ...this.modelKwargs,
        };
    }
    _identifyingParams() {
        return {
            model_name: this.modelName,
            ...this.invocationParams(),
            ...this.clientConfig,
        };
    }
    /**
     * Get the identifying parameters for the model.
     */
    identifyingParams() {
        return this._identifyingParams();
    }
    /**
     * Call out to OpenAI's chat endpoint with the given messages.
     *
     * @param messages - The messages to pass into the model.
     * @param [stop] - Optional list of stop words to use when generating.
     *   May not be combined with a `stop` set on the instance.
     *
     * @returns The full LLM output: one generation per returned choice
     *   (always exactly one in streaming mode).
     */
    async _generate(messages, stop) {
        if (this.stop && stop) {
            throw new Error("Stop found in input and default params");
        }
        const params = this.invocationParams();
        params.stop = stop ?? params.stop;
        const apiMessages = messages.map((message) => ({
            role: messageTypeToOpenAIRole(message._getType()),
            content: message.text,
        }));
        const { data } = await this.completionWithRetry({
            ...params,
            messages: apiMessages,
        });
        if (!params.stream) {
            // Batch mode: one generation per returned choice.
            const generations = data.choices.map((choice) => {
                const text = choice.message?.content ?? "";
                return {
                    text,
                    message: openAIResponseToChatMessage(choice.message?.role ?? undefined, text),
                };
            });
            return { generations };
        }
        // Streaming mode: accumulate SSE deltas into a single completion,
        // resolving when the "[DONE]" sentinel event arrives.
        let role = "assistant";
        const completion = await new Promise((resolve, reject) => {
            let accumulated = "";
            const parser = createParser((event) => {
                if (event.type !== "event") {
                    return;
                }
                if (event.data === "[DONE]") {
                    resolve(accumulated);
                    return;
                }
                const response = JSON.parse(event.data);
                const choice = response.choices[0];
                if (choice != null) {
                    accumulated += choice.delta?.content ?? "";
                    role = choice.delta?.role ?? role;
                    this.callbackManager.handleNewToken?.(choice.delta?.content ?? "", this.verbose);
                }
            });
            // `data` is a readable stream here; axios's types don't reflect that.
            const stream = data;
            stream.on("data", (chunk) => parser.feed(chunk.toString("utf-8")));
            stream.on("error", (error) => reject(error));
        });
        return {
            generations: [
                {
                    text: completion,
                    message: openAIResponseToChatMessage(role, completion),
                },
            ],
        };
    }
    /** @ignore */
    async completionWithRetry(request) {
        if (request.stream) {
            if (!this.streamingClient) {
                this.streamingClient = new OpenAIApi(new Configuration(this.clientConfig));
            }
        }
        else if (!this.batchClient) {
            // Non-streaming requests route through the fetch adapter.
            this.batchClient = new OpenAIApi(new Configuration({
                ...this.clientConfig,
                baseOptions: { adapter: fetchAdapter },
            }));
        }
        const client = !request.stream ? this.batchClient : this.streamingClient;
        const makeCompletionRequest = () => client.createChatCompletion(request, request.stream ? { responseType: "stream" } : undefined);
        // NOTE(review): backOff delays are in milliseconds, so 4ms start /
        // 10ms cap is an extremely short backoff window for rate-limit
        // retries — confirm these values are intended.
        return backOff(makeCompletionRequest, {
            startingDelay: 4,
            maxDelay: 10,
            numOfAttempts: this.maxRetries,
            // TODO(sean) pass custom retry function to check error types.
        });
    }
    /** Identifier for this model class. */
    _llmType() {
        return "openai";
    }
}
|
|
295
|
+
//# sourceMappingURL=openai.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"openai.js","sourceRoot":"","sources":["../../src/chat_models/openai.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,aAAa,EACb,SAAS,GAIV,MAAM,QAAQ,CAAC;AAEhB,OAAO,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAClD,OAAO,EAAE,OAAO,EAAE,MAAM,qBAAqB,CAAC;AAC9C,OAAO,YAAY,MAAM,gCAAgC,CAAC;AAC1D,OAAO,EAAE,aAAa,EAAE,MAAM,WAAW,CAAC;AAC1C,OAAO,EACL,aAAa,EAGb,WAAW,EAEX,gBAAgB,EAGhB,iBAAiB,GAClB,MAAM,oBAAoB,CAAC;AAE5B,SAAS,uBAAuB,CAC9B,IAAiB;IAEjB,QAAQ,IAAI,EAAE;QACZ,KAAK,QAAQ;YACX,OAAO,QAAQ,CAAC;QAClB,KAAK,IAAI;YACP,OAAO,WAAW,CAAC;QACrB,KAAK,OAAO;YACV,OAAO,MAAM,CAAC;QAChB;YACE,MAAM,IAAI,KAAK,CAAC,yBAAyB,IAAI,EAAE,CAAC,CAAC;KACpD;AACH,CAAC;AAED,SAAS,2BAA2B,CAClC,IAAuD,EACvD,IAAY;IAEZ,QAAQ,IAAI,EAAE;QACZ,KAAK,MAAM;YACT,OAAO,IAAI,gBAAgB,CAAC,IAAI,CAAC,CAAC;QACpC,KAAK,WAAW;YACd,OAAO,IAAI,aAAa,CAAC,IAAI,CAAC,CAAC;QACjC,KAAK,QAAQ;YACX,OAAO,IAAI,iBAAiB,CAAC,IAAI,CAAC,CAAC;QACrC;YACE,OAAO,IAAI,WAAW,CAAC,IAAI,EAAE,IAAI,IAAI,SAAS,CAAC,CAAC;KACnD;AACH,CAAC;AAuDD;;;;;;;;;;;;;;GAcG;AACH,MAAM,OAAO,UAAW,SAAQ,aAAa;IAiC3C,YACE,MAMC,EACD,aAAuC;QAEvC,KAAK,CAAC,MAAM,EAAE,eAAe,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC;QA1ClD;;;;mBAAc,CAAC;WAAC;QAEhB;;;;mBAAO,CAAC;WAAC;QAET;;;;mBAAmB,CAAC;WAAC;QAErB;;;;mBAAkB,CAAC;WAAC;QAEpB;;;;mBAAI,CAAC;WAAC;QAEN;;;;;WAAmC;QAEnC;;;;mBAAY,eAAe;WAAC;QAE5B;;;;;WAAqB;QAErB;;;;mBAAa,CAAC;WAAC;QAEf;;;;;WAAgB;QAEhB;;;;mBAAY,KAAK;WAAC;QAElB;;;;mBAAY,GAAG;WAAC;QAEhB,kCAAkC;QAClC;;;;;WAA+B;QAE/B,8BAA8B;QAC9B;;;;;WAAmC;QAEnC;;;;;WAA8C;QAc5C,MAAM,MAAM,GAAG,MAAM,EAAE,YAAY,IAAI,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC;QAClE,IAAI,CAAC,MAAM,EAAE;YACX,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;SAC7C;QAED,IAAI,CAAC,SAAS,GAAG,MAAM,EAAE,SAAS,IAAI,IAAI,CAAC,SAAS,CAAC;QACrD,IAAI,CAAC,WAAW,GAAG,MAAM,EAAE,WAAW,IAAI,EAAE,CAAC;QAC7C,IAAI,CAAC,UAAU,GAAG,MAAM,EAAE,UAAU,IAAI,IAAI,CAAC,UAAU,CAAC;QAExD,IAAI,CAAC,WAAW,GAAG,MAAM,EAAE,WAAW,IAAI,IAAI,CAAC,WAAW,CAAC;QAC3D,IAAI,CAAC,IAAI,GAAG,MAAM,EAAE,IAAI,IAAI,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI,CAAC,gBAAgB,GAAG,MAAM,EAAE,gBAAgB,IAAI,IAAI,CAAC,gBAAgB,CAAC;QAC1E,IAAI,CAAC,eAAe,GA
AG,MAAM,EAAE,eAAe,IAAI,IAAI,CAAC,eAAe,CAAC;QACvE,IAAI,CAAC,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC;QAC7B,IAAI,CAAC,SAAS,GAAG,MAAM,EAAE,SAAS,CAAC;QACnC,IAAI,CAAC,IAAI,GAAG,MAAM,EAAE,IAAI,CAAC;QAEzB,IAAI,CAAC,SAAS,GAAG,MAAM,EAAE,SAAS,IAAI,KAAK,CAAC;QAE5C,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,CAAC,CAAC,GAAG,CAAC,EAAE;YAChC,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;SACrD;QAED,IAAI,CAAC,YAAY,GAAG;YAClB,MAAM,EAAE,MAAM,EAAE,YAAY,IAAI,OAAO,CAAC,GAAG,CAAC,cAAc;YAC1D,GAAG,aAAa;SACjB,CAAC;IACJ,CAAC;IAED;;OAEG;IACH,gBAAgB;QACd,OAAO;YACL,KAAK,EAAE,IAAI,CAAC,SAAS;YACrB,WAAW,EAAE,IAAI,CAAC,WAAW;YAC7B,KAAK,EAAE,IAAI,CAAC,IAAI;YAChB,iBAAiB,EAAE,IAAI,CAAC,gBAAgB;YACxC,gBAAgB,EAAE,IAAI,CAAC,eAAe;YACtC,CAAC,EAAE,IAAI,CAAC,CAAC;YACT,UAAU,EAAE,IAAI,CAAC,SAAS;YAC1B,IAAI,EAAE,IAAI,CAAC,IAAI;YACf,MAAM,EAAE,IAAI,CAAC,SAAS;YACtB,GAAG,IAAI,CAAC,WAAW;SACpB,CAAC;IACJ,CAAC;IAED,kBAAkB;QAChB,OAAO;YACL,UAAU,EAAE,IAAI,CAAC,SAAS;YAC1B,GAAG,IAAI,CAAC,gBAAgB,EAAE;YAC1B,GAAG,IAAI,CAAC,YAAY;SACrB,CAAC;IACJ,CAAC;IAED;;OAEG;IACH,iBAAiB;QACf,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC;IACnC,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACH,KAAK,CAAC,SAAS,CACb,QAA2B,EAC3B,IAAe;QAEf,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,EAAE;YACrB,MAAM,IAAI,KAAK,CAAC,wCAAwC,CAAC,CAAC;SAC3D;QAED,MAAM,MAAM,GAAG,IAAI,CAAC,gBAAgB,EAAE,CAAC;QACvC,MAAM,CAAC,IAAI,GAAG,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC;QAElC,MAAM,EAAE,IAAI,EAAE,GAAG,MAAM,IAAI,CAAC,mBAAmB,CAAC;YAC9C,GAAG,MAAM;YACT,QAAQ,EAAE,QAAQ,CAAC,GAAG,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;gBACnC,IAAI,EAAE,uBAAuB,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC;gBACjD,OAAO,EAAE,OAAO,CAAC,IAAI;aACtB,CAAC,CAAC;SACJ,CAAC,CAAC;QAEH,IAAI,MAAM,CAAC,MAAM,EAAE;YACjB,IAAI,IAAI,GAA0C,WAAW,CAAC;YAC9D,MAAM,UAAU,GAAG,MAAM,IAAI,OAAO,CAAS,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBAC/D,IAAI,eAAe,GAAG,EAAE,CAAC;gBACzB,MAAM,MAAM,GAAG,YAAY,CAAC,CAAC,KAAK,EAAE,EAAE;oBACpC,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE;wBAC1B,IAAI,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;4BAC3B,OAAO,CAAC,eAAe,CAAC,CAAC;yBAC1B;6BAAM;4BACL,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAarC,CAAC;4BAEF,MAAM,IAAI,GA
AG,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;4BACjC,IAAI,IAAI,IAAI,IAAI,EAAE;gCAChB,eAAe,IAAI,IAAI,CAAC,KAAK,EAAE,OAAO,IAAI,EAAE,CAAC;gCAC7C,IAAI,GAAG,IAAI,CAAC,KAAK,EAAE,IAAI,IAAI,IAAI,CAAC;gCAChC,IAAI,CAAC,eAAe,CAAC,cAAc,EAAE,CACnC,IAAI,CAAC,KAAK,EAAE,OAAO,IAAI,EAAE,EACzB,IAAI,CAAC,OAAO,CACb,CAAC;6BACH;yBACF;qBACF;gBACH,CAAC,CAAC,CAAC;gBAEH,uCAAuC;gBACvC,MAAM,MAAM,GAAG,IAAkC,CAAC;gBAClD,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE,CACjC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CACpC,CAAC;gBACF,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;YAC/C,CAAC,CAAC,CAAC;YACH,OAAO;gBACL,WAAW,EAAE;oBACX;wBACE,IAAI,EAAE,UAAU;wBAChB,OAAO,EAAE,2BAA2B,CAAC,IAAI,EAAE,UAAU,CAAC;qBACvD;iBACF;aACF,CAAC;SACH;QACD,MAAM,WAAW,GAAqB,EAAE,CAAC;QACzC,KAAK,MAAM,IAAI,IAAI,IAAI,CAAC,OAAO,EAAE;YAC/B,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,EAAE,IAAI,IAAI,SAAS,CAAC;YAC7C,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,EAAE,OAAO,IAAI,EAAE,CAAC;YACzC,WAAW,CAAC,IAAI,CAAC;gBACf,IAAI;gBACJ,OAAO,EAAE,2BAA2B,CAAC,IAAI,EAAE,IAAI,CAAC;aACjD,CAAC,CAAC;SACJ;QACD,OAAO;YACL,WAAW;SACZ,CAAC;IACJ,CAAC;IAED,cAAc;IACd,KAAK,CAAC,mBAAmB,CAAC,OAAoC;QAC5D,IAAI,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE;YACxC,MAAM,YAAY,GAAG,IAAI,aAAa,CAAC;gBACrC,GAAG,IAAI,CAAC,YAAY;gBACpB,WAAW,EAAE,EAAE,OAAO,EAAE,YAAY,EAAE;aACvC,CAAC,CAAC;YACH,IAAI,CAAC,WAAW,GAAG,IAAI,SAAS,CAAC,YAAY,CAAC,CAAC;SAChD;QACD,IAAI,OAAO,CAAC,MAAM,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE;YAC3C,MAAM,YAAY,GAAG,IAAI,aAAa,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YAC1D,IAAI,CAAC,eAAe,GAAG,IAAI,SAAS,CAAC,YAAY,CAAC,CAAC;SACpD;QACD,MAAM,MAAM,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,eAAe,CAAC;QACzE,MAAM,qBAAqB,GAAG,KAAK,IAAI,EAAE,CACvC,MAAM,CAAC,oBAAoB,CACzB,OAAO,EACP,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,YAAY,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC,SAAS,CACxD,CAAC;QACJ,OAAO,OAAO,CAAC,qBAAqB,EAAE;YACpC,aAAa,EAAE,CAAC;YAChB,QAAQ,EAAE,EAAE;YACZ,aAAa,EAAE,IAAI,CAAC,UAAU;YAC9B,8DAA8D;SAC/D,CAAC,CAAC;IACL,CAAC;IAED,QAAQ;QACN,OAAO,QAAQ,CAAC;I
AClB,CAAC;CACF"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
import { test, expect } from "@jest/globals";
import { ChatOpenAI } from "../openai.js";
import { HumanChatMessage, SystemChatMessage } from "../../schema/index.js";
import { ChatPromptValue } from "../../prompts/chat.js";
import { PromptTemplate, ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate, } from "../../prompts/index.js";
import { LLMChain } from "../../chains/index.js";
// Smoke test: a single human message round-trips through the chat model.
test("Test ChatOpenAI", async () => {
    const chat = new ChatOpenAI({ modelName: "gpt-3.5-turbo", maxTokens: 10 });
    const res = await chat.call([new HumanChatMessage("Hello!")]);
    console.log({ res });
});
// A system message may precede the human message in a single call.
test("Test ChatOpenAI with SystemChatMessage", async () => {
    const chat = new ChatOpenAI({ modelName: "gpt-3.5-turbo", maxTokens: 10 });
    const systemMessage = new SystemChatMessage("You are to chat with a user.");
    const userMessage = new HumanChatMessage("Hello!");
    const res = await chat.call([systemMessage, userMessage]);
    console.log({ res });
});
// generate() with two prompts and n=2 yields a 2x2 grid of generations.
test("Test ChatOpenAI Generate", async () => {
    const chat = new ChatOpenAI({
        modelName: "gpt-3.5-turbo",
        maxTokens: 10,
        n: 2,
    });
    const message = new HumanChatMessage("Hello!");
    const res = await chat.generate([[message], [message]]);
    expect(res.generations.length).toBe(2);
    for (const generation of res.generations) {
        expect(generation.length).toBe(2);
        for (const message of generation) {
            console.log(message.text);
        }
    }
    console.log({ res });
});
// Streaming mode should invoke the token callback and produce the same
// final text as the accumulated streamed tokens.
test("Test ChatOpenAI in streaming mode", async () => {
    let nrNewTokens = 0;
    let streamedCompletion = "";
    const model = new ChatOpenAI({
        modelName: "gpt-3.5-turbo",
        streaming: true,
        callbackManager: {
            handleNewToken(token) {
                nrNewTokens += 1;
                streamedCompletion += token;
            },
        },
    });
    const res = await model.call([new HumanChatMessage("Hello!")]);
    console.log({ res });
    expect(nrNewTokens > 0).toBe(true);
    expect(res.text).toBe(streamedCompletion);
});
// generatePrompt() accepts ChatPromptValue inputs; one prompt, n=2.
test("Test ChatOpenAI prompt value", async () => {
    const chat = new ChatOpenAI({
        modelName: "gpt-3.5-turbo",
        maxTokens: 10,
        n: 2,
    });
    const message = new HumanChatMessage("Hello!");
    const res = await chat.generatePrompt([new ChatPromptValue([message])]);
    expect(res.generations.length).toBe(1);
    for (const generation of res.generations) {
        expect(generation.length).toBe(2);
        for (const g of generation) {
            console.log(g.text);
        }
    }
    console.log({ res });
});
// Mirrors the "getting started" docs: call() and generate() usage.
test("OpenAI Chat, docs, getting started", async () => {
    const chat = new ChatOpenAI({ temperature: 0 });
    const responseA = await chat.call([
        new HumanChatMessage("Translate this sentence from English to French. I love programming."),
    ]);
    console.log(responseA);
    const responseB = await chat.call([
        new SystemChatMessage("You are a helpful assistant that translates English to French."),
        new HumanChatMessage("Translate this sentence from English to French. I love programming."),
    ]);
    console.log(responseB);
    const responseC = await chat.generate([
        [
            new SystemChatMessage("You are a helpful assistant that translates English to French."),
            new HumanChatMessage("Translate this sentence from English to French. I love programming."),
        ],
        [
            new SystemChatMessage("You are a helpful assistant that translates English to French."),
            new HumanChatMessage("Translate this sentence from English to French. I love artificial intelligence."),
        ],
    ]);
    console.log(responseC);
});
// Mirrors the prompt-template docs: generatePrompt() and LLMChain usage.
test("OpenAI Chat, docs, prompt templates", async () => {
    const chat = new ChatOpenAI({ temperature: 0 });
    const systemPrompt = PromptTemplate.fromTemplate("You are a helpful assistant that translates {input_language} to {output_language}.");
    const chatPrompt = ChatPromptTemplate.fromPromptMessages([
        new SystemMessagePromptTemplate(systemPrompt),
        HumanMessagePromptTemplate.fromTemplate("{text}"),
    ]);
    const responseA = await chat.generatePrompt([
        await chatPrompt.formatPromptValue({
            input_language: "English",
            output_language: "French",
            text: "I love programming.",
        }),
    ]);
    console.log(responseA.generations);
    const chain = new LLMChain({
        prompt: chatPrompt,
        llm: chat,
    });
    const responseB = await chain.call({
        input_language: "English",
        output_language: "French",
        text: "I love programming.",
    });
    console.log(responseB);
});
//# sourceMappingURL=chatopenai.int.test.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"chatopenai.int.test.js","sourceRoot":"","sources":["../../../src/chat_models/tests/chatopenai.int.test.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AAC7C,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,uBAAuB,CAAC;AAC5E,OAAO,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAC;AACxD,OAAO,EACL,cAAc,EACd,kBAAkB,EAClB,0BAA0B,EAC1B,2BAA2B,GAC5B,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAE,QAAQ,EAAE,MAAM,uBAAuB,CAAC;AAEjD,IAAI,CAAC,iBAAiB,EAAE,KAAK,IAAI,EAAE;IACjC,MAAM,IAAI,GAAG,IAAI,UAAU,CAAC,EAAE,SAAS,EAAE,eAAe,EAAE,SAAS,EAAE,EAAE,EAAE,CAAC,CAAC;IAC3E,MAAM,OAAO,GAAG,IAAI,gBAAgB,CAAC,QAAQ,CAAC,CAAC;IAC/C,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;IACvC,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,wCAAwC,EAAE,KAAK,IAAI,EAAE;IACxD,MAAM,IAAI,GAAG,IAAI,UAAU,CAAC,EAAE,SAAS,EAAE,eAAe,EAAE,SAAS,EAAE,EAAE,EAAE,CAAC,CAAC;IAC3E,MAAM,cAAc,GAAG,IAAI,iBAAiB,CAAC,8BAA8B,CAAC,CAAC;IAC7E,MAAM,OAAO,GAAG,IAAI,gBAAgB,CAAC,QAAQ,CAAC,CAAC;IAC/C,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,IAAI,CAAC,CAAC,cAAc,EAAE,OAAO,CAAC,CAAC,CAAC;IACvD,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,0BAA0B,EAAE,KAAK,IAAI,EAAE;IAC1C,MAAM,IAAI,GAAG,IAAI,UAAU,CAAC;QAC1B,SAAS,EAAE,eAAe;QAC1B,SAAS,EAAE,EAAE;QACb,CAAC,EAAE,CAAC;KACL,CAAC,CAAC;IACH,MAAM,OAAO,GAAG,IAAI,gBAAgB,CAAC,QAAQ,CAAC,CAAC;IAC/C,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;IACxD,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IACvC,KAAK,MAAM,UAAU,IAAI,GAAG,CAAC,WAAW,EAAE;QACxC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAClC,KAAK,MAAM,OAAO,IAAI,UAAU,EAAE;YAChC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;SAC3B;KACF;IACD,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,mCAAmC,EAAE,KAAK,IAAI,EAAE;IACnD,IAAI,WAAW,GAAG,CAAC,CAAC;IACpB,IAAI,kBAAkB,GAAG,EAAE,CAAC;IAE5B,MAAM,KAAK,GAAG,IAAI,UAAU,CAAC;Q
AC3B,SAAS,EAAE,eAAe;QAC1B,SAAS,EAAE,IAAI;QACf,eAAe,EAAE;YACf,cAAc,CAAC,KAAK;gBAClB,WAAW,IAAI,CAAC,CAAC;gBACjB,kBAAkB,IAAI,KAAK,CAAC;YAC9B,CAAC;SACF;KACF,CAAC,CAAC;IACH,MAAM,OAAO,GAAG,IAAI,gBAAgB,CAAC,QAAQ,CAAC,CAAC;IAC/C,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;IACxC,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;IAErB,MAAM,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACnC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;AAC5C,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,8BAA8B,EAAE,KAAK,IAAI,EAAE;IAC9C,MAAM,IAAI,GAAG,IAAI,UAAU,CAAC;QAC1B,SAAS,EAAE,eAAe;QAC1B,SAAS,EAAE,EAAE;QACb,CAAC,EAAE,CAAC;KACL,CAAC,CAAC;IACH,MAAM,OAAO,GAAG,IAAI,gBAAgB,CAAC,QAAQ,CAAC,CAAC;IAC/C,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,CAAC,IAAI,eAAe,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;IACxE,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IACvC,KAAK,MAAM,UAAU,IAAI,GAAG,CAAC,WAAW,EAAE;QACxC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAClC,KAAK,MAAM,CAAC,IAAI,UAAU,EAAE;YAC1B,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;SACrB;KACF;IACD,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,oCAAoC,EAAE,KAAK,IAAI,EAAE;IACpD,MAAM,IAAI,GAAG,IAAI,UAAU,CAAC,EAAE,WAAW,EAAE,CAAC,EAAE,CAAC,CAAC;IAEhD,MAAM,SAAS,GAAG,MAAM,IAAI,CAAC,IAAI,CAAC;QAChC,IAAI,gBAAgB,CAClB,qEAAqE,CACtE;KACF,CAAC,CAAC;IAEH,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;IAEvB,MAAM,SAAS,GAAG,MAAM,IAAI,CAAC,IAAI,CAAC;QAChC,IAAI,iBAAiB,CACnB,gEAAgE,CACjE;QACD,IAAI,gBAAgB,CAClB,qEAAqE,CACtE;KACF,CAAC,CAAC;IAEH,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;IAEvB,MAAM,SAAS,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC;QACpC;YACE,IAAI,iBAAiB,CACnB,gEAAgE,CACjE;YACD,IAAI,gBAAgB,CAClB,qEAAqE,CACtE;SACF;QACD;YACE,IAAI,iBAAiB,CACnB,gEAAgE,CACjE;YACD,IAAI,gBAAgB,CAClB,iFAAiF,CAClF;SACF;KACF,CAAC,CAAC;IAEH,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;AACzB,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,qCAAqC,EAAE,KAAK,IAAI,EAAE;IACrD,MAAM,IAAI,GAAG,IAAI,UAAU,CAAC,EAAE,WAAW,EAAE,CAAC,EAAE,CAAC,CAAC;IAEhD,MAAM,YAAY,GA
AG,cAAc,CAAC,YAAY,CAC9C,oFAAoF,CACrF,CAAC;IAEF,MAAM,UAAU,GAAG,kBAAkB,CAAC,kBAAkB,CAAC;QACvD,IAAI,2BAA2B,CAAC,YAAY,CAAC;QAC7C,0BAA0B,CAAC,YAAY,CAAC,QAAQ,CAAC;KAClD,CAAC,CAAC;IAEH,MAAM,SAAS,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC;QAC1C,MAAM,UAAU,CAAC,iBAAiB,CAAC;YACjC,cAAc,EAAE,SAAS;YACzB,eAAe,EAAE,QAAQ;YACzB,IAAI,EAAE,qBAAqB;SAC5B,CAAC;KACH,CAAC,CAAC;IAEH,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IAEnC,MAAM,KAAK,GAAG,IAAI,QAAQ,CAAC;QACzB,MAAM,EAAE,UAAU;QAClB,GAAG,EAAE,IAAI;KACV,CAAC,CAAC;IAEH,MAAM,SAAS,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC;QACjC,cAAc,EAAE,SAAS;QACzB,eAAe,EAAE,QAAQ;QACzB,IAAI,EAAE,qBAAqB;KAC5B,CAAC,CAAC;IAEH,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC;AACzB,CAAC,CAAC,CAAC"}
|
package/dist/llms/base.d.ts
CHANGED
|
@@ -1,12 +1,13 @@
|
|
|
1
1
|
import PQueue from "p-queue";
|
|
2
|
-
import { LLMCallbackManager, LLMResult } from "
|
|
2
|
+
import { BaseLanguageModel, BasePromptValue, LLMCallbackManager, LLMResult } from "../schema/index.js";
|
|
3
3
|
export type SerializedLLM = {
|
|
4
|
+
_model: string;
|
|
4
5
|
_type: string;
|
|
5
6
|
} & Record<string, any>;
|
|
6
7
|
/**
|
|
7
8
|
* LLM Wrapper. Provides an {@link call} (an {@link generate}) function that takes in a prompt (or prompts) and returns a string.
|
|
8
9
|
*/
|
|
9
|
-
export declare abstract class BaseLLM {
|
|
10
|
+
export declare abstract class BaseLLM extends BaseLanguageModel {
|
|
10
11
|
/**
|
|
11
12
|
* The name of the LLM class
|
|
12
13
|
*/
|
|
@@ -24,6 +25,7 @@ export declare abstract class BaseLLM {
|
|
|
24
25
|
*/
|
|
25
26
|
verbose?: boolean;
|
|
26
27
|
constructor(callbackManager?: LLMCallbackManager, verbose?: boolean, concurrency?: number, cache?: boolean);
|
|
28
|
+
generatePrompt(promptValues: BasePromptValue[], stop?: string[]): Promise<LLMResult>;
|
|
27
29
|
/**
|
|
28
30
|
* Run the LLM on the given prompts and input.
|
|
29
31
|
*/
|
|
@@ -50,10 +52,12 @@ export declare abstract class BaseLLM {
|
|
|
50
52
|
* Return a json-like object representing this LLM.
|
|
51
53
|
*/
|
|
52
54
|
serialize(): SerializedLLM;
|
|
55
|
+
_modelType(): string;
|
|
53
56
|
/**
|
|
54
57
|
* Load an LLM from a json-like object describing it.
|
|
55
58
|
*/
|
|
56
59
|
static deserialize(data: SerializedLLM): Promise<BaseLLM>;
|
|
60
|
+
private _tokenizer?;
|
|
57
61
|
getNumTokens(text: string): number;
|
|
58
62
|
}
|
|
59
63
|
/**
|
package/dist/llms/base.js
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
|
-
import
|
|
1
|
+
import GPT3Tokenizer from "gpt3-tokenizer";
|
|
2
2
|
import PQueue from "p-queue";
|
|
3
3
|
import { getKey, InMemoryCache } from "../cache.js";
|
|
4
|
+
import { BaseLanguageModel, } from "../schema/index.js";
|
|
4
5
|
const getCallbackManager = () => ({
|
|
5
6
|
handleStart: (..._args) => {
|
|
6
7
|
// console.log(args);
|
|
@@ -17,8 +18,9 @@ const cache = new InMemoryCache();
|
|
|
17
18
|
/**
|
|
18
19
|
* LLM Wrapper. Provides an {@link call} (an {@link generate}) function that takes in a prompt (or prompts) and returns a string.
|
|
19
20
|
*/
|
|
20
|
-
export class BaseLLM {
|
|
21
|
+
export class BaseLLM extends BaseLanguageModel {
|
|
21
22
|
constructor(callbackManager, verbose, concurrency, cache) {
|
|
23
|
+
super();
|
|
22
24
|
/**
|
|
23
25
|
* The name of the LLM class
|
|
24
26
|
*/
|
|
@@ -65,12 +67,22 @@ export class BaseLLM {
|
|
|
65
67
|
writable: true,
|
|
66
68
|
value: false
|
|
67
69
|
});
|
|
70
|
+
Object.defineProperty(this, "_tokenizer", {
|
|
71
|
+
enumerable: true,
|
|
72
|
+
configurable: true,
|
|
73
|
+
writable: true,
|
|
74
|
+
value: void 0
|
|
75
|
+
});
|
|
68
76
|
this.callbackManager = callbackManager ?? getCallbackManager();
|
|
69
77
|
this.verbose = verbose ?? getVerbosity();
|
|
70
78
|
this.cache = cache;
|
|
71
79
|
this.concurrency = concurrency ?? Infinity;
|
|
72
80
|
this.queue = new PQueue({ concurrency: this.concurrency });
|
|
73
81
|
}
|
|
82
|
+
async generatePrompt(promptValues, stop) {
|
|
83
|
+
const prompts = promptValues.map((promptValue) => promptValue.toString());
|
|
84
|
+
return this.generate(prompts, stop);
|
|
85
|
+
}
|
|
74
86
|
/** @ignore */
|
|
75
87
|
async _generateUncached(prompts, stop) {
|
|
76
88
|
this.callbackManager.handleStart?.({ name: this.name }, prompts, this.verbose);
|
|
@@ -145,13 +157,20 @@ export class BaseLLM {
|
|
|
145
157
|
return {
|
|
146
158
|
...this._identifyingParams(),
|
|
147
159
|
_type: this._llmType(),
|
|
160
|
+
_model: this._modelType(),
|
|
148
161
|
};
|
|
149
162
|
}
|
|
163
|
+
_modelType() {
|
|
164
|
+
return "base_llm";
|
|
165
|
+
}
|
|
150
166
|
/**
|
|
151
167
|
* Load an LLM from a json-like object describing it.
|
|
152
168
|
*/
|
|
153
169
|
static async deserialize(data) {
|
|
154
|
-
const { _type, ...rest } = data;
|
|
170
|
+
const { _type, _model, ...rest } = data;
|
|
171
|
+
if (_model && _model !== "base_llm") {
|
|
172
|
+
throw new Error(`Cannot load LLM with model ${_model}`);
|
|
173
|
+
}
|
|
155
174
|
const Cls = {
|
|
156
175
|
openai: (await import("./openai.js")).OpenAI,
|
|
157
176
|
}[_type];
|
|
@@ -163,8 +182,12 @@ export class BaseLLM {
|
|
|
163
182
|
getNumTokens(text) {
|
|
164
183
|
// TODOs copied from py implementation
|
|
165
184
|
// TODO: this method may not be exact.
|
|
166
|
-
// TODO: this method may differ based on model (eg codex).
|
|
167
|
-
|
|
185
|
+
// TODO: this method may differ based on model (eg codex, gpt-3.5).
|
|
186
|
+
if (this._tokenizer === undefined) {
|
|
187
|
+
const Constructor = GPT3Tokenizer.default;
|
|
188
|
+
this._tokenizer = new Constructor({ type: "gpt3" });
|
|
189
|
+
}
|
|
190
|
+
return this._tokenizer.encode(text).bpe.length;
|
|
168
191
|
}
|
|
169
192
|
}
|
|
170
193
|
/**
|
package/dist/llms/base.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"base.js","sourceRoot":"","sources":["../../src/llms/base.ts"],"names":[],"mappings":"AAAA,OAAO,
|
|
1
|
+
{"version":3,"file":"base.js","sourceRoot":"","sources":["../../src/llms/base.ts"],"names":[],"mappings":"AAAA,OAAO,aAAa,MAAM,gBAAgB,CAAC;AAC3C,OAAO,MAAM,MAAM,SAAS,CAAC;AAE7B,OAAO,EAAa,MAAM,EAAE,aAAa,EAAE,MAAM,aAAa,CAAC;AAC/D,OAAO,EACL,iBAAiB,GAIlB,MAAM,oBAAoB,CAAC;AAE5B,MAAM,kBAAkB,GAAG,GAAuB,EAAE,CAAC,CAAC;IACpD,WAAW,EAAE,CAAC,GAAG,KAAK,EAAE,EAAE;QACxB,qBAAqB;IACvB,CAAC;IACD,SAAS,EAAE,CAAC,GAAG,KAAK,EAAE,EAAE;QACtB,qBAAqB;IACvB,CAAC;IACD,WAAW,EAAE,CAAC,GAAG,KAAK,EAAE,EAAE;QACxB,qBAAqB;IACvB,CAAC;CACF,CAAC,CAAC;AAEH,MAAM,YAAY,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC;AAEhC,MAAM,KAAK,GAAc,IAAI,aAAa,EAAE,CAAC;AAQ7C;;GAEG;AACH,MAAM,OAAgB,OAAQ,SAAQ,iBAAiB;IAuBrD,YACE,eAAoC,EACpC,OAAiB,EACjB,WAAoB,EACpB,KAAe;QAEf,KAAK,EAAE,CAAC;QA5BV;;WAEG;QACH;;;;;WAAa;QAEb;;;;;WAAgB;QAEhB;;;;;WAAoC;QAEpC;;;WAGG;QACH;;;;;WAAqB;QAErB;;;;;WAAwB;QAExB;;WAEG;QACH;;;;mBAAoB,KAAK;WAAC;QA+J1B;;;;;WAA2C;QAtJzC,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,kBAAkB,EAAE,CAAC;QAC/D,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,YAAY,EAAE,CAAC;QACzC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,WAAW,GAAG,WAAW,IAAI,QAAQ,CAAC;QAC3C,IAAI,CAAC,KAAK,GAAG,IAAI,MAAM,CAAC,EAAE,WAAW,EAAE,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;IAC7D,CAAC;IAED,KAAK,CAAC,cAAc,CAClB,YAA+B,EAC/B,IAAe;QAEf,MAAM,OAAO,GAAa,YAAY,CAAC,GAAG,CAAC,CAAC,WAAW,EAAE,EAAE,CACzD,WAAW,CAAC,QAAQ,EAAE,CACvB,CAAC;QACF,OAAO,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;IACtC,CAAC;IAOD,cAAc;IACd,KAAK,CAAC,iBAAiB,CACrB,OAAiB,EACjB,IAAe;QAEf,IAAI,CAAC,eAAe,CAAC,WAAW,EAAE,CAChC,EAAE,IAAI,EAAE,IAAI,CAAC,IAAI,EAAE,EACnB,OAAO,EACP,IAAI,CAAC,OAAO,CACb,CAAC;QACF,IAAI,MAAM,CAAC;QACX,IAAI;YACF,MAAM,GAAG,MAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,CAAC,EAAE;gBACjE,cAAc,EAAE,IAAI;aACrB,CAAC,CAAC;SACJ;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,CAAC,eAAe,CAAC,WAAW,EAAE,CAAC,GAAG,GAAG,EAAE,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAC3D,MAAM,GAAG,CAAC;SACX;QAED,IAAI,CAAC,eAAe,CAAC,SAAS,EAAE,CAAC,MAAM,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QACvD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,QAAQ,CAAC,OAAi
B,EAAE,IAAe;QAC/C,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;YAC3B,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC,CAAC;SACpE;QAED,IAAI,IAAI,CAAC,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,IAAI,EAAE;YACzC,MAAM,IAAI,KAAK,CAAC,qCAAqC,CAAC,CAAC;SACxD;QAED,IAAI,KAAK,KAAK,IAAI,IAAI,IAAI,CAAC,KAAK,KAAK,KAAK,EAAE;YAC1C,OAAO,IAAI,CAAC,iBAAiB,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;SAC9C;QAED,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,EAAE,CAAC;QAChC,MAAM,CAAC,IAAI,GAAG,IAAI,CAAC;QAEnB,MAAM,YAAY,GAAG,GAAG,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC;QACxD,MAAM,oBAAoB,GAAa,EAAE,CAAC;QAC1C,MAAM,WAAW,GAAG,MAAM,OAAO,CAAC,GAAG,CACnC,OAAO,CAAC,GAAG,CAAC,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,EAAE;YAClC,MAAM,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,MAAM,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC,CAAC;YAChE,IAAI,CAAC,MAAM,EAAE;gBACX,oBAAoB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;aAClC;YACD,OAAO,MAAM,CAAC;QAChB,CAAC,CAAC,CACH,CAAC;QAEF,IAAI,SAAS,GAAG,EAAE,CAAC;QACnB,IAAI,oBAAoB,CAAC,MAAM,GAAG,CAAC,EAAE;YACnC,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,iBAAiB,CAC1C,oBAAoB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAC3C,IAAI,CACL,CAAC;YACF,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,WAAW,CAAC,GAAG,CAAC,KAAK,EAAE,UAAU,EAAE,KAAK,EAAE,EAAE;gBAClD,MAAM,WAAW,GAAG,oBAAoB,CAAC,KAAK,CAAC,CAAC;gBAChD,WAAW,CAAC,WAAW,CAAC,GAAG,UAAU,CAAC;gBACtC,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE,YAAY,CAAC,CAAC;gBAC7D,KAAK,CAAC,MAAM,CAAC,GAAG,EAAE,UAAU,CAAC,CAAC;YAChC,CAAC,CAAC,CACH,CAAC;YACF,SAAS,GAAG,OAAO,CAAC,SAAS,IAAI,EAAE,CAAC;SACrC;QAED,OAAO,EAAE,WAAW,EAAE,SAAS,EAAe,CAAC;IACjD,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,IAAI,CAAC,MAAc,EAAE,IAAe;QACxC,MAAM,EAAE,WAAW,EAAE,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,EAAE,IAAI,CAAC,CAAC;QAC5D,OAAO,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IAChC,CAAC;IAED;;OAEG;IACH,8DAA8D;IAC9D,kBAAkB;QAChB,OAAO,EAAE,CAAC;IACZ,CAAC;IAOD;;OAEG;IACH,SAAS;QACP,OAAO;YACL,GAAG,IAAI,CAAC,kBAAkB,EAAE;YAC5B,KAAK,EAAE,IAAI,CAAC,QAAQ,EAAE;YACtB,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE;SAC1B,CAAC;IACJ,CAAC;IAED,UAAU;QACR,OAAO,UAAmB,CAAC;IAC7B,CAAC;IAED;;OAEG;IAC
H,MAAM,CAAC,KAAK,CAAC,WAAW,CAAC,IAAmB;QAC1C,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,GAAG,IAAI,CAAC;QACxC,IAAI,MAAM,IAAI,MAAM,KAAK,UAAU,EAAE;YACnC,MAAM,IAAI,KAAK,CAAC,8BAA8B,MAAM,EAAE,CAAC,CAAC;SACzD;QACD,MAAM,GAAG,GAAG;YACV,MAAM,EAAE,CAAC,MAAM,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC,MAAM;SAC7C,CAAC,KAAK,CAAC,CAAC;QACT,IAAI,GAAG,KAAK,SAAS,EAAE;YACrB,MAAM,IAAI,KAAK,CAAC,8BAA8B,KAAK,EAAE,CAAC,CAAC;SACxD;QACD,OAAO,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC;IACvB,CAAC;IAID,YAAY,CAAC,IAAY;QACvB,sCAAsC;QACtC,sCAAsC;QACtC,mEAAmE;QACnE,IAAI,IAAI,CAAC,UAAU,KAAK,SAAS,EAAE;YACjC,MAAM,WAAW,GAAG,aAAa,CAAC,OAAO,CAAC;YAC1C,IAAI,CAAC,UAAU,GAAG,IAAI,WAAW,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,CAAC;SACrD;QACD,OAAO,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC;IACjD,CAAC;CAGF;AAED;;;;;;GAMG;AACH,MAAM,OAAgB,GAAI,SAAQ,OAAO;IAMvC,KAAK,CAAC,SAAS,CAAC,OAAiB,EAAE,IAAe;QAChD,MAAM,WAAW,GAAG,EAAE,CAAC;QACvB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,IAAI,CAAC,EAAE;YAC1C,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE;gBACpE,cAAc,EAAE,IAAI;aACrB,CAAC,CAAC;YACH,WAAW,CAAC,IAAI,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;SAC9B;QACD,OAAO,EAAE,WAAW,EAAE,CAAC;IACzB,CAAC;CACF"}
|
package/dist/llms/cohere.d.ts
CHANGED
package/dist/llms/cohere.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"cohere.js","sourceRoot":"","sources":["../../src/llms/cohere.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,
|
|
1
|
+
{"version":3,"file":"cohere.js","sourceRoot":"","sources":["../../src/llms/cohere.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,YAAY,CAAC;AAgBjC,MAAM,OAAO,MAAO,SAAQ,GAAG;IAS7B,YACE,MAKC;QAED,KAAK,CACH,MAAM,EAAE,eAAe,EACvB,MAAM,EAAE,OAAO,EACf,MAAM,EAAE,WAAW,EACnB,MAAM,EAAE,KAAK,CACd,CAAC;QArBJ;;;;mBAAc,CAAC;WAAC;QAEhB;;;;mBAAY,GAAG;WAAC;QAEhB;;;;;WAAc;QAEd;;;;;WAAe;QAiBb,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC;QAE1C,IAAI,CAAC,MAAM,EAAE;YACX,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;SACvE;QAED,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,SAAS,GAAG,MAAM,EAAE,SAAS,IAAI,IAAI,CAAC,SAAS,CAAC;QACrD,IAAI,CAAC,WAAW,GAAG,MAAM,EAAE,WAAW,IAAI,IAAI,CAAC,WAAW,CAAC;QAC3D,IAAI,CAAC,KAAK,GAAG,MAAM,EAAE,KAAK,IAAI,IAAI,CAAC,KAAK,CAAC;IAC3C,CAAC;IAED,QAAQ;QACN,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,KAAK,CAAC,KAAK,CAAC,MAAc,EAAE,KAAgB;QAC1C,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,MAAM,CAAC,OAAO,EAAE,CAAC;QAE1C,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAEzB,mDAAmD;QACnD,MAAM,gBAAgB,GAAG,MAAM,MAAM,CAAC,QAAQ,CAAC;YAC7C,MAAM;YACN,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,UAAU,EAAE,IAAI,CAAC,SAAS;YAC1B,WAAW,EAAE,IAAI,CAAC,WAAW;SAC9B,CAAC,CAAC;QACH,IAAI;YACF,OAAO,gBAAgB,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;SAClD;QAAC,MAAM;YACN,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,CAAC;YAC9B,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAC;SAC9C;IACH,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,OAAO;QAGlB,IAAI;YACF,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,CAAC;YACtD,OAAO,EAAE,MAAM,EAAE,CAAC;SACnB;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,IAAI,KAAK,CACb,0EAA0E,CAC3E,CAAC;SACH;IACH,CAAC;CACF"}
|
package/dist/llms/hf.d.ts
CHANGED
|
@@ -1,11 +1,28 @@
|
|
|
1
|
-
import
|
|
2
|
-
import {
|
|
1
|
+
import { LLM } from "./index.js";
|
|
2
|
+
import { LLMCallbackManager } from "../schema/index.js";
|
|
3
3
|
interface HFInput {
|
|
4
4
|
/** Model to use */
|
|
5
5
|
model: string;
|
|
6
|
+
/** Sampling temperature to use */
|
|
7
|
+
temperature?: number;
|
|
8
|
+
/**
|
|
9
|
+
* Maximum number of tokens to generate in the completion.
|
|
10
|
+
*/
|
|
11
|
+
maxTokens?: number;
|
|
12
|
+
/** Total probability mass of tokens to consider at each step */
|
|
13
|
+
topP?: number;
|
|
14
|
+
/** Integer to define the top tokens considered within the sample operation to create new text. */
|
|
15
|
+
topK?: number;
|
|
16
|
+
/** Penalizes repeated tokens according to frequency */
|
|
17
|
+
frequencyPenalty?: number;
|
|
6
18
|
}
|
|
7
19
|
export declare class HuggingFaceInference extends LLM implements HFInput {
|
|
8
20
|
model: string;
|
|
21
|
+
temperature: number | undefined;
|
|
22
|
+
maxTokens: number | undefined;
|
|
23
|
+
topP: number | undefined;
|
|
24
|
+
topK: number | undefined;
|
|
25
|
+
frequencyPenalty: number | undefined;
|
|
9
26
|
constructor(fields?: Partial<HFInput> & {
|
|
10
27
|
callbackManager?: LLMCallbackManager;
|
|
11
28
|
verbose?: boolean;
|
|
@@ -15,7 +32,7 @@ export declare class HuggingFaceInference extends LLM implements HFInput {
|
|
|
15
32
|
_llmType(): string;
|
|
16
33
|
_call(prompt: string, _stop?: string[]): Promise<string>;
|
|
17
34
|
static imports(): Promise<{
|
|
18
|
-
|
|
35
|
+
HfInference: typeof import("@huggingface/inference").HfInference;
|
|
19
36
|
}>;
|
|
20
37
|
}
|
|
21
38
|
export {};
|