@langchain/core 0.2.5 → 0.2.6
This diff shows the changes between these two publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
- package/README.md +1 -1
- package/dist/callbacks/promises.cjs +1 -1
- package/dist/callbacks/promises.d.ts +1 -1
- package/dist/callbacks/promises.js +1 -1
- package/dist/language_models/chat_models.d.ts +1 -1
- package/dist/prompts/chat.cjs +18 -4
- package/dist/prompts/chat.js +19 -5
- package/dist/prompts/tests/chat.mustache.test.js +41 -1
- package/package.json +1 -1
package/README.md
CHANGED
```diff
@@ -1,6 +1,6 @@
 # 🦜🍎️ @langchain/core
 
-[](https://github.com/langchain-ai/langchainjs/actions/workflows/ci.yml) ](https://github.com/langchain-ai/langchainjs/actions/workflows/ci.yml)  [](https://opensource.org/licenses/MIT) [](https://twitter.com/langchainai) [](https://discord.gg/6adMQxSpJS)
 
 `@langchain/core` contains the core abstractions and schemas of LangChain.js, including base classes for language models,
 chat models, vectorstores, retrievers, and runnables.
```
package/dist/callbacks/promises.cjs
CHANGED
```diff
@@ -20,7 +20,7 @@ function createQueue() {
 }
 /**
  * Consume a promise, either adding it to the queue or waiting for it to resolve
- * @param
+ * @param promiseFn Promise to consume
  * @param wait Whether to wait for the promise to resolve or resolve immediately
  */
 async function consumeCallback(promiseFn, wait) {
```
package/dist/callbacks/promises.d.ts
CHANGED
```diff
@@ -1,6 +1,6 @@
 /**
  * Consume a promise, either adding it to the queue or waiting for it to resolve
- * @param
+ * @param promiseFn Promise to consume
  * @param wait Whether to wait for the promise to resolve or resolve immediately
  */
 export declare function consumeCallback<T>(promiseFn: () => Promise<T> | T | void, wait: boolean): Promise<void>;
```
package/dist/callbacks/promises.js
CHANGED
```diff
@@ -14,7 +14,7 @@ function createQueue() {
 }
 /**
  * Consume a promise, either adding it to the queue or waiting for it to resolve
- * @param
+ * @param promiseFn Promise to consume
  * @param wait Whether to wait for the promise to resolve or resolve immediately
  */
 export async function consumeCallback(promiseFn, wait) {
```
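Only the JSDoc for `consumeCallback` changes in the three files above; its behavior is unchanged. For context, a minimal sketch of how the helper is called, based on the signature in the `.d.ts` above (the `@langchain/core/callbacks/promises` import path and the logging are illustrative assumptions, not part of this diff):

```ts
import { consumeCallback } from "@langchain/core/callbacks/promises";

async function main() {
  // wait = true: the promise is awaited in place before continuing.
  await consumeCallback(async () => {
    console.log("handler work, awaited inline");
  }, true);

  // wait = false: the work is handed to the internal callback queue
  // and this call resolves immediately.
  await consumeCallback(() => {
    console.log("handler work, queued in the background");
  }, false);
}

void main();
```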
package/dist/language_models/chat_models.d.ts
CHANGED
```diff
@@ -78,7 +78,7 @@ export declare abstract class BaseChatModel<CallOptions extends BaseChatModelCal
     invoke(input: BaseLanguageModelInput, options?: CallOptions): Promise<OutputMessageType>;
     _streamResponseChunks(_messages: BaseMessage[], _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
     _streamIterator(input: BaseLanguageModelInput, options?: CallOptions): AsyncGenerator<OutputMessageType>;
-
+    getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
     /** @ignore */
     _generateUncached(messages: BaseMessageLike[][], parsedOptions: this["ParsedCallOptions"], handledOptions: RunnableConfig): Promise<LLMResult>;
     _generateCached({ messages, cache, llmStringKey, parsedOptions, handledOptions, }: ChatModelGenerateCachedParameters<typeof this>): Promise<LLMResult & {
```
package/dist/prompts/chat.cjs
CHANGED
```diff
@@ -301,14 +301,20 @@ class _StringImageMessagePromptTemplate extends BaseMessagePromptTemplate {
     else if (typeof item.text === "string") {
         text = item.text ?? "";
     }
-    prompt.push(prompt_js_1.PromptTemplate.fromTemplate(text));
+    prompt.push(prompt_js_1.PromptTemplate.fromTemplate(text, additionalOptions));
 }
 else if (typeof item === "object" && "image_url" in item) {
     let imgTemplate = item.image_url ?? "";
     let imgTemplateObject;
     let inputVariables = [];
     if (typeof imgTemplate === "string") {
-
+        let parsedTemplate;
+        if (additionalOptions?.templateFormat === "mustache") {
+            parsedTemplate = (0, template_js_1.parseMustache)(imgTemplate);
+        }
+        else {
+            parsedTemplate = (0, template_js_1.parseFString)(imgTemplate);
+        }
         const variables = parsedTemplate.flatMap((item) => item.type === "variable" ? [item.name] : []);
         if ((variables?.length ?? 0) > 0) {
             if (variables.length > 1) {
@@ -327,7 +333,13 @@ class _StringImageMessagePromptTemplate extends BaseMessagePromptTemplate {
 }
 else if (typeof imgTemplate === "object") {
     if ("url" in imgTemplate) {
-
+        let parsedTemplate;
+        if (additionalOptions?.templateFormat === "mustache") {
+            parsedTemplate = (0, template_js_1.parseMustache)(imgTemplate.url);
+        }
+        else {
+            parsedTemplate = (0, template_js_1.parseFString)(imgTemplate.url);
+        }
         inputVariables = parsedTemplate.flatMap((item) => item.type === "variable" ? [item.name] : []);
     }
     else {
@@ -600,7 +612,9 @@ class ChatPromptTemplate extends BaseChatPromptTemplate {
 else {
     imageUrl = item.image_url.url;
 }
-const promptTemplatePlaceholder = prompt_js_1.PromptTemplate.fromTemplate(imageUrl);
+const promptTemplatePlaceholder = prompt_js_1.PromptTemplate.fromTemplate(imageUrl, {
+    templateFormat: this.templateFormat,
+});
 const formattedUrl = await promptTemplatePlaceholder.format(inputValues);
 if (typeof item.image_url !== "string" && "url" in item.image_url) {
     // eslint-disable-next-line no-param-reassign
```
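The net effect of the chat.cjs changes above is that the caller's `templateFormat` now reaches the nested `PromptTemplate` instances used for text and image URL parts. A minimal consumer-side sketch of the forwarded option (the variable name and URL below are made up for illustration):

```ts
import { PromptTemplate } from "@langchain/core/prompts";

// With templateFormat: "mustache", {{...}} placeholders are parsed as
// variables instead of being treated as literal text.
const urlTemplate = PromptTemplate.fromTemplate("{{image_url}}", {
  templateFormat: "mustache",
});

const rendered = await urlTemplate.format({
  image_url: "https://example.com/cat.png", // hypothetical value
});
// rendered === "https://example.com/cat.png"
```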
package/dist/prompts/chat.js
CHANGED
```diff
@@ -7,7 +7,7 @@ import { BaseStringPromptTemplate } from "./string.js";
 import { BasePromptTemplate, } from "./base.js";
 import { PromptTemplate, } from "./prompt.js";
 import { ImagePromptTemplate } from "./image.js";
-import { parseFString } from "./template.js";
+import { parseFString, parseMustache, } from "./template.js";
 /**
  * Abstract class that serves as a base for creating message prompt
  * templates. It defines how to format messages for different roles in a
@@ -293,14 +293,20 @@ class _StringImageMessagePromptTemplate extends BaseMessagePromptTemplate {
     else if (typeof item.text === "string") {
         text = item.text ?? "";
     }
-    prompt.push(PromptTemplate.fromTemplate(text));
+    prompt.push(PromptTemplate.fromTemplate(text, additionalOptions));
 }
 else if (typeof item === "object" && "image_url" in item) {
     let imgTemplate = item.image_url ?? "";
     let imgTemplateObject;
     let inputVariables = [];
     if (typeof imgTemplate === "string") {
-
+        let parsedTemplate;
+        if (additionalOptions?.templateFormat === "mustache") {
+            parsedTemplate = parseMustache(imgTemplate);
+        }
+        else {
+            parsedTemplate = parseFString(imgTemplate);
+        }
         const variables = parsedTemplate.flatMap((item) => item.type === "variable" ? [item.name] : []);
         if ((variables?.length ?? 0) > 0) {
             if (variables.length > 1) {
@@ -319,7 +325,13 @@ class _StringImageMessagePromptTemplate extends BaseMessagePromptTemplate {
 }
 else if (typeof imgTemplate === "object") {
     if ("url" in imgTemplate) {
-
+        let parsedTemplate;
+        if (additionalOptions?.templateFormat === "mustache") {
+            parsedTemplate = parseMustache(imgTemplate.url);
+        }
+        else {
+            parsedTemplate = parseFString(imgTemplate.url);
+        }
         inputVariables = parsedTemplate.flatMap((item) => item.type === "variable" ? [item.name] : []);
     }
     else {
@@ -589,7 +601,9 @@ export class ChatPromptTemplate extends BaseChatPromptTemplate {
 else {
     imageUrl = item.image_url.url;
 }
-const promptTemplatePlaceholder = PromptTemplate.fromTemplate(imageUrl);
+const promptTemplatePlaceholder = PromptTemplate.fromTemplate(imageUrl, {
+    templateFormat: this.templateFormat,
+});
 const formattedUrl = await promptTemplatePlaceholder.format(inputValues);
 if (typeof item.image_url !== "string" && "url" in item.image_url) {
     // eslint-disable-next-line no-param-reassign
```
package/dist/prompts/tests/chat.mustache.test.js
CHANGED
```diff
@@ -2,7 +2,7 @@ import { test, expect } from "@jest/globals";
 import { AIMessage } from "../../messages/ai.js";
 import { HumanMessage } from "../../messages/human.js";
 import { SystemMessage } from "../../messages/system.js";
-import { ChatPromptTemplate } from "../chat.js";
+import { ChatPromptTemplate, HumanMessagePromptTemplate } from "../chat.js";
 test("Test creating a chat prompt template from role string messages", async () => {
     const template = ChatPromptTemplate.fromMessages([
         ["system", "You are a helpful AI bot. Your name is {{name}}."],
@@ -59,3 +59,43 @@ test("Ignores f-string inputs input variables with repeats.", async () => {
         new HumanMessage("This {bar} is a {foo} test {foo}."),
     ]);
 });
+test("Mustache template with image and chat prompts inside one template (fromMessages)", async () => {
+    const template = ChatPromptTemplate.fromMessages([
+        [
+            "human",
+            [
+                {
+                    type: "image_url",
+                    image_url: "{{image_url}}",
+                },
+                {
+                    type: "text",
+                    text: "{{other_var}}",
+                },
+            ],
+        ],
+        ["human", "hello {{name}}"],
+    ], {
+        templateFormat: "mustache",
+    });
+    expect(template.inputVariables.sort()).toEqual([
+        "image_url",
+        "name",
+        "other_var",
+    ]);
+});
+test("Mustache image template with nested URL and chat prompts HumanMessagePromptTemplate.fromTemplate", async () => {
+    const template = HumanMessagePromptTemplate.fromTemplate([
+        {
+            text: "{{name}}",
+        },
+        {
+            image_url: {
+                url: "{{image_url}}",
+            },
+        },
+    ], {
+        templateFormat: "mustache",
+    });
+    expect(template.inputVariables.sort()).toEqual(["image_url", "name"]);
+});
```
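The new tests above only assert the detected input variables; at the consumer level the same feature looks roughly like this (a sketch based on the test fixtures, with illustrative values filled in for the variables):

```ts
import { ChatPromptTemplate } from "@langchain/core/prompts";

// A human message mixing an image_url part and a text part, both using
// mustache ({{...}}) variables, as exercised by the new test.
const prompt = ChatPromptTemplate.fromMessages(
  [
    [
      "human",
      [
        { type: "image_url", image_url: "{{image_url}}" },
        { type: "text", text: "{{other_var}}" },
      ],
    ],
    ["human", "hello {{name}}"],
  ],
  { templateFormat: "mustache" }
);

const messages = await prompt.formatMessages({
  image_url: "https://example.com/dog.png", // illustrative values
  other_var: "Describe the image.",
  name: "Ada",
});
console.log(messages);
```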