@langchain/core 0.1.46 → 0.1.48
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/language_models/chat_models.cjs +14 -10
- package/dist/language_models/chat_models.js +14 -10
- package/dist/output_parsers/base.cjs +3 -3
- package/dist/output_parsers/base.js +3 -3
- package/dist/prompts/chat.cjs +3 -3
- package/dist/prompts/chat.d.ts +2 -4
- package/dist/prompts/chat.js +3 -3
- package/dist/prompts/index.cjs +1 -0
- package/dist/prompts/index.d.ts +1 -0
- package/dist/prompts/index.js +1 -0
- package/dist/prompts/structured.cjs +40 -0
- package/dist/prompts/structured.d.ts +18 -0
- package/dist/prompts/structured.js +36 -0
- package/package.json +1 -1
package/dist/language_models/chat_models.cjs
CHANGED

@@ -80,8 +80,10 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
             let generationChunk;
             try {
                 for await (const chunk of this._streamResponseChunks(messages, callOptions, runManagers?.[0])) {
-                    chunk.message.response_metadata =
-                        _combineGenerationInfoAndMetadata(chunk);
+                    chunk.message.response_metadata = {
+                        ...chunk.generationInfo,
+                        ...chunk.message.response_metadata,
+                    };
                     yield chunk.message;
                     if (!generationChunk) {
                         generationChunk = chunk;
@@ -121,8 +123,16 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
             if (pResult.status === "fulfilled") {
                 const result = pResult.value;
                 for (const generation of result.generations) {
-                    generation.message.response_metadata =
-                        _combineGenerationInfoAndMetadata(generation);
+                    generation.message.response_metadata = {
+                        ...generation.generationInfo,
+                        ...generation.message.response_metadata,
+                    };
+                }
+                if (result.generations.length === 1) {
+                    result.generations[0].message.response_metadata = {
+                        ...result.llmOutput,
+                        ...result.generations[0].message.response_metadata,
+                    };
                 }
                 generations[i] = result.generations;
                 llmOutputs[i] = result.llmOutput;
@@ -371,9 +381,3 @@ class SimpleChatModel extends BaseChatModel {
     }
 }
 exports.SimpleChatModel = SimpleChatModel;
-function _combineGenerationInfoAndMetadata(generation) {
-    return {
-        ...generation.generationInfo,
-        ...generation.message.response_metadata,
-    };
-}
package/dist/language_models/chat_models.js
CHANGED

@@ -76,8 +76,10 @@ export class BaseChatModel extends BaseLanguageModel {
             let generationChunk;
             try {
                 for await (const chunk of this._streamResponseChunks(messages, callOptions, runManagers?.[0])) {
-                    chunk.message.response_metadata =
-                        _combineGenerationInfoAndMetadata(chunk);
+                    chunk.message.response_metadata = {
+                        ...chunk.generationInfo,
+                        ...chunk.message.response_metadata,
+                    };
                     yield chunk.message;
                     if (!generationChunk) {
                         generationChunk = chunk;
@@ -117,8 +119,16 @@ export class BaseChatModel extends BaseLanguageModel {
             if (pResult.status === "fulfilled") {
                 const result = pResult.value;
                 for (const generation of result.generations) {
-                    generation.message.response_metadata =
-                        _combineGenerationInfoAndMetadata(generation);
+                    generation.message.response_metadata = {
+                        ...generation.generationInfo,
+                        ...generation.message.response_metadata,
+                    };
+                }
+                if (result.generations.length === 1) {
+                    result.generations[0].message.response_metadata = {
+                        ...result.llmOutput,
+                        ...result.generations[0].message.response_metadata,
+                    };
                 }
                 generations[i] = result.generations;
                 llmOutputs[i] = result.llmOutput;
@@ -365,9 +375,3 @@ export class SimpleChatModel extends BaseChatModel {
         };
     }
 }
-function _combineGenerationInfoAndMetadata(generation) {
-    return {
-        ...generation.generationInfo,
-        ...generation.message.response_metadata,
-    };
-}
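The two hunks above (mirrored in the .cjs and .js builds) change how metadata reaches response_metadata: the removed _combineGenerationInfoAndMetadata helper is inlined as a spread for every streamed chunk, and batch results with exactly one generation additionally fold llmOutput into that generation's response_metadata. A hedged sketch of the merge semantics, using made-up field values rather than any real provider output:

// Illustration only: shows the spread order from the diff, where fields already
// present on response_metadata win over generationInfo (and, for single
// generations, over llmOutput). The values below are hypothetical.
const chunk = {
  generationInfo: { finish_reason: "stop" },
  message: { response_metadata: { model_name: "my-model" } },
};
chunk.message.response_metadata = {
  ...chunk.generationInfo,
  ...chunk.message.response_metadata,
};
console.log(chunk.message.response_metadata);
// -> { finish_reason: "stop", model_name: "my-model" }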
package/dist/output_parsers/base.cjs
CHANGED

@@ -39,15 +39,15 @@ class BaseLLMOutputParser extends index_js_1.Runnable {
      */
     async invoke(input, options) {
         if (typeof input === "string") {
-            return this._callWithConfig(async (input) => this.parseResult([{ text: input }]), input, { ...options, runType: "parser" });
+            return this._callWithConfig(async (input, options) => this.parseResult([{ text: input }], options?.callbacks), input, { ...options, runType: "parser" });
         }
         else {
-            return this._callWithConfig(async (input) => this.parseResult([
+            return this._callWithConfig(async (input, options) => this.parseResult([
                 {
                     message: input,
                     text: this._baseMessageToString(input),
                 },
-            ]), input, { ...options, runType: "parser" });
+            ], options?.callbacks), input, { ...options, runType: "parser" });
         }
     }
 }
package/dist/output_parsers/base.js
CHANGED

@@ -36,15 +36,15 @@ export class BaseLLMOutputParser extends Runnable {
      */
     async invoke(input, options) {
         if (typeof input === "string") {
-            return this._callWithConfig(async (input) => this.parseResult([{ text: input }]), input, { ...options, runType: "parser" });
+            return this._callWithConfig(async (input, options) => this.parseResult([{ text: input }], options?.callbacks), input, { ...options, runType: "parser" });
         }
         else {
-            return this._callWithConfig(async (input) => this.parseResult([
+            return this._callWithConfig(async (input, options) => this.parseResult([
                 {
                     message: input,
                     text: this._baseMessageToString(input),
                 },
-            ]), input, { ...options, runType: "parser" });
+            ], options?.callbacks), input, { ...options, runType: "parser" });
         }
     }
 }
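In both output parser builds, the function handed to _callWithConfig now also receives the call options and forwards options?.callbacks into parseResult, so callback handlers configured on invoke() propagate into the parser run. A hedged sketch of a custom parser that can observe this; the class name and behavior are illustrative, not part of the package:

import { BaseOutputParser } from "@langchain/core/output_parsers";
import type { Callbacks } from "@langchain/core/callbacks/manager";

// Minimal custom parser; parse() receives the forwarded callbacks as its second argument.
class UppercaseParser extends BaseOutputParser<string> {
  lc_namespace = ["example", "output_parsers"];

  async parse(text: string, _callbacks?: Callbacks): Promise<string> {
    return text.toUpperCase();
  }

  getFormatInstructions(): string {
    return "Respond in plain text.";
  }
}

// const parsed = await new UppercaseParser().invoke("hello", { callbacks: myHandlers });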
package/dist/prompts/chat.cjs
CHANGED
@@ -626,8 +626,7 @@ class ChatPromptTemplate extends BaseChatPromptTemplate {
      * @param promptMessages Messages to be passed to the chat model
      * @returns A new ChatPromptTemplate
      */
-
-    static fromMessages(promptMessages) {
+    static fromMessages(promptMessages, extra) {
         const flattenedMessages = promptMessages.reduce((acc, promptMessage) => acc.concat(
         // eslint-disable-next-line no-instanceof/no-instanceof
         promptMessage instanceof ChatPromptTemplate
@@ -650,7 +649,8 @@ class ChatPromptTemplate extends BaseChatPromptTemplate {
                 inputVariables.add(inputVariable);
             }
         }
-        return new
+        return new this({
+            ...extra,
             inputVariables: [...inputVariables],
             promptMessages: flattenedMessages,
             partialVariables: flattenedPartialVariables,
package/dist/prompts/chat.d.ts
CHANGED
@@ -202,9 +202,7 @@ export type BaseMessagePromptTemplateLike = BaseMessagePromptTemplate | BaseMess
  */
 export declare class ChatPromptTemplate<RunInput extends InputValues = any, PartialVariableName extends string = any> extends BaseChatPromptTemplate<RunInput, PartialVariableName> implements ChatPromptTemplateInput<RunInput, PartialVariableName> {
     static lc_name(): string;
-    get lc_aliases(): {
-        promptMessages: string;
-    };
+    get lc_aliases(): Record<string, string>;
     promptMessages: Array<BaseMessagePromptTemplate | BaseMessage>;
     validateTemplate: boolean;
     constructor(input: ChatPromptTemplateInput<RunInput, PartialVariableName>);
@@ -222,7 +220,7 @@ export declare class ChatPromptTemplate<RunInput extends InputValues = any, Part
     * @param promptMessages Messages to be passed to the chat model
     * @returns A new ChatPromptTemplate
     */
-    static fromMessages<RunInput extends InputValues = any>(promptMessages: (ChatPromptTemplate<InputValues, string> | BaseMessagePromptTemplateLike)[]): ChatPromptTemplate<RunInput>;
+    static fromMessages<RunInput extends InputValues = any, Extra extends ChatPromptTemplateInput<RunInput> = ChatPromptTemplateInput<RunInput>>(promptMessages: (ChatPromptTemplate<InputValues, string> | BaseMessagePromptTemplateLike)[], extra?: Omit<Extra, "inputVariables" | "promptMessages" | "partialVariables">): ChatPromptTemplate<RunInput>;
     /** @deprecated Renamed to .fromMessages */
     static fromPromptMessages<RunInput extends InputValues = any>(promptMessages: (ChatPromptTemplate<InputValues, string> | BaseMessagePromptTemplateLike)[]): ChatPromptTemplate<RunInput>;
 }
package/dist/prompts/chat.js
CHANGED
@@ -615,8 +615,7 @@ export class ChatPromptTemplate extends BaseChatPromptTemplate {
      * @param promptMessages Messages to be passed to the chat model
      * @returns A new ChatPromptTemplate
      */
-
-    static fromMessages(promptMessages) {
+    static fromMessages(promptMessages, extra) {
         const flattenedMessages = promptMessages.reduce((acc, promptMessage) => acc.concat(
         // eslint-disable-next-line no-instanceof/no-instanceof
         promptMessage instanceof ChatPromptTemplate
@@ -639,7 +638,8 @@ export class ChatPromptTemplate extends BaseChatPromptTemplate {
                 inputVariables.add(inputVariable);
             }
         }
-        return new
+        return new this({
+            ...extra,
             inputVariables: [...inputVariables],
             promptMessages: flattenedMessages,
             partialVariables: flattenedPartialVariables,
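Across chat.cjs, chat.js, and the chat.d.ts declarations, ChatPromptTemplate.fromMessages gains an optional second extra argument (any ChatPromptTemplateInput fields other than inputVariables, promptMessages, and partialVariables) and now constructs via new this(...), so subclasses calling fromMessages get an instance of their own class back. A hedged usage sketch; the extra field chosen here (validateTemplate) is just assumed to be one valid ChatPromptTemplateInput option:

import { ChatPromptTemplate } from "@langchain/core/prompts";

const prompt = ChatPromptTemplate.fromMessages(
  [
    ["system", "You are a terse assistant."],
    ["human", "{question}"],
  ],
  // Extra constructor fields are spread ahead of the computed ones, per the diff.
  { validateTemplate: false }
);

// const messages = await prompt.formatMessages({ question: "What changed in 0.1.48?" });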
package/dist/prompts/index.cjs
CHANGED
package/dist/prompts/index.d.ts
CHANGED
package/dist/prompts/index.js
CHANGED
package/dist/prompts/structured.cjs
ADDED

@@ -0,0 +1,40 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.StructuredPrompt = void 0;
+const chat_js_1 = require("./chat.cjs");
+class StructuredPrompt extends chat_js_1.ChatPromptTemplate {
+    get lc_aliases() {
+        return {
+            ...super.lc_aliases,
+            schema: "schema_",
+        };
+    }
+    constructor(input) {
+        super(input);
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        Object.defineProperty(this, "schema", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.schema = input.schema;
+    }
+    pipe(coerceable) {
+        if (typeof coerceable === "object" &&
+            "withStructuredOutput" in coerceable &&
+            typeof coerceable.withStructuredOutput === "function") {
+            return super.pipe(coerceable.withStructuredOutput(this.schema));
+        }
+        else {
+            throw new Error(`Structured prompts need to be piped to a language model that supports the "withStructuredOutput()" method.`);
+        }
+    }
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    static fromMessagesAndSchema(promptMessages, schema
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    ) {
+        return StructuredPrompt.fromMessages(promptMessages, { schema });
+    }
+}
+exports.StructuredPrompt = StructuredPrompt;
package/dist/prompts/structured.d.ts
ADDED

@@ -0,0 +1,18 @@
+import { ChatPromptValueInterface } from "../prompt_values.js";
+import { RunnableLike, Runnable } from "../runnables/base.js";
+import { RunnableConfig } from "../runnables/config.js";
+import { InputValues } from "../utils/types.js";
+import { BaseMessagePromptTemplateLike, ChatPromptTemplate, ChatPromptTemplateInput } from "./chat.js";
+/**
+ * Interface for the input of a ChatPromptTemplate.
+ */
+export interface StructuredPromptInput<RunInput extends InputValues = any, PartialVariableName extends string = any> extends ChatPromptTemplateInput<RunInput, PartialVariableName> {
+    schema: Record<string, any>;
+}
+export declare class StructuredPrompt<RunInput extends InputValues = any, PartialVariableName extends string = any> extends ChatPromptTemplate<RunInput, PartialVariableName> implements StructuredPromptInput<RunInput, PartialVariableName> {
+    schema: Record<string, any>;
+    get lc_aliases(): Record<string, string>;
+    constructor(input: StructuredPromptInput<RunInput, PartialVariableName>);
+    pipe<NewRunOutput>(coerceable: RunnableLike<ChatPromptValueInterface, NewRunOutput>): Runnable<RunInput, Exclude<NewRunOutput, Error>, RunnableConfig>;
+    static fromMessagesAndSchema<RunInput extends InputValues = any>(promptMessages: (ChatPromptTemplate<InputValues, string> | BaseMessagePromptTemplateLike)[], schema: StructuredPromptInput["schema"]): ChatPromptTemplate<RunInput, any>;
+}
package/dist/prompts/structured.js
ADDED

@@ -0,0 +1,36 @@
+import { ChatPromptTemplate, } from "./chat.js";
+export class StructuredPrompt extends ChatPromptTemplate {
+    get lc_aliases() {
+        return {
+            ...super.lc_aliases,
+            schema: "schema_",
+        };
+    }
+    constructor(input) {
+        super(input);
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        Object.defineProperty(this, "schema", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.schema = input.schema;
+    }
+    pipe(coerceable) {
+        if (typeof coerceable === "object" &&
+            "withStructuredOutput" in coerceable &&
+            typeof coerceable.withStructuredOutput === "function") {
+            return super.pipe(coerceable.withStructuredOutput(this.schema));
+        }
+        else {
+            throw new Error(`Structured prompts need to be piped to a language model that supports the "withStructuredOutput()" method.`);
+        }
+    }
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    static fromMessagesAndSchema(promptMessages, schema
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    ) {
+        return StructuredPrompt.fromMessages(promptMessages, { schema });
+    }
+}
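The three new structured.* files add StructuredPrompt, a ChatPromptTemplate subclass that stores a schema (serialized under the schema_ alias) and, when piped, calls withStructuredOutput(schema) on the receiving model, throwing otherwise. A hedged end-to-end sketch; it assumes the one-line additions to prompts/index.* re-export ./structured.js, and that model is any chat model implementing withStructuredOutput():

import { StructuredPrompt } from "@langchain/core/prompts";

const prompt = StructuredPrompt.fromMessagesAndSchema(
  [
    ["system", "Extract the requested fields from the user's message."],
    ["human", "{input}"],
  ],
  {
    // A JSON-schema-style object; StructuredPrompt only forwards it to
    // model.withStructuredOutput(schema), so anything that method accepts works here.
    type: "object",
    properties: {
      name: { type: "string" },
      age: { type: "number" },
    },
    required: ["name"],
  }
);

// const chain = prompt.pipe(model); // model must expose withStructuredOutput()
// const result = await chain.invoke({ input: "Alice is 30 years old." });
// Piping to a runnable without withStructuredOutput() throws the error shown in the diff.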