@langchain/core 0.1.47 → 0.1.48

This diff compares the publicly released contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
@@ -80,8 +80,10 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
  let generationChunk;
  try {
  for await (const chunk of this._streamResponseChunks(messages, callOptions, runManagers?.[0])) {
- chunk.message.response_metadata =
- _combineGenerationInfoAndMetadata(chunk);
+ chunk.message.response_metadata = {
+ ...chunk.generationInfo,
+ ...chunk.message.response_metadata,
+ };
  yield chunk.message;
  if (!generationChunk) {
  generationChunk = chunk;
@@ -121,8 +123,16 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
  if (pResult.status === "fulfilled") {
  const result = pResult.value;
  for (const generation of result.generations) {
- generation.message.response_metadata =
- _combineGenerationInfoAndMetadata(generation);
+ generation.message.response_metadata = {
+ ...generation.generationInfo,
+ ...generation.message.response_metadata,
+ };
+ }
+ if (result.generations.length === 1) {
+ result.generations[0].message.response_metadata = {
+ ...result.llmOutput,
+ ...result.generations[0].message.response_metadata,
+ };
  }
  generations[i] = result.generations;
  llmOutputs[i] = result.llmOutput;
@@ -371,9 +381,3 @@ class SimpleChatModel extends BaseChatModel {
  }
  }
  exports.SimpleChatModel = SimpleChatModel;
- function _combineGenerationInfoAndMetadata(generation) {
- return {
- ...generation.generationInfo,
- ...generation.message.response_metadata,
- };
- }
@@ -76,8 +76,10 @@ export class BaseChatModel extends BaseLanguageModel {
  let generationChunk;
  try {
  for await (const chunk of this._streamResponseChunks(messages, callOptions, runManagers?.[0])) {
- chunk.message.response_metadata =
- _combineGenerationInfoAndMetadata(chunk);
+ chunk.message.response_metadata = {
+ ...chunk.generationInfo,
+ ...chunk.message.response_metadata,
+ };
  yield chunk.message;
  if (!generationChunk) {
  generationChunk = chunk;
@@ -117,8 +119,16 @@ export class BaseChatModel extends BaseLanguageModel {
  if (pResult.status === "fulfilled") {
  const result = pResult.value;
  for (const generation of result.generations) {
- generation.message.response_metadata =
- _combineGenerationInfoAndMetadata(generation);
+ generation.message.response_metadata = {
+ ...generation.generationInfo,
+ ...generation.message.response_metadata,
+ };
+ }
+ if (result.generations.length === 1) {
+ result.generations[0].message.response_metadata = {
+ ...result.llmOutput,
+ ...result.generations[0].message.response_metadata,
+ };
  }
  generations[i] = result.generations;
  llmOutputs[i] = result.llmOutput;
@@ -365,9 +375,3 @@ export class SimpleChatModel extends BaseChatModel {
  };
  }
  }
- function _combineGenerationInfoAndMetadata(generation) {
- return {
- ...generation.generationInfo,
- ...generation.message.response_metadata,
- };
- }
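In both the CommonJS and ES module builds above, the private `_combineGenerationInfoAndMetadata` helper is removed and its logic inlined: each streamed chunk's or generation's `generationInfo` is spread into `message.response_metadata` (existing metadata wins on key conflicts), and for single-generation results the provider's `llmOutput` is merged in as well. A minimal sketch of what this surfaces to callers, assuming a provider package such as `@langchain/openai` (not part of this diff):

```ts
import { ChatOpenAI } from "@langchain/openai";

const model = new ChatOpenAI({ temperature: 0 });

// After this change, fields the provider reported in generationInfo and, for
// single-generation results, in llmOutput (e.g. finish reason or token usage,
// depending on the provider) are merged onto the returned message.
const message = await model.invoke("Say hello.");
console.log(message.response_metadata);

// The same per-chunk merge happens while streaming.
for await (const chunk of await model.stream("Say hello.")) {
  console.log(chunk.response_metadata);
}
```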
@@ -626,8 +626,7 @@ class ChatPromptTemplate extends BaseChatPromptTemplate {
  * @param promptMessages Messages to be passed to the chat model
  * @returns A new ChatPromptTemplate
  */
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- static fromMessages(promptMessages) {
+ static fromMessages(promptMessages, extra) {
  const flattenedMessages = promptMessages.reduce((acc, promptMessage) => acc.concat(
  // eslint-disable-next-line no-instanceof/no-instanceof
  promptMessage instanceof ChatPromptTemplate
@@ -650,7 +649,8 @@ class ChatPromptTemplate extends BaseChatPromptTemplate {
  inputVariables.add(inputVariable);
  }
  }
- return new ChatPromptTemplate({
+ return new this({
+ ...extra,
  inputVariables: [...inputVariables],
  promptMessages: flattenedMessages,
  partialVariables: flattenedPartialVariables,
@@ -202,9 +202,7 @@ export type BaseMessagePromptTemplateLike = BaseMessagePromptTemplate | BaseMess
  */
  export declare class ChatPromptTemplate<RunInput extends InputValues = any, PartialVariableName extends string = any> extends BaseChatPromptTemplate<RunInput, PartialVariableName> implements ChatPromptTemplateInput<RunInput, PartialVariableName> {
  static lc_name(): string;
- get lc_aliases(): {
- promptMessages: string;
- };
+ get lc_aliases(): Record<string, string>;
  promptMessages: Array<BaseMessagePromptTemplate | BaseMessage>;
  validateTemplate: boolean;
  constructor(input: ChatPromptTemplateInput<RunInput, PartialVariableName>);
@@ -222,7 +220,7 @@ export declare class ChatPromptTemplate<RunInput extends InputValues = any, Part
  * @param promptMessages Messages to be passed to the chat model
  * @returns A new ChatPromptTemplate
  */
- static fromMessages<RunInput extends InputValues = any>(promptMessages: (ChatPromptTemplate<InputValues, string> | BaseMessagePromptTemplateLike)[]): ChatPromptTemplate<RunInput>;
+ static fromMessages<RunInput extends InputValues = any, Extra extends ChatPromptTemplateInput<RunInput> = ChatPromptTemplateInput<RunInput>>(promptMessages: (ChatPromptTemplate<InputValues, string> | BaseMessagePromptTemplateLike)[], extra?: Omit<Extra, "inputVariables" | "promptMessages" | "partialVariables">): ChatPromptTemplate<RunInput>;
  /** @deprecated Renamed to .fromMessages */
  static fromPromptMessages<RunInput extends InputValues = any>(promptMessages: (ChatPromptTemplate<InputValues, string> | BaseMessagePromptTemplateLike)[]): ChatPromptTemplate<RunInput>;
  }
@@ -615,8 +615,7 @@ export class ChatPromptTemplate extends BaseChatPromptTemplate {
  * @param promptMessages Messages to be passed to the chat model
  * @returns A new ChatPromptTemplate
  */
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- static fromMessages(promptMessages) {
+ static fromMessages(promptMessages, extra) {
  const flattenedMessages = promptMessages.reduce((acc, promptMessage) => acc.concat(
  // eslint-disable-next-line no-instanceof/no-instanceof
  promptMessage instanceof ChatPromptTemplate
@@ -639,7 +638,8 @@ export class ChatPromptTemplate extends BaseChatPromptTemplate {
  inputVariables.add(inputVariable);
  }
  }
- return new ChatPromptTemplate({
+ return new this({
+ ...extra,
  inputVariables: [...inputVariables],
  promptMessages: flattenedMessages,
  partialVariables: flattenedPartialVariables,
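`ChatPromptTemplate.fromMessages` now accepts an optional second `extra` argument whose fields are spread into the constructor options, and it instantiates via `new this(...)`, so subclasses that call `fromMessages` get back an instance of their own class rather than a plain `ChatPromptTemplate`. A minimal sketch of passing an extra option, using the existing `validateTemplate` field from `ChatPromptTemplateInput`:

```ts
import { ChatPromptTemplate } from "@langchain/core/prompts";

// The second argument is spread into the constructor, so any
// ChatPromptTemplateInput field other than inputVariables, promptMessages,
// or partialVariables can be supplied here.
const prompt = ChatPromptTemplate.fromMessages(
  [
    ["system", "You are a helpful assistant."],
    ["human", "{question}"],
  ],
  { validateTemplate: false }
);
```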
@@ -23,3 +23,4 @@ __exportStar(require("./serde.cjs"), exports);
  __exportStar(require("./string.cjs"), exports);
  __exportStar(require("./template.cjs"), exports);
  __exportStar(require("./image.cjs"), exports);
+ __exportStar(require("./structured.cjs"), exports);
@@ -7,3 +7,4 @@ export * from "./serde.js";
  export * from "./string.js";
  export * from "./template.js";
  export * from "./image.js";
+ export * from "./structured.js";
@@ -7,3 +7,4 @@ export * from "./serde.js";
  export * from "./string.js";
  export * from "./template.js";
  export * from "./image.js";
+ export * from "./structured.js";
@@ -0,0 +1,40 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.StructuredPrompt = void 0;
+ const chat_js_1 = require("./chat.cjs");
+ class StructuredPrompt extends chat_js_1.ChatPromptTemplate {
+ get lc_aliases() {
+ return {
+ ...super.lc_aliases,
+ schema: "schema_",
+ };
+ }
+ constructor(input) {
+ super(input);
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ Object.defineProperty(this, "schema", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ this.schema = input.schema;
+ }
+ pipe(coerceable) {
+ if (typeof coerceable === "object" &&
+ "withStructuredOutput" in coerceable &&
+ typeof coerceable.withStructuredOutput === "function") {
+ return super.pipe(coerceable.withStructuredOutput(this.schema));
+ }
+ else {
+ throw new Error(`Structured prompts need to be piped to a language model that supports the "withStructuredOutput()" method.`);
+ }
+ }
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ static fromMessagesAndSchema(promptMessages, schema
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ ) {
+ return StructuredPrompt.fromMessages(promptMessages, { schema });
+ }
+ }
+ exports.StructuredPrompt = StructuredPrompt;
@@ -0,0 +1,18 @@
+ import { ChatPromptValueInterface } from "../prompt_values.js";
+ import { RunnableLike, Runnable } from "../runnables/base.js";
+ import { RunnableConfig } from "../runnables/config.js";
+ import { InputValues } from "../utils/types.js";
+ import { BaseMessagePromptTemplateLike, ChatPromptTemplate, ChatPromptTemplateInput } from "./chat.js";
+ /**
+ * Interface for the input of a ChatPromptTemplate.
+ */
+ export interface StructuredPromptInput<RunInput extends InputValues = any, PartialVariableName extends string = any> extends ChatPromptTemplateInput<RunInput, PartialVariableName> {
+ schema: Record<string, any>;
+ }
+ export declare class StructuredPrompt<RunInput extends InputValues = any, PartialVariableName extends string = any> extends ChatPromptTemplate<RunInput, PartialVariableName> implements StructuredPromptInput<RunInput, PartialVariableName> {
+ schema: Record<string, any>;
+ get lc_aliases(): Record<string, string>;
+ constructor(input: StructuredPromptInput<RunInput, PartialVariableName>);
+ pipe<NewRunOutput>(coerceable: RunnableLike<ChatPromptValueInterface, NewRunOutput>): Runnable<RunInput, Exclude<NewRunOutput, Error>, RunnableConfig>;
+ static fromMessagesAndSchema<RunInput extends InputValues = any>(promptMessages: (ChatPromptTemplate<InputValues, string> | BaseMessagePromptTemplateLike)[], schema: StructuredPromptInput["schema"]): ChatPromptTemplate<RunInput, any>;
+ }
@@ -0,0 +1,36 @@
+ import { ChatPromptTemplate, } from "./chat.js";
+ export class StructuredPrompt extends ChatPromptTemplate {
+ get lc_aliases() {
+ return {
+ ...super.lc_aliases,
+ schema: "schema_",
+ };
+ }
+ constructor(input) {
+ super(input);
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ Object.defineProperty(this, "schema", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ this.schema = input.schema;
+ }
+ pipe(coerceable) {
+ if (typeof coerceable === "object" &&
+ "withStructuredOutput" in coerceable &&
+ typeof coerceable.withStructuredOutput === "function") {
+ return super.pipe(coerceable.withStructuredOutput(this.schema));
+ }
+ else {
+ throw new Error(`Structured prompts need to be piped to a language model that supports the "withStructuredOutput()" method.`);
+ }
+ }
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ static fromMessagesAndSchema(promptMessages, schema
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ ) {
+ return StructuredPrompt.fromMessages(promptMessages, { schema });
+ }
+ }
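The new `StructuredPrompt` (re-exported from the prompts entrypoint via the barrel changes above) stores a `schema` and, when piped, calls `withStructuredOutput(this.schema)` on the target runnable, throwing if that method is missing. A hedged usage sketch, assuming a chat model that implements `withStructuredOutput()` (for example a recent `ChatOpenAI` from `@langchain/openai`, which is outside this package) and a hypothetical JSON schema:

```ts
import { StructuredPrompt } from "@langchain/core/prompts";
import { ChatOpenAI } from "@langchain/openai";

// Hypothetical schema; anything the target model's withStructuredOutput()
// accepts (a JSON schema object, or a Zod schema for models that support it)
// can be stored here, since the field is typed Record<string, any>.
const schema = {
  type: "object",
  properties: {
    setup: { type: "string" },
    punchline: { type: "string" },
  },
  required: ["setup", "punchline"],
};

const prompt = StructuredPrompt.fromMessagesAndSchema(
  [["human", "Tell me a joke about {topic}"]],
  schema
);

// pipe() wires the schema through withStructuredOutput(); piping to a
// runnable without that method throws the error shown in the source above.
const chain = prompt.pipe(new ChatOpenAI({ temperature: 0 }));
const result = await chain.invoke({ topic: "cats" });
console.log(result); // structured object matching the schema
```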
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@langchain/core",
- "version": "0.1.47",
+ "version": "0.1.48",
  "description": "Core LangChain.js abstractions and schemas",
  "type": "module",
  "engines": {