@langchain/core 0.2.4 → 0.2.6

This diff compares the publicly available contents of these two package versions as published to their public registry. It is provided for informational purposes only.
package/README.md CHANGED
@@ -1,6 +1,6 @@
  # 🦜🍎️ @langchain/core
 
- [![CI](https://github.com/langchain-ai/langchainjs/actions/workflows/ci.yml/badge.svg)](https://github.com/langchain-ai/langchainjs/actions/workflows/ci.yml) ![npm](https://img.shields.io/npm/dw/@langchain/core) [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) [![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) [![](https://dcbadge.vercel.app/api/server/6adMQxSpJS?compact=true&style=flat)](https://discord.gg/6adMQxSpJS)
+ [![CI](https://github.com/langchain-ai/langchainjs/actions/workflows/ci.yml/badge.svg)](https://github.com/langchain-ai/langchainjs/actions/workflows/ci.yml) ![npm](https://img.shields.io/npm/dm/@langchain/core) [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) [![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai) [![](https://dcbadge.vercel.app/api/server/6adMQxSpJS?compact=true&style=flat)](https://discord.gg/6adMQxSpJS)
 
  `@langchain/core` contains the core abstractions and schemas of LangChain.js, including base classes for language models,
  chat models, vectorstores, retrievers, and runnables.
@@ -20,7 +20,7 @@ function createQueue() {
  }
  /**
  * Consume a promise, either adding it to the queue or waiting for it to resolve
- * @param promise Promise to consume
+ * @param promiseFn Promise to consume
  * @param wait Whether to wait for the promise to resolve or resolve immediately
  */
  async function consumeCallback(promiseFn, wait) {
@@ -1,6 +1,6 @@
  /**
  * Consume a promise, either adding it to the queue or waiting for it to resolve
- * @param promise Promise to consume
+ * @param promiseFn Promise to consume
  * @param wait Whether to wait for the promise to resolve or resolve immediately
  */
  export declare function consumeCallback<T>(promiseFn: () => Promise<T> | T | void, wait: boolean): Promise<void>;
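For reference, a minimal usage sketch of this helper. It assumes the `@langchain/core/callbacks/promises` entrypoint, which is where this module is exposed; the logged message is made up for illustration:

```typescript
import { consumeCallback } from "@langchain/core/callbacks/promises";

// With wait = false, the promise factory is queued and settled in the
// background; with wait = true, it is awaited before consumeCallback resolves.
await consumeCallback(async () => {
  console.log("callback handler side effect");
}, false);
```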
@@ -14,7 +14,7 @@ function createQueue() {
  }
  /**
  * Consume a promise, either adding it to the queue or waiting for it to resolve
- * @param promise Promise to consume
+ * @param promiseFn Promise to consume
  * @param wait Whether to wait for the promise to resolve or resolve immediately
  */
  export async function consumeCallback(promiseFn, wait) {
@@ -78,7 +78,7 @@ export declare abstract class BaseChatModel<CallOptions extends BaseChatModelCal
  invoke(input: BaseLanguageModelInput, options?: CallOptions): Promise<OutputMessageType>;
  _streamResponseChunks(_messages: BaseMessage[], _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
  _streamIterator(input: BaseLanguageModelInput, options?: CallOptions): AsyncGenerator<OutputMessageType>;
- protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
+ getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
  /** @ignore */
  _generateUncached(messages: BaseMessageLike[][], parsedOptions: this["ParsedCallOptions"], handledOptions: RunnableConfig): Promise<LLMResult>;
  _generateCached({ messages, cache, llmStringKey, parsedOptions, handledOptions, }: ChatModelGenerateCachedParameters<typeof this>): Promise<LLMResult & {
@@ -76,11 +76,21 @@ class AIMessage extends base_js_1.BaseMessage {
  writable: true,
  value: []
  });
+ /**
+ * If provided, token usage information associated with the message.
+ */
+ Object.defineProperty(this, "usage_metadata", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
  if (typeof initParams !== "string") {
  this.tool_calls = initParams.tool_calls ?? this.tool_calls;
  this.invalid_tool_calls =
  initParams.invalid_tool_calls ?? this.invalid_tool_calls;
  }
+ this.usage_metadata = initParams.usage_metadata;
  }
  static lc_name() {
  return "AIMessage";
@@ -172,11 +182,21 @@ class AIMessageChunk extends base_js_1.BaseMessageChunk {
  writable: true,
  value: []
  });
+ /**
+ * If provided, token usage information associated with the message.
+ */
+ Object.defineProperty(this, "usage_metadata", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
  this.tool_call_chunks =
- initParams?.tool_call_chunks ?? this.tool_call_chunks;
- this.tool_calls = initParams?.tool_calls ?? this.tool_calls;
+ initParams.tool_call_chunks ?? this.tool_call_chunks;
+ this.tool_calls = initParams.tool_calls ?? this.tool_calls;
  this.invalid_tool_calls =
- initParams?.invalid_tool_calls ?? this.invalid_tool_calls;
+ initParams.invalid_tool_calls ?? this.invalid_tool_calls;
+ this.usage_metadata = initParams.usage_metadata;
  }
  get lc_aliases() {
  // exclude snake case conversion to pascal case
@@ -207,6 +227,25 @@ class AIMessageChunk extends base_js_1.BaseMessageChunk {
  combinedFields.tool_call_chunks = rawToolCalls;
  }
  }
+ if (this.usage_metadata !== undefined ||
+ chunk.usage_metadata !== undefined) {
+ const left = this.usage_metadata ?? {
+ input_tokens: 0,
+ output_tokens: 0,
+ total_tokens: 0,
+ };
+ const right = chunk.usage_metadata ?? {
+ input_tokens: 0,
+ output_tokens: 0,
+ total_tokens: 0,
+ };
+ const usage_metadata = {
+ input_tokens: left.input_tokens + right.input_tokens,
+ output_tokens: left.output_tokens + right.output_tokens,
+ total_tokens: left.total_tokens + right.total_tokens,
+ };
+ combinedFields.usage_metadata = usage_metadata;
+ }
  return new AIMessageChunk(combinedFields);
  }
  }
@@ -3,6 +3,24 @@ import { InvalidToolCall, ToolCall, ToolCallChunk } from "./tool.js";
  export type AIMessageFields = BaseMessageFields & {
  tool_calls?: ToolCall[];
  invalid_tool_calls?: InvalidToolCall[];
+ usage_metadata?: UsageMetadata;
+ };
+ /**
+ * Usage metadata for a message, such as token counts.
+ */
+ export type UsageMetadata = {
+ /**
+ * The count of input (or prompt) tokens.
+ */
+ input_tokens: number;
+ /**
+ * The count of output (or completion) tokens
+ */
+ output_tokens: number;
+ /**
+ * The total token count
+ */
+ total_tokens: number;
  };
  /**
  * Represents an AI message in a conversation.
@@ -10,6 +28,10 @@ export type AIMessageFields = BaseMessageFields & {
  export declare class AIMessage extends BaseMessage {
  tool_calls?: ToolCall[];
  invalid_tool_calls?: InvalidToolCall[];
+ /**
+ * If provided, token usage information associated with the message.
+ */
+ usage_metadata?: UsageMetadata;
  get lc_aliases(): Record<string, string>;
  constructor(fields: string | AIMessageFields,
  /** @deprecated */
@@ -29,6 +51,10 @@ export declare class AIMessageChunk extends BaseMessageChunk {
  tool_calls?: ToolCall[];
  invalid_tool_calls?: InvalidToolCall[];
  tool_call_chunks?: ToolCallChunk[];
+ /**
+ * If provided, token usage information associated with the message.
+ */
+ usage_metadata?: UsageMetadata;
  constructor(fields: string | AIMessageChunkFields);
  get lc_aliases(): Record<string, string>;
  static lc_name(): string;
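The new `usage_metadata` field can be supplied when constructing an `AIMessage` (or chunk). A minimal sketch, with token counts made up for illustration:

```typescript
import { AIMessage } from "@langchain/core/messages";

const message = new AIMessage({
  content: "Hello!",
  // Optional token accounting, typically filled in by a chat model integration.
  usage_metadata: { input_tokens: 5, output_tokens: 2, total_tokens: 7 },
});

console.log(message.usage_metadata?.total_tokens); // 7
```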
@@ -73,11 +73,21 @@ export class AIMessage extends BaseMessage {
  writable: true,
  value: []
  });
+ /**
+ * If provided, token usage information associated with the message.
+ */
+ Object.defineProperty(this, "usage_metadata", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
  if (typeof initParams !== "string") {
  this.tool_calls = initParams.tool_calls ?? this.tool_calls;
  this.invalid_tool_calls =
  initParams.invalid_tool_calls ?? this.invalid_tool_calls;
  }
+ this.usage_metadata = initParams.usage_metadata;
  }
  static lc_name() {
  return "AIMessage";
@@ -167,11 +177,21 @@ export class AIMessageChunk extends BaseMessageChunk {
  writable: true,
  value: []
  });
+ /**
+ * If provided, token usage information associated with the message.
+ */
+ Object.defineProperty(this, "usage_metadata", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
  this.tool_call_chunks =
- initParams?.tool_call_chunks ?? this.tool_call_chunks;
- this.tool_calls = initParams?.tool_calls ?? this.tool_calls;
+ initParams.tool_call_chunks ?? this.tool_call_chunks;
+ this.tool_calls = initParams.tool_calls ?? this.tool_calls;
  this.invalid_tool_calls =
- initParams?.invalid_tool_calls ?? this.invalid_tool_calls;
+ initParams.invalid_tool_calls ?? this.invalid_tool_calls;
+ this.usage_metadata = initParams.usage_metadata;
  }
  get lc_aliases() {
  // exclude snake case conversion to pascal case
@@ -202,6 +222,25 @@ export class AIMessageChunk extends BaseMessageChunk {
  combinedFields.tool_call_chunks = rawToolCalls;
  }
  }
+ if (this.usage_metadata !== undefined ||
+ chunk.usage_metadata !== undefined) {
+ const left = this.usage_metadata ?? {
+ input_tokens: 0,
+ output_tokens: 0,
+ total_tokens: 0,
+ };
+ const right = chunk.usage_metadata ?? {
+ input_tokens: 0,
+ output_tokens: 0,
+ total_tokens: 0,
+ };
+ const usage_metadata = {
+ input_tokens: left.input_tokens + right.input_tokens,
+ output_tokens: left.output_tokens + right.output_tokens,
+ total_tokens: left.total_tokens + right.total_tokens,
+ };
+ combinedFields.usage_metadata = usage_metadata;
+ }
  return new AIMessageChunk(combinedFields);
  }
  }
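As shown in the concat logic above, merging two `AIMessageChunk`s now sums their token counts field by field, treating a missing `usage_metadata` on either side as zeros. A minimal sketch with made-up counts:

```typescript
import { AIMessageChunk } from "@langchain/core/messages";

const first = new AIMessageChunk({
  content: "Hel",
  usage_metadata: { input_tokens: 5, output_tokens: 1, total_tokens: 6 },
});
const second = new AIMessageChunk({
  content: "lo",
  usage_metadata: { input_tokens: 0, output_tokens: 1, total_tokens: 1 },
});

// concat() combines content and tool call chunks as before, and now also
// aggregates usage_metadata.
const merged = first.concat(second);
// merged.usage_metadata -> { input_tokens: 5, output_tokens: 2, total_tokens: 7 }
```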
@@ -301,14 +301,20 @@ class _StringImageMessagePromptTemplate extends BaseMessagePromptTemplate {
  else if (typeof item.text === "string") {
  text = item.text ?? "";
  }
- prompt.push(prompt_js_1.PromptTemplate.fromTemplate(text));
+ prompt.push(prompt_js_1.PromptTemplate.fromTemplate(text, additionalOptions));
  }
  else if (typeof item === "object" && "image_url" in item) {
  let imgTemplate = item.image_url ?? "";
  let imgTemplateObject;
  let inputVariables = [];
  if (typeof imgTemplate === "string") {
- const parsedTemplate = (0, template_js_1.parseFString)(imgTemplate);
+ let parsedTemplate;
+ if (additionalOptions?.templateFormat === "mustache") {
+ parsedTemplate = (0, template_js_1.parseMustache)(imgTemplate);
+ }
+ else {
+ parsedTemplate = (0, template_js_1.parseFString)(imgTemplate);
+ }
  const variables = parsedTemplate.flatMap((item) => item.type === "variable" ? [item.name] : []);
  if ((variables?.length ?? 0) > 0) {
  if (variables.length > 1) {
@@ -327,7 +333,13 @@ class _StringImageMessagePromptTemplate extends BaseMessagePromptTemplate {
  }
  else if (typeof imgTemplate === "object") {
  if ("url" in imgTemplate) {
- const parsedTemplate = (0, template_js_1.parseFString)(imgTemplate.url);
+ let parsedTemplate;
+ if (additionalOptions?.templateFormat === "mustache") {
+ parsedTemplate = (0, template_js_1.parseMustache)(imgTemplate.url);
+ }
+ else {
+ parsedTemplate = (0, template_js_1.parseFString)(imgTemplate.url);
+ }
  inputVariables = parsedTemplate.flatMap((item) => item.type === "variable" ? [item.name] : []);
  }
  else {
@@ -600,7 +612,9 @@ class ChatPromptTemplate extends BaseChatPromptTemplate {
  else {
  imageUrl = item.image_url.url;
  }
- const promptTemplatePlaceholder = prompt_js_1.PromptTemplate.fromTemplate(imageUrl);
+ const promptTemplatePlaceholder = prompt_js_1.PromptTemplate.fromTemplate(imageUrl, {
+ templateFormat: this.templateFormat,
+ });
  const formattedUrl = await promptTemplatePlaceholder.format(inputValues);
  if (typeof item.image_url !== "string" && "url" in item.image_url) {
  // eslint-disable-next-line no-param-reassign
@@ -7,7 +7,7 @@ import { BaseStringPromptTemplate } from "./string.js";
  import { BasePromptTemplate, } from "./base.js";
  import { PromptTemplate, } from "./prompt.js";
  import { ImagePromptTemplate } from "./image.js";
- import { parseFString } from "./template.js";
+ import { parseFString, parseMustache, } from "./template.js";
  /**
  * Abstract class that serves as a base for creating message prompt
  * templates. It defines how to format messages for different roles in a
@@ -293,14 +293,20 @@ class _StringImageMessagePromptTemplate extends BaseMessagePromptTemplate {
  else if (typeof item.text === "string") {
  text = item.text ?? "";
  }
- prompt.push(PromptTemplate.fromTemplate(text));
+ prompt.push(PromptTemplate.fromTemplate(text, additionalOptions));
  }
  else if (typeof item === "object" && "image_url" in item) {
  let imgTemplate = item.image_url ?? "";
  let imgTemplateObject;
  let inputVariables = [];
  if (typeof imgTemplate === "string") {
- const parsedTemplate = parseFString(imgTemplate);
+ let parsedTemplate;
+ if (additionalOptions?.templateFormat === "mustache") {
+ parsedTemplate = parseMustache(imgTemplate);
+ }
+ else {
+ parsedTemplate = parseFString(imgTemplate);
+ }
  const variables = parsedTemplate.flatMap((item) => item.type === "variable" ? [item.name] : []);
  if ((variables?.length ?? 0) > 0) {
  if (variables.length > 1) {
@@ -319,7 +325,13 @@ class _StringImageMessagePromptTemplate extends BaseMessagePromptTemplate {
  }
  else if (typeof imgTemplate === "object") {
  if ("url" in imgTemplate) {
- const parsedTemplate = parseFString(imgTemplate.url);
+ let parsedTemplate;
+ if (additionalOptions?.templateFormat === "mustache") {
+ parsedTemplate = parseMustache(imgTemplate.url);
+ }
+ else {
+ parsedTemplate = parseFString(imgTemplate.url);
+ }
  inputVariables = parsedTemplate.flatMap((item) => item.type === "variable" ? [item.name] : []);
  }
  else {
@@ -589,7 +601,9 @@ export class ChatPromptTemplate extends BaseChatPromptTemplate {
  else {
  imageUrl = item.image_url.url;
  }
- const promptTemplatePlaceholder = PromptTemplate.fromTemplate(imageUrl);
+ const promptTemplatePlaceholder = PromptTemplate.fromTemplate(imageUrl, {
+ templateFormat: this.templateFormat,
+ });
  const formattedUrl = await promptTemplatePlaceholder.format(inputValues);
  if (typeof item.image_url !== "string" && "url" in item.image_url) {
  // eslint-disable-next-line no-param-reassign
@@ -2,7 +2,7 @@ import { test, expect } from "@jest/globals";
  import { AIMessage } from "../../messages/ai.js";
  import { HumanMessage } from "../../messages/human.js";
  import { SystemMessage } from "../../messages/system.js";
- import { ChatPromptTemplate } from "../chat.js";
+ import { ChatPromptTemplate, HumanMessagePromptTemplate } from "../chat.js";
  test("Test creating a chat prompt template from role string messages", async () => {
  const template = ChatPromptTemplate.fromMessages([
  ["system", "You are a helpful AI bot. Your name is {{name}}."],
@@ -59,3 +59,43 @@ test("Ignores f-string inputs input variables with repeats.", async () => {
  new HumanMessage("This {bar} is a {foo} test {foo}."),
  ]);
  });
+ test("Mustache template with image and chat prompts inside one template (fromMessages)", async () => {
+ const template = ChatPromptTemplate.fromMessages([
+ [
+ "human",
+ [
+ {
+ type: "image_url",
+ image_url: "{{image_url}}",
+ },
+ {
+ type: "text",
+ text: "{{other_var}}",
+ },
+ ],
+ ],
+ ["human", "hello {{name}}"],
+ ], {
+ templateFormat: "mustache",
+ });
+ expect(template.inputVariables.sort()).toEqual([
+ "image_url",
+ "name",
+ "other_var",
+ ]);
+ });
+ test("Mustache image template with nested URL and chat prompts HumanMessagePromptTemplate.fromTemplate", async () => {
+ const template = HumanMessagePromptTemplate.fromTemplate([
+ {
+ text: "{{name}}",
+ },
+ {
+ image_url: {
+ url: "{{image_url}}",
+ },
+ },
+ ], {
+ templateFormat: "mustache",
+ });
+ expect(template.inputVariables.sort()).toEqual(["image_url", "name"]);
+ });
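Building on the new tests above, a minimal sketch of formatting such a template: with `templateFormat: "mustache"`, both the text block and the `image_url` placeholder are now parsed with the configured format instead of always falling back to f-string parsing. The URL and variable values here are made up:

```typescript
import { ChatPromptTemplate } from "@langchain/core/prompts";

const prompt = ChatPromptTemplate.fromMessages([
  [
    "human",
    [
      { type: "image_url", image_url: "{{image_url}}" },
      { type: "text", text: "Describe this image for {{name}}." },
    ],
  ],
], { templateFormat: "mustache" });

// Substitutes {{image_url}} and {{name}} when the message is formatted.
const messages = await prompt.formatMessages({
  image_url: "https://example.com/cat.png",
  name: "Ada",
});
```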
@@ -481,7 +481,7 @@ class Runnable extends serializable_js_1.Serializable {
  async *_streamEventsV2(input, options, streamOptions) {
  const eventStreamer = new event_stream_js_1.EventStreamCallbackHandler({
  ...streamOptions,
- autoClose: true,
+ autoClose: false,
  });
  const config = (0, config_js_1.ensureConfig)(options);
  const runId = config.runId ?? (0, uuid_1.v4)();
@@ -503,11 +503,16 @@ class Runnable extends serializable_js_1.Serializable {
  // add each chunk to the output stream
  const outerThis = this;
  async function consumeRunnableStream() {
- const runnableStream = await outerThis.stream(input, config);
- const tappedStream = eventStreamer.tapOutputIterable(runId, runnableStream);
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- for await (const _ of tappedStream) {
- // Just iterate so that the callback handler picks up events
+ try {
+ const runnableStream = await outerThis.stream(input, config);
+ const tappedStream = eventStreamer.tapOutputIterable(runId, runnableStream);
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
+ for await (const _ of tappedStream) {
+ // Just iterate so that the callback handler picks up events
+ }
+ }
+ finally {
+ await eventStreamer.finish();
  }
  }
  const runnableStreamConsumePromise = consumeRunnableStream();
@@ -474,7 +474,7 @@ export class Runnable extends Serializable {
  async *_streamEventsV2(input, options, streamOptions) {
  const eventStreamer = new EventStreamCallbackHandler({
  ...streamOptions,
- autoClose: true,
+ autoClose: false,
  });
  const config = ensureConfig(options);
  const runId = config.runId ?? uuidv4();
@@ -496,11 +496,16 @@ export class Runnable extends Serializable {
  // add each chunk to the output stream
  const outerThis = this;
  async function consumeRunnableStream() {
- const runnableStream = await outerThis.stream(input, config);
- const tappedStream = eventStreamer.tapOutputIterable(runId, runnableStream);
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- for await (const _ of tappedStream) {
- // Just iterate so that the callback handler picks up events
+ try {
+ const runnableStream = await outerThis.stream(input, config);
+ const tappedStream = eventStreamer.tapOutputIterable(runId, runnableStream);
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
+ for await (const _ of tappedStream) {
+ // Just iterate so that the callback handler picks up events
+ }
+ }
+ finally {
+ await eventStreamer.finish();
  }
  }
  const runnableStreamConsumePromise = consumeRunnableStream();
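From the caller's side the `streamEvents` v2 API is unchanged; the difference is that the event stream is now closed from the `finally` block above (via `eventStreamer.finish()`) rather than from the tracer's run-update hook, so it terminates even if the wrapped stream throws partway through. A minimal sketch using a trivial runnable:

```typescript
import { RunnableLambda } from "@langchain/core/runnables";

const chain = RunnableLambda.from(async (input: string) => `echo: ${input}`);

// Iterate the v2 event stream; it ends cleanly once the underlying run
// finishes (or fails).
for await (const event of chain.streamEvents("hello", { version: "v2" })) {
  console.log(event.event, event.name);
}
```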
@@ -310,7 +310,8 @@ class RemoteRunnable extends base_js_1.Runnable {
  async *_streamIterator(input, options) {
  const [config, kwargs] = this._separateRunnableConfigFromCallOptions(options);
  const callbackManager_ = await (0, config_js_1.getCallbackManagerForConfig)(options);
- const runManager = await callbackManager_?.handleChainStart(this.toJSON(), (0, base_js_1._coerceToDict)(input, "input"), undefined, undefined, undefined, undefined, options?.runName);
+ const runManager = await callbackManager_?.handleChainStart(this.toJSON(), (0, base_js_1._coerceToDict)(input, "input"), config.runId, undefined, undefined, undefined, config.runName);
+ delete config.runId;
  let finalOutput;
  let finalOutputSupported = true;
  try {
@@ -360,7 +361,8 @@ class RemoteRunnable extends base_js_1.Runnable {
  async *streamLog(input, options, streamOptions) {
  const [config, kwargs] = this._separateRunnableConfigFromCallOptions(options);
  const callbackManager_ = await (0, config_js_1.getCallbackManagerForConfig)(options);
- const runManager = await callbackManager_?.handleChainStart(this.toJSON(), (0, base_js_1._coerceToDict)(input, "input"), undefined, undefined, undefined, undefined, options?.runName);
+ const runManager = await callbackManager_?.handleChainStart(this.toJSON(), (0, base_js_1._coerceToDict)(input, "input"), config.runId, undefined, undefined, undefined, config.runName);
+ delete config.runId;
  // The type is in camelCase but the API only accepts snake_case.
  const camelCaseStreamOptions = {
  include_names: streamOptions?.includeNames,
@@ -411,7 +413,8 @@ class RemoteRunnable extends base_js_1.Runnable {
  const generator = async function* () {
  const [config, kwargs] = outerThis._separateRunnableConfigFromCallOptions(options);
  const callbackManager_ = await (0, config_js_1.getCallbackManagerForConfig)(options);
- const runManager = await callbackManager_?.handleChainStart(outerThis.toJSON(), (0, base_js_1._coerceToDict)(input, "input"), undefined, undefined, undefined, undefined, options?.runName);
+ const runManager = await callbackManager_?.handleChainStart(outerThis.toJSON(), (0, base_js_1._coerceToDict)(input, "input"), config.runId, undefined, undefined, undefined, config.runName);
+ delete config.runId;
  // The type is in camelCase but the API only accepts snake_case.
  const camelCaseStreamOptions = {
  include_names: streamOptions?.includeNames,
@@ -307,7 +307,8 @@ export class RemoteRunnable extends Runnable {
  async *_streamIterator(input, options) {
  const [config, kwargs] = this._separateRunnableConfigFromCallOptions(options);
  const callbackManager_ = await getCallbackManagerForConfig(options);
- const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), undefined, undefined, undefined, undefined, options?.runName);
+ const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), config.runId, undefined, undefined, undefined, config.runName);
+ delete config.runId;
  let finalOutput;
  let finalOutputSupported = true;
  try {
@@ -357,7 +358,8 @@ export class RemoteRunnable extends Runnable {
  async *streamLog(input, options, streamOptions) {
  const [config, kwargs] = this._separateRunnableConfigFromCallOptions(options);
  const callbackManager_ = await getCallbackManagerForConfig(options);
- const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), undefined, undefined, undefined, undefined, options?.runName);
+ const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), config.runId, undefined, undefined, undefined, config.runName);
+ delete config.runId;
  // The type is in camelCase but the API only accepts snake_case.
  const camelCaseStreamOptions = {
  include_names: streamOptions?.includeNames,
@@ -408,7 +410,8 @@ export class RemoteRunnable extends Runnable {
  const generator = async function* () {
  const [config, kwargs] = outerThis._separateRunnableConfigFromCallOptions(options);
  const callbackManager_ = await getCallbackManagerForConfig(options);
- const runManager = await callbackManager_?.handleChainStart(outerThis.toJSON(), _coerceToDict(input, "input"), undefined, undefined, undefined, undefined, options?.runName);
+ const runManager = await callbackManager_?.handleChainStart(outerThis.toJSON(), _coerceToDict(input, "input"), config.runId, undefined, undefined, undefined, config.runName);
+ delete config.runId;
  // The type is in camelCase but the API only accepts snake_case.
  const camelCaseStreamOptions = {
  include_names: streamOptions?.includeNames,
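These `RemoteRunnable` changes forward a caller-supplied `runId` and `runName` from the config to `handleChainStart` (deleting `runId` from the config afterwards so it is not reused). A minimal sketch; the URL is a placeholder for a LangServe-style endpoint and the input shape is made up:

```typescript
import { RemoteRunnable } from "@langchain/core/runnables/remote";
import { v4 as uuidv4 } from "uuid";

const remote = new RemoteRunnable({ url: "https://example.com/my-chain" });
const runId = uuidv4();

// The run created for this stream call is now traced under the
// caller-supplied run ID and name.
const stream = await remote.stream(
  { question: "hi" },
  { runId, runName: "remote-call" }
);
for await (const chunk of stream) {
  console.log(chunk);
}
```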
@@ -70,12 +70,6 @@ class EventStreamCallbackHandler extends base_js_1.BaseTracer {
  writable: true,
  value: void 0
  });
- Object.defineProperty(this, "rootId", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: void 0
- });
  Object.defineProperty(this, "runInfoMap", {
  enumerable: true,
  configurable: true,
@@ -164,7 +158,10 @@ class EventStreamCallbackHandler extends base_js_1.BaseTracer {
  return;
  }
  const runInfo = this.runInfoMap.get(runId);
- // run has finished, don't issue any stream events
+ // Run has finished, don't issue any stream events.
+ // An example of this is for runnables that use the default
+ // implementation of .stream(), which delegates to .invoke()
+ // and calls .onChainEnd() before passing it to the iterator.
  if (runInfo === undefined) {
  yield firstChunk.value;
  return;
@@ -207,7 +204,7 @@ class EventStreamCallbackHandler extends base_js_1.BaseTracer {
  finally {
  // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
  tappedPromiseResolver();
- // Don't delete from the map to keep track of which runs have been tapped.
+ // Don't delete from the promises map to keep track of which runs have been tapped.
  }
  }
  else {
@@ -492,18 +489,11 @@ class EventStreamCallbackHandler extends base_js_1.BaseTracer {
  metadata: runInfo.metadata,
  }, runInfo);
  }
- async onRunCreate(run) {
- if (this.rootId === undefined) {
- this.rootId = run.id;
- }
- }
- async onRunUpdate(run) {
- if (run.id === this.rootId && this.autoClose) {
- const pendingPromises = [...this.tappedPromises.values()];
- void Promise.all(pendingPromises).finally(() => {
- void this.writer.close();
- });
- }
+ async finish() {
+ const pendingPromises = [...this.tappedPromises.values()];
+ void Promise.all(pendingPromises).finally(() => {
+ void this.writer.close();
+ });
  }
  }
  exports.EventStreamCallbackHandler = EventStreamCallbackHandler;
@@ -109,7 +109,6 @@ export declare class EventStreamCallbackHandler extends BaseTracer {
  protected excludeNames?: string[];
  protected excludeTypes?: string[];
  protected excludeTags?: string[];
- protected rootId?: string;
  private runInfoMap;
  private tappedPromises;
  protected transformStream: TransformStream;
@@ -134,7 +133,6 @@ export declare class EventStreamCallbackHandler extends BaseTracer {
  onToolEnd(run: Run): Promise<void>;
  onRetrieverStart(run: Run): Promise<void>;
  onRetrieverEnd(run: Run): Promise<void>;
- onRunCreate(run: Run): Promise<void>;
- onRunUpdate(run: Run): Promise<void>;
+ finish(): Promise<void>;
  }
  export {};
@@ -66,12 +66,6 @@ export class EventStreamCallbackHandler extends BaseTracer {
  writable: true,
  value: void 0
  });
- Object.defineProperty(this, "rootId", {
- enumerable: true,
- configurable: true,
- writable: true,
- value: void 0
- });
  Object.defineProperty(this, "runInfoMap", {
  enumerable: true,
  configurable: true,
@@ -160,7 +154,10 @@ export class EventStreamCallbackHandler extends BaseTracer {
  return;
  }
  const runInfo = this.runInfoMap.get(runId);
- // run has finished, don't issue any stream events
+ // Run has finished, don't issue any stream events.
+ // An example of this is for runnables that use the default
+ // implementation of .stream(), which delegates to .invoke()
+ // and calls .onChainEnd() before passing it to the iterator.
  if (runInfo === undefined) {
  yield firstChunk.value;
  return;
@@ -203,7 +200,7 @@ export class EventStreamCallbackHandler extends BaseTracer {
  finally {
  // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
  tappedPromiseResolver();
- // Don't delete from the map to keep track of which runs have been tapped.
+ // Don't delete from the promises map to keep track of which runs have been tapped.
  }
  }
  else {
@@ -488,17 +485,10 @@ export class EventStreamCallbackHandler extends BaseTracer {
  metadata: runInfo.metadata,
  }, runInfo);
  }
- async onRunCreate(run) {
- if (this.rootId === undefined) {
- this.rootId = run.id;
- }
- }
- async onRunUpdate(run) {
- if (run.id === this.rootId && this.autoClose) {
- const pendingPromises = [...this.tappedPromises.values()];
- void Promise.all(pendingPromises).finally(() => {
- void this.writer.close();
- });
- }
+ async finish() {
+ const pendingPromises = [...this.tappedPromises.values()];
+ void Promise.all(pendingPromises).finally(() => {
+ void this.writer.close();
+ });
  }
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@langchain/core",
- "version": "0.2.4",
+ "version": "0.2.6",
  "description": "Core LangChain.js abstractions and schemas",
  "type": "module",
  "engines": {