@langchain/core 0.2.32 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -83,13 +83,13 @@ Streaming (and streaming of intermediate steps) is needed to show the user that
  Async interfaces are nice when moving into production.
  Rather than having to write multiple implementations for all of those, LCEL allows you to write a runnable once and invoke it in many different ways.
 
- For more check out the [LCEL docs](https://js.langchain.com/v0.2/docs/concepts#langchain-expression-language).
+ For more check out the [LCEL docs](https://js.langchain.com/docs/concepts#langchain-expression-language).
 
  ![LangChain Stack](../docs/core_docs/static/svg/langchain_stack_062024.svg)
 
  ## 📕 Releases & Versioning
 
- `@langchain/core` is currently on version `0.1.x`.
+ `@langchain/core` is currently on version `0.3.x`.
 
  As `@langchain/core` contains the base abstractions and runtime for the whole LangChain ecosystem, we will communicate any breaking changes with advance notice and version bumps. The exception for this is anything in `@langchain/core/beta`. The reason for `@langchain/core/beta` is that given the rate of change of the field, being able to move quickly is still a priority, and this module is our attempt to do so.
 
@@ -109,7 +109,7 @@ Patch version increases will occur for:
  Other LangChain packages should add this package as a dependency and extend the classes within.
  For an example, see the [@langchain/anthropic](https://github.com/langchain-ai/langchainjs/tree/main/libs/langchain-anthropic) in this repo.
 
- Because all used packages must share the same version of core, we suggest using a tilde dependency to allow for different (backwards-compatible) patch versions:
+ Because all used packages must share the same version of core, packages should never directly depend on `@langchain/core`. Instead they should have core as a peer dependency and a dev dependency. We suggest using a tilde dependency to allow for different (backwards-compatible) patch versions:
 
  ```json
  {
@@ -120,8 +120,13 @@ Because all used packages must share the same version of core, we suggest using
  "author": "LangChain",
  "license": "MIT",
  "dependencies": {
- "@anthropic-ai/sdk": "^0.10.0",
- "@langchain/core": "~0.1.5"
+ "@anthropic-ai/sdk": "^0.10.0"
+ },
+ "peerDependencies": {
+ "@langchain/core": "~0.3.0"
+ },
+ "devDependencies": {
+ "@langchain/core": "~0.3.0"
  }
  }
  ```
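The single-copy requirement above exists because packages exchange instances of shared core abstractions and test them with `instanceof`; if two copies of `@langchain/core` end up installed, those checks can fail across package boundaries. A minimal illustration (assuming a project with core installed once):

```typescript
import { AIMessage, BaseMessage } from "@langchain/core/messages";

// With a single, deduplicated copy of @langchain/core this always holds.
// If core were a direct dependency of several packages at incompatible
// ranges, a message created by one package could fail this check inside
// another, because the classes would come from different module copies.
const msg = new AIMessage("hello");
console.log(msg instanceof BaseMessage); // true
```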
@@ -125,7 +125,7 @@ class BaseCallbackHandler extends BaseCallbackHandlerMethodsClass {
125
125
  enumerable: true,
126
126
  configurable: true,
127
127
  writable: true,
128
- value: (0, env_js_1.getEnvironmentVariable)("LANGCHAIN_CALLBACKS_BACKGROUND") !== "true"
128
+ value: (0, env_js_1.getEnvironmentVariable)("LANGCHAIN_CALLBACKS_BACKGROUND") === "false"
129
129
  });
130
130
  this.lc_kwargs = input || {};
131
131
  if (input) {
@@ -99,7 +99,7 @@ export class BaseCallbackHandler extends BaseCallbackHandlerMethodsClass {
99
99
  enumerable: true,
100
100
  configurable: true,
101
101
  writable: true,
102
- value: getEnvironmentVariable("LANGCHAIN_CALLBACKS_BACKGROUND") !== "true"
102
+ value: getEnvironmentVariable("LANGCHAIN_CALLBACKS_BACKGROUND") === "false"
103
103
  });
104
104
  this.lc_kwargs = input || {};
105
105
  if (input) {
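The two hunks above invert the callback default: in 0.2.x callbacks blocked unless `LANGCHAIN_CALLBACKS_BACKGROUND` was set to `"true"`; in 0.3.x they appear to run in the background unless the variable is explicitly `"false"`. A minimal sketch of opting back into blocking behaviour (the serverless-friendly setting), assuming the variable is read when handlers are constructed:

```typescript
import { awaitAllCallbacks } from "@langchain/core/callbacks/promises";

// Opt back into blocking callbacks (the old 0.2.x default). Set this in the
// deployment environment or before any handlers/chains are constructed.
process.env.LANGCHAIN_CALLBACKS_BACKGROUND = "false";

// Alternatively, keep backgrounded callbacks and flush them before the
// process exits (e.g. at the end of a serverless handler):
await awaitAllCallbacks();
```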
@@ -50,7 +50,7 @@ payload, config) {
50
50
  "and explicitly pass in a config parameter.",
51
51
  `\n\nOr, if you are calling this from a custom tool, ensure you're using the "tool" helper constructor as documented here:`,
52
52
  "\n |",
53
- "\n └-> https://js.langchain.com/v0.2/docs/how_to/custom_tools#tool-function",
53
+ "\n └-> https://js.langchain.com/docs/how_to/custom_tools#tool-function",
54
54
  "\n",
55
55
  ].join(" "));
56
56
  }
@@ -47,7 +47,7 @@ payload, config) {
47
47
  "and explicitly pass in a config parameter.",
48
48
  `\n\nOr, if you are calling this from a custom tool, ensure you're using the "tool" helper constructor as documented here:`,
49
49
  "\n |",
50
- "\n └-> https://js.langchain.com/v0.2/docs/how_to/custom_tools#tool-function",
50
+ "\n └-> https://js.langchain.com/docs/how_to/custom_tools#tool-function",
51
51
  "\n",
52
52
  ].join(" "));
53
53
  }
@@ -10,16 +10,6 @@ const tracer_langchain_js_1 = require("../tracers/tracer_langchain.cjs");
10
10
  const promises_js_1 = require("./promises.cjs");
11
11
  const callbacks_js_1 = require("../utils/callbacks.cjs");
12
12
  const base_js_2 = require("../tracers/base.cjs");
13
- if (
14
- /* #__PURE__ */ (0, callbacks_js_1.isTracingEnabled)() &&
15
- /* #__PURE__ */ (0, env_js_1.getEnvironmentVariable)("LANGCHAIN_CALLBACKS_BACKGROUND") !==
16
- "true") {
17
- /* #__PURE__ */ console.warn([
18
- "[WARN]: You have enabled LangSmith tracing without backgrounding callbacks.",
19
- "[WARN]: If you are not using a serverless environment where you must wait for tracing calls to finish,",
20
- `[WARN]: we suggest setting "process.env.LANGCHAIN_CALLBACKS_BACKGROUND=true" to avoid additional latency.`,
21
- ].join("\n"));
22
- }
23
13
  function parseCallbackConfigArg(arg) {
24
14
  if (!arg) {
25
15
  return {};
@@ -729,7 +719,7 @@ class CallbackManager extends BaseCallbackManager {
729
719
  manager.addHandler(new Handler());
730
720
  return manager;
731
721
  }
732
- static async configure(inheritableHandlers, localHandlers, inheritableTags, localTags, inheritableMetadata, localMetadata, options) {
722
+ static configure(inheritableHandlers, localHandlers, inheritableTags, localTags, inheritableMetadata, localMetadata, options) {
733
723
  return this._configureSync(inheritableHandlers, localHandlers, inheritableTags, localTags, inheritableMetadata, localMetadata, options);
734
724
  }
735
725
  // TODO: Deprecate async method in favor of this one.
@@ -152,7 +152,7 @@ export declare class CallbackManager extends BaseCallbackManager implements Base
152
152
  removeMetadata(metadata: Record<string, unknown>): void;
153
153
  copy(additionalHandlers?: BaseCallbackHandler[], inherit?: boolean): CallbackManager;
154
154
  static fromHandlers(handlers: CallbackHandlerMethods): CallbackManager;
155
- static configure(inheritableHandlers?: Callbacks, localHandlers?: Callbacks, inheritableTags?: string[], localTags?: string[], inheritableMetadata?: Record<string, unknown>, localMetadata?: Record<string, unknown>, options?: CallbackManagerOptions): Promise<CallbackManager | undefined>;
155
+ static configure(inheritableHandlers?: Callbacks, localHandlers?: Callbacks, inheritableTags?: string[], localTags?: string[], inheritableMetadata?: Record<string, unknown>, localMetadata?: Record<string, unknown>, options?: CallbackManagerOptions): CallbackManager | undefined;
156
156
  static _configureSync(inheritableHandlers?: Callbacks, localHandlers?: Callbacks, inheritableTags?: string[], localTags?: string[], inheritableMetadata?: Record<string, unknown>, localMetadata?: Record<string, unknown>, options?: CallbackManagerOptions): CallbackManager | undefined;
157
157
  }
158
158
  export declare function ensureHandler(handler: BaseCallbackHandler | CallbackHandlerMethods): BaseCallbackHandler;
@@ -7,16 +7,6 @@ import { LangChainTracer, } from "../tracers/tracer_langchain.js";
7
7
  import { consumeCallback } from "./promises.js";
8
8
  import { isTracingEnabled } from "../utils/callbacks.js";
9
9
  import { isBaseTracer } from "../tracers/base.js";
10
- if (
11
- /* #__PURE__ */ isTracingEnabled() &&
12
- /* #__PURE__ */ getEnvironmentVariable("LANGCHAIN_CALLBACKS_BACKGROUND") !==
13
- "true") {
14
- /* #__PURE__ */ console.warn([
15
- "[WARN]: You have enabled LangSmith tracing without backgrounding callbacks.",
16
- "[WARN]: If you are not using a serverless environment where you must wait for tracing calls to finish,",
17
- `[WARN]: we suggest setting "process.env.LANGCHAIN_CALLBACKS_BACKGROUND=true" to avoid additional latency.`,
18
- ].join("\n"));
19
- }
20
10
  export function parseCallbackConfigArg(arg) {
21
11
  if (!arg) {
22
12
  return {};
@@ -719,7 +709,7 @@ export class CallbackManager extends BaseCallbackManager {
719
709
  manager.addHandler(new Handler());
720
710
  return manager;
721
711
  }
722
- static async configure(inheritableHandlers, localHandlers, inheritableTags, localTags, inheritableMetadata, localMetadata, options) {
712
+ static configure(inheritableHandlers, localHandlers, inheritableTags, localTags, inheritableMetadata, localMetadata, options) {
723
713
  return this._configureSync(inheritableHandlers, localHandlers, inheritableTags, localTags, inheritableMetadata, localMetadata, options);
724
714
  }
725
715
  // TODO: Deprecate async method in favor of this one.
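With the hunks above, `CallbackManager.configure` simply forwards to `_configureSync` and no longer returns a promise, as the `.d.ts` change reflects. A minimal sketch of what call sites see, assuming a manager is created when tags are supplied:

```typescript
import { CallbackManager } from "@langchain/core/callbacks/manager";

// configure() now returns CallbackManager | undefined synchronously.
const manager = CallbackManager.configure(undefined, undefined, ["my-tag"]);
console.log(manager?.tags); // ["my-tag"] if a manager was created

// Existing `await CallbackManager.configure(...)` call sites keep working,
// because awaiting a non-promise value simply resolves to that value.
```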
@@ -89,14 +89,6 @@ export interface FunctionDefinition {
89
89
  * how to call the function.
90
90
  */
91
91
  description?: string;
92
- /**
93
- * Whether to enable strict schema adherence when generating the function call. If
94
- * set to true, the model will follow the exact schema defined in the `parameters`
95
- * field. Only a subset of JSON Schema is supported when `strict` is `true`. Learn
96
- * more about Structured Outputs in the
97
- * [function calling guide](https://platform.openai.com/docs/guides/function-calling).
98
- */
99
- strict?: boolean;
100
92
  }
101
93
  export interface ToolDefinition {
102
94
  type: "function";
@@ -2,7 +2,7 @@ import { z } from "zod";
2
2
  import { type BaseMessage, BaseMessageChunk, type BaseMessageLike } from "../messages/index.js";
3
3
  import type { BasePromptValueInterface } from "../prompt_values.js";
4
4
  import { LLMResult, ChatGenerationChunk, type ChatResult, type Generation } from "../outputs.js";
5
- import { BaseLanguageModel, StructuredOutputMethodOptions, ToolDefinition, type BaseLanguageModelCallOptions, type BaseLanguageModelInput, type BaseLanguageModelParams } from "./base.js";
5
+ import { BaseLanguageModel, type StructuredOutputMethodOptions, type ToolDefinition, type BaseLanguageModelCallOptions, type BaseLanguageModelInput, type BaseLanguageModelParams } from "./base.js";
6
6
  import { type CallbackManagerForLLMRun, type Callbacks } from "../callbacks/manager.js";
7
7
  import type { RunnableConfig } from "../runnables/config.js";
8
8
  import type { BaseCache } from "../caches/base.js";
@@ -55,30 +55,18 @@ export interface FunctionCall {
55
55
  */
56
56
  name: string;
57
57
  }
58
- /**
59
- * @deprecated
60
- * Import as "OpenAIToolCall" instead
61
- */
62
- export interface ToolCall {
63
- /**
64
- * The ID of the tool call.
65
- */
66
- id: string;
67
- /**
68
- * The function that the model called.
69
- */
70
- function: FunctionCall;
71
- /**
72
- * The type of the tool. Currently, only `function` is supported.
73
- */
74
- type: "function";
75
- }
76
58
  export type BaseMessageFields = {
77
59
  content: MessageContent;
78
60
  name?: string;
79
61
  additional_kwargs?: {
62
+ /**
63
+ * @deprecated Use "tool_calls" field on AIMessages instead
64
+ */
80
65
  function_call?: FunctionCall;
81
- tool_calls?: ToolCall[];
66
+ /**
67
+ * @deprecated Use "tool_calls" field on AIMessages instead
68
+ */
69
+ tool_calls?: OpenAIToolCall[];
82
70
  [key: string]: unknown;
83
71
  };
84
72
  /** Response metadata. For example: response headers, logprobs, token counts. */
@@ -137,8 +125,23 @@ export declare abstract class BaseMessage extends Serializable implements BaseMe
137
125
  _updateId(value: string | undefined): void;
138
126
  get [Symbol.toStringTag](): any;
139
127
  }
140
- export type OpenAIToolCall = ToolCall & {
141
- index: number;
128
+ /**
129
+ * @deprecated Use "tool_calls" field on AIMessages instead
130
+ */
131
+ export type OpenAIToolCall = {
132
+ /**
133
+ * The ID of the tool call.
134
+ */
135
+ id: string;
136
+ /**
137
+ * The function that the model called.
138
+ */
139
+ function: FunctionCall;
140
+ /**
141
+ * The type of the tool. Currently, only `function` is supported.
142
+ */
143
+ type: "function";
144
+ index?: number;
142
145
  };
143
146
  export declare function isOpenAIToolCallArray(value?: unknown): value is OpenAIToolCall[];
144
147
  export declare function _mergeDicts(left: Record<string, any>, right: Record<string, any>): Record<string, any>;
@@ -160,11 +163,15 @@ export type MessageFieldWithRole = {
160
163
  name?: string;
161
164
  } & Record<string, unknown>;
162
165
  export declare function _isMessageFieldWithRole(x: BaseMessageLike): x is MessageFieldWithRole;
163
- export type BaseMessageLike = BaseMessage | ({
164
- type: MessageType | "user" | "assistant" | "placeholder";
165
- } & BaseMessageFields & Record<string, unknown>) | MessageFieldWithRole | [
166
+ export type BaseMessageLike = BaseMessage | MessageFieldWithRole | [
166
167
  StringWithAutocomplete<MessageType | "user" | "assistant" | "placeholder">,
167
168
  MessageContent
168
- ] | string;
169
+ ] | string
170
+ /**
171
+ * @deprecated Specifying "type" is deprecated and will be removed in 0.4.0.
172
+ */
173
+ | ({
174
+ type: MessageType | "user" | "assistant" | "placeholder";
175
+ } & BaseMessageFields & Record<string, unknown>);
169
176
  export declare function isBaseMessage(messageLike?: unknown): messageLike is BaseMessage;
170
177
  export declare function isBaseMessageChunk(messageLike?: unknown): messageLike is BaseMessageChunk;
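The deprecations above steer code off the OpenAI wire format in `additional_kwargs` and onto the provider-agnostic `tool_calls` field of `AIMessage` (and, for `BaseMessageLike`, off the object `type` key in favour of `role`-based fields). A minimal sketch of the preferred field:

```typescript
import { AIMessage } from "@langchain/core/messages";

// Preferred: the provider-agnostic tool_calls field on AIMessage, rather
// than the deprecated additional_kwargs.tool_calls (OpenAI wire format).
const msg = new AIMessage({
  content: "",
  tool_calls: [{ name: "get_weather", args: { city: "Paris" }, id: "call_1" }],
});

for (const call of msg.tool_calls ?? []) {
  console.log(call.name, call.args); // get_weather { city: 'Paris' }
}
```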
@@ -147,24 +147,11 @@ class JsonOutputToolsParser extends transform_js_1.BaseCumulativeTransformOutput
147
147
  const parsedToolCalls = [];
148
148
  for (const toolCall of toolCalls) {
149
149
  if (toolCall !== undefined) {
150
- // backward-compatibility with previous
151
- // versions of Langchain JS, which uses `name` and `arguments`
152
- // @ts-expect-error name and arguemnts are defined by Object.defineProperty
153
150
  const backwardsCompatibleToolCall = {
154
151
  type: toolCall.name,
155
152
  args: toolCall.args,
156
153
  id: toolCall.id,
157
154
  };
158
- Object.defineProperty(backwardsCompatibleToolCall, "name", {
159
- get() {
160
- return this.type;
161
- },
162
- });
163
- Object.defineProperty(backwardsCompatibleToolCall, "arguments", {
164
- get() {
165
- return this.args;
166
- },
167
- });
168
155
  parsedToolCalls.push(backwardsCompatibleToolCall);
169
156
  }
170
157
  }
@@ -6,10 +6,6 @@ export type ParsedToolCall = {
6
6
  id?: string;
7
7
  type: string;
8
8
  args: Record<string, any>;
9
- /** @deprecated Use `type` instead. Will be removed in 0.2.0. */
10
- name: string;
11
- /** @deprecated Use `args` instead. Will be removed in 0.2.0. */
12
- arguments: Record<string, any>;
13
9
  };
14
10
  export type JsonOutputToolsParserParams = {
15
11
  /** Whether to return the tool call id. */
@@ -141,24 +141,11 @@ export class JsonOutputToolsParser extends BaseCumulativeTransformOutputParser {
141
141
  const parsedToolCalls = [];
142
142
  for (const toolCall of toolCalls) {
143
143
  if (toolCall !== undefined) {
144
- // backward-compatibility with previous
145
- // versions of Langchain JS, which uses `name` and `arguments`
146
- // @ts-expect-error name and arguemnts are defined by Object.defineProperty
147
144
  const backwardsCompatibleToolCall = {
148
145
  type: toolCall.name,
149
146
  args: toolCall.args,
150
147
  id: toolCall.id,
151
148
  };
152
- Object.defineProperty(backwardsCompatibleToolCall, "name", {
153
- get() {
154
- return this.type;
155
- },
156
- });
157
- Object.defineProperty(backwardsCompatibleToolCall, "arguments", {
158
- get() {
159
- return this.args;
160
- },
161
- });
162
149
  parsedToolCalls.push(backwardsCompatibleToolCall);
163
150
  }
164
151
  }
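With the `Object.defineProperty` shims removed above, parsed tool calls from `JsonOutputToolsParser` expose only the documented fields; the deprecated `name`/`arguments` aliases are gone. The resulting shape, mirroring the `.d.ts` change earlier in this diff:

```typescript
// Shape of a parsed tool call in 0.3.x (aliases removed):
type ParsedToolCall = {
  id?: string;
  type: string; // the tool's name (formerly also readable via the `name` alias)
  args: Record<string, any>; // formerly also readable via the `arguments` alias
};

// Migration is a rename at the call site:
// before: toolCall.name / toolCall.arguments
// after:  toolCall.type / toolCall.args
```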
@@ -942,7 +942,7 @@ class RunnableEach extends Runnable {
942
942
  * @returns A promise that resolves to the output of the runnable.
943
943
  */
944
944
  async invoke(inputs, config) {
945
- return this._callWithConfig(this._invoke, inputs, config);
945
+ return this._callWithConfig(this._invoke.bind(this), inputs, config);
946
946
  }
947
947
  /**
948
948
  * A helper method that is used to invoke the runnable with the specified input and configuration.
@@ -1026,7 +1026,7 @@ class RunnableRetry extends RunnableBinding {
1026
1026
  * @returns A promise that resolves to the output of the runnable.
1027
1027
  */
1028
1028
  async invoke(input, config) {
1029
- return this._callWithConfig(this._invoke, input, config);
1029
+ return this._callWithConfig(this._invoke.bind(this), input, config);
1030
1030
  }
1031
1031
  async _batch(inputs, configs, runManagers, batchOptions) {
1032
1032
  const resultsMap = {};
@@ -1590,7 +1590,7 @@ class RunnableLambda extends Runnable {
1590
1590
  });
1591
1591
  }
1592
1592
  async invoke(input, options) {
1593
- return this._callWithConfig(this._invoke, input, options);
1593
+ return this._callWithConfig(this._invoke.bind(this), input, options);
1594
1594
  }
1595
1595
  async *_transform(generator, runManager, config) {
1596
1596
  let finalChunk;
@@ -11,13 +11,7 @@ import { Run } from "../tracers/base.js";
11
11
  import { Graph } from "./graph.js";
12
12
  import { ToolCall } from "../messages/tool.js";
13
13
  export { type RunnableInterface, RunnableBatchOptions };
14
- export type RunnableFunc<RunInput, RunOutput> = (input: RunInput, options?: ({
15
- /** @deprecated Use top-level config fields instead. */
16
- config?: RunnableConfig;
17
- } & RunnableConfig) | Record<string, any> | (Record<string, any> & {
18
- /** @deprecated Use top-level config fields instead. */
19
- config: RunnableConfig;
20
- } & RunnableConfig)) => RunOutput | Promise<RunOutput>;
14
+ export type RunnableFunc<RunInput, RunOutput> = (input: RunInput, options: RunnableConfig | Record<string, any> | (Record<string, any> & RunnableConfig)) => RunOutput | Promise<RunOutput>;
21
15
  export type RunnableMapLike<RunInput, RunOutput> = {
22
16
  [K in keyof RunOutput]: RunnableLike<RunInput, RunOutput[K]>;
23
17
  };
@@ -188,39 +182,39 @@ export declare abstract class Runnable<RunInput = any, RunOutput = any, CallOpti
188
182
  * **ATTENTION** This reference table is for the V2 version of the schema.
189
183
  *
190
184
  * ```md
191
- * +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+
192
- * | event | name | chunk | input | output |
193
- * +======================+==================+=================================+===============================================+=================================================+
194
- * | on_chat_model_start | [model name] | | {"messages": [[SystemMessage, HumanMessage]]} | |
195
- * +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+
196
- * | on_chat_model_stream | [model name] | AIMessageChunk(content="hello") | | |
197
- * +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+
198
- * | on_chat_model_end | [model name] | | {"messages": [[SystemMessage, HumanMessage]]} | AIMessageChunk(content="hello world") |
199
- * +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+
200
- * | on_llm_start | [model name] | | {'input': 'hello'} | |
201
- * +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+
202
- * | on_llm_stream | [model name] | 'Hello' | | |
203
- * +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+
204
- * | on_llm_end | [model name] | | 'Hello human!' | |
205
- * +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+
206
- * | on_chain_start | some_runnable | | | |
207
- * +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+
208
- * | on_chain_stream | some_runnable | "hello world!, goodbye world!" | | |
209
- * +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+
210
- * | on_chain_end | some_runnable | | [Document(...)] | "hello world!, goodbye world!" |
211
- * +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+
212
- * | on_tool_start | some_tool | | {"x": 1, "y": "2"} | |
213
- * +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+
214
- * | on_tool_end | some_tool | | | {"x": 1, "y": "2"} |
215
- * +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+
216
- * | on_retriever_start | [retriever name] | | {"query": "hello"} | |
217
- * +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+
218
- * | on_retriever_end | [retriever name] | | {"query": "hello"} | [Document(...), ..] |
219
- * +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+
220
- * | on_prompt_start | [template_name] | | {"question": "hello"} | |
221
- * +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+
222
- * | on_prompt_end | [template_name] | | {"question": "hello"} | ChatPromptValue(messages: [SystemMessage, ...]) |
223
- * +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+
185
+ * +----------------------+-----------------------------+------------------------------------------+
186
+ * | event | input | output/chunk |
187
+ * +======================+=============================+==========================================+
188
+ * | on_chat_model_start | {"messages": BaseMessage[]} | |
189
+ * +----------------------+-----------------------------+------------------------------------------+
190
+ * | on_chat_model_stream | | AIMessageChunk("hello") |
191
+ * +----------------------+-----------------------------+------------------------------------------+
192
+ * | on_chat_model_end | {"messages": BaseMessage[]} | AIMessageChunk("hello world") |
193
+ * +----------------------+-----------------------------+------------------------------------------+
194
+ * | on_llm_start | {'input': 'hello'} | |
195
+ * +----------------------+-----------------------------+------------------------------------------+
196
+ * | on_llm_stream | | 'Hello' |
197
+ * +----------------------+-----------------------------+------------------------------------------+
198
+ * | on_llm_end | 'Hello human!' | |
199
+ * +----------------------+-----------------------------+------------------------------------------+
200
+ * | on_chain_start | | |
201
+ * +----------------------+-----------------------------+------------------------------------------+
202
+ * | on_chain_stream | | "hello world!" |
203
+ * +----------------------+-----------------------------+------------------------------------------+
204
+ * | on_chain_end | [Document(...)] | "hello world!, goodbye world!" |
205
+ * +----------------------+-----------------------------+------------------------------------------+
206
+ * | on_tool_start | {"x": 1, "y": "2"} | |
207
+ * +----------------------+-----------------------------+------------------------------------------+
208
+ * | on_tool_end | | {"x": 1, "y": "2"} |
209
+ * +----------------------+-----------------------------+------------------------------------------+
210
+ * | on_retriever_start | {"query": "hello"} | |
211
+ * +----------------------+-----------------------------+------------------------------------------+
212
+ * | on_retriever_end | {"query": "hello"} | [Document(...), ..] |
213
+ * +----------------------+-----------------------------+------------------------------------------+
214
+ * | on_prompt_start | {"question": "hello"} | |
215
+ * +----------------------+-----------------------------+------------------------------------------+
216
+ * | on_prompt_end | {"question": "hello"} | ChatPromptValue(messages: BaseMessage[]) |
217
+ * +----------------------+-----------------------------+------------------------------------------+
224
218
  * ```
225
219
  *
226
220
  * The "on_chain_*" events are the default for Runnables that don't fit one of the above categories.
@@ -232,13 +226,13 @@ export declare abstract class Runnable<RunInput = any, RunOutput = any, CallOpti
232
226
  * A custom event has following format:
233
227
  *
234
228
  * ```md
235
- * +-----------+------+-----------------------------------------------------------------------------------------------------------+
236
- * | Attribute | Type | Description |
237
- * +===========+======+===========================================================================================================+
238
- * | name | str | A user defined name for the event. |
239
- * +-----------+------+-----------------------------------------------------------------------------------------------------------+
240
- * | data | Any | The data associated with the event. This can be anything, though we suggest making it JSON serializable. |
241
- * +-----------+------+-----------------------------------------------------------------------------------------------------------+
229
+ * +-----------+------+------------------------------------------------------------+
230
+ * | Attribute | Type | Description |
231
+ * +===========+======+============================================================+
232
+ * | name | str | A user defined name for the event. |
233
+ * +-----------+------+------------------------------------------------------------+
234
+ * | data | Any | The data associated with the event. This can be anything. |
235
+ * +-----------+------+------------------------------------------------------------+
242
236
  * ```
243
237
  *
244
238
  * Here's an example:
@@ -351,7 +345,7 @@ export declare class RunnableBinding<RunInput, RunOutput, CallOptions extends Ru
351
345
  batch(inputs: RunInput[], options?: Partial<CallOptions> | Partial<CallOptions>[], batchOptions?: RunnableBatchOptions): Promise<(RunOutput | Error)[]>;
352
346
  _streamIterator(input: RunInput, options?: Partial<CallOptions> | undefined): AsyncGenerator<Awaited<RunOutput>, void, unknown>;
353
347
  stream(input: RunInput, options?: Partial<CallOptions> | undefined): Promise<IterableReadableStream<RunOutput>>;
354
- transform(generator: AsyncGenerator<RunInput>, options: Partial<CallOptions>): AsyncGenerator<RunOutput>;
348
+ transform(generator: AsyncGenerator<RunInput>, options?: Partial<CallOptions>): AsyncGenerator<RunOutput>;
355
349
  streamEvents(input: RunInput, options: Partial<CallOptions> & {
356
350
  version: "v1" | "v2";
357
351
  }, streamOptions?: Omit<LogStreamCallbackHandlerInput, "autoClose">): IterableReadableStream<StreamEvent>;
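The condensed table above documents the v2 `streamEvents` schema. A minimal sketch of consuming those events, using a plain `RunnableLambda` (which emits the `on_chain_*` events):

```typescript
import { RunnableLambda } from "@langchain/core/runnables";

const greet = RunnableLambda.from(async (name: string) => `hello ${name}!`);

// A plain runnable produces the "on_chain_*" rows from the table above.
for await (const event of greet.streamEvents("world", { version: "v2" })) {
  console.log(event.event, event.name, event.data);
}
```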
@@ -933,7 +933,7 @@ export class RunnableEach extends Runnable {
933
933
  * @returns A promise that resolves to the output of the runnable.
934
934
  */
935
935
  async invoke(inputs, config) {
936
- return this._callWithConfig(this._invoke, inputs, config);
936
+ return this._callWithConfig(this._invoke.bind(this), inputs, config);
937
937
  }
938
938
  /**
939
939
  * A helper method that is used to invoke the runnable with the specified input and configuration.
@@ -1016,7 +1016,7 @@ export class RunnableRetry extends RunnableBinding {
1016
1016
  * @returns A promise that resolves to the output of the runnable.
1017
1017
  */
1018
1018
  async invoke(input, config) {
1019
- return this._callWithConfig(this._invoke, input, config);
1019
+ return this._callWithConfig(this._invoke.bind(this), input, config);
1020
1020
  }
1021
1021
  async _batch(inputs, configs, runManagers, batchOptions) {
1022
1022
  const resultsMap = {};
@@ -1576,7 +1576,7 @@ export class RunnableLambda extends Runnable {
1576
1576
  });
1577
1577
  }
1578
1578
  async invoke(input, options) {
1579
- return this._callWithConfig(this._invoke, input, options);
1579
+ return this._callWithConfig(this._invoke.bind(this), input, options);
1580
1580
  }
1581
1581
  async *_transform(generator, runManager, config) {
1582
1582
  let finalChunk;
@@ -61,12 +61,7 @@ class RootListenersTracer extends base_js_1.BaseTracer {
61
61
  }
62
62
  this.rootId = run.id;
63
63
  if (this.argOnStart) {
64
- if (this.argOnStart.length === 1) {
65
- await this.argOnStart(run);
66
- }
67
- else if (this.argOnStart.length === 2) {
68
- await this.argOnStart(run, this.config);
69
- }
64
+ await this.argOnStart(run, this.config);
70
65
  }
71
66
  }
72
67
  async onRunUpdate(run) {
@@ -75,21 +70,11 @@ class RootListenersTracer extends base_js_1.BaseTracer {
75
70
  }
76
71
  if (!run.error) {
77
72
  if (this.argOnEnd) {
78
- if (this.argOnEnd.length === 1) {
79
- await this.argOnEnd(run);
80
- }
81
- else if (this.argOnEnd.length === 2) {
82
- await this.argOnEnd(run, this.config);
83
- }
73
+ await this.argOnEnd(run, this.config);
84
74
  }
85
75
  }
86
76
  else if (this.argOnError) {
87
- if (this.argOnError.length === 1) {
88
- await this.argOnError(run);
89
- }
90
- else if (this.argOnError.length === 2) {
91
- await this.argOnError(run, this.config);
92
- }
77
+ await this.argOnError(run, this.config);
93
78
  }
94
79
  }
95
80
  }
@@ -5,23 +5,14 @@ export declare class RootListenersTracer extends BaseTracer {
5
5
  /** The Run's ID. Type UUID */
6
6
  rootId?: string;
7
7
  config: RunnableConfig;
8
- argOnStart?: {
9
- (run: Run): void | Promise<void>;
10
- (run: Run, config: RunnableConfig): void | Promise<void>;
11
- };
12
- argOnEnd?: {
13
- (run: Run): void | Promise<void>;
14
- (run: Run, config: RunnableConfig): void | Promise<void>;
15
- };
16
- argOnError?: {
17
- (run: Run): void | Promise<void>;
18
- (run: Run, config: RunnableConfig): void | Promise<void>;
19
- };
8
+ argOnStart?: (run: Run, config: RunnableConfig) => void | Promise<void>;
9
+ argOnEnd?: (run: Run, config: RunnableConfig) => void | Promise<void>;
10
+ argOnError?: (run: Run, config: RunnableConfig) => void | Promise<void>;
20
11
  constructor({ config, onStart, onEnd, onError, }: {
21
12
  config: RunnableConfig;
22
- onStart?: (run: Run, config?: RunnableConfig) => void | Promise<void>;
23
- onEnd?: (run: Run, config?: RunnableConfig) => void | Promise<void>;
24
- onError?: (run: Run, config?: RunnableConfig) => void | Promise<void>;
13
+ onStart?: (run: Run, config: RunnableConfig) => void | Promise<void>;
14
+ onEnd?: (run: Run, config: RunnableConfig) => void | Promise<void>;
15
+ onError?: (run: Run, config: RunnableConfig) => void | Promise<void>;
25
16
  });
26
17
  /**
27
18
  * This is a legacy method only called once for an entire run tree
@@ -58,12 +58,7 @@ export class RootListenersTracer extends BaseTracer {
58
58
  }
59
59
  this.rootId = run.id;
60
60
  if (this.argOnStart) {
61
- if (this.argOnStart.length === 1) {
62
- await this.argOnStart(run);
63
- }
64
- else if (this.argOnStart.length === 2) {
65
- await this.argOnStart(run, this.config);
66
- }
61
+ await this.argOnStart(run, this.config);
67
62
  }
68
63
  }
69
64
  async onRunUpdate(run) {
@@ -72,21 +67,11 @@ export class RootListenersTracer extends BaseTracer {
72
67
  }
73
68
  if (!run.error) {
74
69
  if (this.argOnEnd) {
75
- if (this.argOnEnd.length === 1) {
76
- await this.argOnEnd(run);
77
- }
78
- else if (this.argOnEnd.length === 2) {
79
- await this.argOnEnd(run, this.config);
80
- }
70
+ await this.argOnEnd(run, this.config);
81
71
  }
82
72
  }
83
73
  else if (this.argOnError) {
84
- if (this.argOnError.length === 1) {
85
- await this.argOnError(run);
86
- }
87
- else if (this.argOnError.length === 2) {
88
- await this.argOnError(run, this.config);
89
- }
74
+ await this.argOnError(run, this.config);
90
75
  }
91
76
  }
92
77
  }
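The `RootListenersTracer` changes above drop the arity check and always invoke listeners with both arguments, matching the tightened `.d.ts` signatures. A minimal sketch with `withListeners`, assuming a simple lambda runnable:

```typescript
import { RunnableLambda } from "@langchain/core/runnables";

const doubled = RunnableLambda.from(async (x: number) => x * 2).withListeners({
  // Both arguments are always provided in 0.3.x, regardless of how many
  // parameters the listener declares.
  onEnd: (run, config) => {
    console.log(`run ${run.id} finished`, config?.tags);
  },
});

await doubled.invoke(21); // 42; onEnd fires after the run completes
```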
@@ -51,6 +51,7 @@ tool, fields) {
51
51
  toolDef = tool;
52
52
  }
53
53
  if (fieldsCopy?.strict !== undefined) {
54
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
54
55
  toolDef.function.strict = fieldsCopy.strict;
55
56
  }
56
57
  return toolDef;
@@ -47,6 +47,7 @@ tool, fields) {
47
47
  toolDef = tool;
48
48
  }
49
49
  if (fieldsCopy?.strict !== undefined) {
50
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
50
51
  toolDef.function.strict = fieldsCopy.strict;
51
52
  }
52
53
  return toolDef;
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@langchain/core",
- "version": "0.2.32",
+ "version": "0.3.0",
  "description": "Core LangChain.js abstractions and schemas",
  "type": "module",
  "engines": {