@langchain/core 0.2.19 → 0.2.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35):
  1. package/dist/language_models/base.d.ts +0 -10
  2. package/dist/language_models/chat_models.cjs +5 -6
  3. package/dist/language_models/chat_models.d.ts +9 -10
  4. package/dist/language_models/chat_models.js +5 -6
  5. package/dist/language_models/llms.cjs +5 -6
  6. package/dist/language_models/llms.d.ts +10 -12
  7. package/dist/language_models/llms.js +5 -6
  8. package/dist/runnables/base.cjs +34 -9
  9. package/dist/runnables/base.js +34 -9
  10. package/dist/runnables/config.cjs +41 -0
  11. package/dist/runnables/config.js +41 -0
  12. package/dist/runnables/remote.cjs +14 -13
  13. package/dist/runnables/remote.js +14 -13
  14. package/dist/runnables/types.d.ts +10 -0
  15. package/dist/utils/math.cjs +6 -4
  16. package/dist/utils/math.js +6 -4
  17. package/dist/utils/ml-distance/distances.cjs +18 -0
  18. package/dist/utils/ml-distance/distances.d.ts +8 -0
  19. package/dist/utils/ml-distance/distances.js +14 -0
  20. package/dist/utils/ml-distance/similarities.cjs +21 -0
  21. package/dist/utils/ml-distance/similarities.d.ts +7 -0
  22. package/dist/utils/ml-distance/similarities.js +17 -0
  23. package/dist/utils/ml-distance-euclidean/euclidean.cjs +15 -0
  24. package/dist/utils/ml-distance-euclidean/euclidean.d.ts +2 -0
  25. package/dist/utils/ml-distance-euclidean/euclidean.js +10 -0
  26. package/dist/utils/signal.cjs +28 -0
  27. package/dist/utils/signal.d.ts +1 -0
  28. package/dist/utils/signal.js +24 -0
  29. package/dist/utils/stream.cjs +19 -4
  30. package/dist/utils/stream.d.ts +3 -1
  31. package/dist/utils/stream.js +19 -4
  32. package/dist/utils/testing/index.cjs +9 -3
  33. package/dist/utils/testing/index.d.ts +9 -6
  34. package/dist/utils/testing/index.js +9 -3
  35. package/package.json +1 -2
@@ -66,16 +66,6 @@ export interface BaseLanguageModelCallOptions extends RunnableConfig {
66
66
  * If not provided, the default stop tokens for the model will be used.
67
67
  */
68
68
  stop?: string[];
69
- /**
70
- * Timeout for this call in milliseconds.
71
- */
72
- timeout?: number;
73
- /**
74
- * Abort signal for this call.
75
- * If provided, the call will be aborted when the signal is aborted.
76
- * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal
77
- */
78
- signal?: AbortSignal;
79
69
  }
80
70
  export interface FunctionDefinition {
81
71
  /**
@@ -43,11 +43,10 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
43
43
  value: ["langchain", "chat_models", this._llmType()]
44
44
  });
45
45
  }
46
- _separateRunnableConfigFromCallOptions(options) {
46
+ _separateRunnableConfigFromCallOptionsCompat(options) {
47
+ // For backwards compat, keep `signal` in both runnableConfig and callOptions
47
48
  const [runnableConfig, callOptions] = super._separateRunnableConfigFromCallOptions(options);
48
- if (callOptions?.timeout && !callOptions.signal) {
49
- callOptions.signal = AbortSignal.timeout(callOptions.timeout);
50
- }
49
+ callOptions.signal = runnableConfig.signal;
51
50
  return [runnableConfig, callOptions];
52
51
  }
53
52
  /**
@@ -76,7 +75,7 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
76
75
  else {
77
76
  const prompt = BaseChatModel._convertInputToPromptValue(input);
78
77
  const messages = prompt.toChatMessages();
79
- const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptions(options);
78
+ const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptionsCompat(options);
80
79
  const inheritableMetadata = {
81
80
  ...runnableConfig.metadata,
82
81
  ...this.getLsParams(callOptions),
@@ -303,7 +302,7 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
303
302
  parsedOptions = options;
304
303
  }
305
304
  const baseMessages = messages.map((messageList) => messageList.map(index_js_1.coerceMessageLikeToMessage));
306
- const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptions(parsedOptions);
305
+ const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptionsCompat(parsedOptions);
307
306
  runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;
308
307
  if (!this.cache) {
309
308
  return this._generateUncached(baseMessages, callOptions, runnableConfig);
@@ -61,23 +61,16 @@ export type LangSmithParams = {
61
61
  ls_max_tokens?: number;
62
62
  ls_stop?: Array<string>;
63
63
  };
64
- interface ChatModelGenerateCachedParameters<T extends BaseChatModel<CallOptions>, CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions> {
65
- messages: BaseMessageLike[][];
66
- cache: BaseCache<Generation[]>;
67
- llmStringKey: string;
68
- parsedOptions: T["ParsedCallOptions"];
69
- handledOptions: RunnableConfig;
70
- }
71
64
  /**
72
65
  * Base class for chat models. It extends the BaseLanguageModel class and
73
66
  * provides methods for generating chat based on input messages.
74
67
  */
75
68
  export declare abstract class BaseChatModel<CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions, OutputMessageType extends BaseMessageChunk = BaseMessageChunk> extends BaseLanguageModel<OutputMessageType, CallOptions> {
76
- ParsedCallOptions: Omit<CallOptions, keyof RunnableConfig & "timeout">;
69
+ ParsedCallOptions: Omit<CallOptions, Exclude<keyof RunnableConfig, "signal" | "timeout" | "maxConcurrency">>;
77
70
  lc_namespace: string[];
78
71
  constructor(fields: BaseChatModelParams);
79
72
  _combineLLMOutput?(...llmOutputs: LLMResult["llmOutput"][]): LLMResult["llmOutput"];
80
- protected _separateRunnableConfigFromCallOptions(options?: Partial<CallOptions>): [RunnableConfig, this["ParsedCallOptions"]];
73
+ protected _separateRunnableConfigFromCallOptionsCompat(options?: Partial<CallOptions>): [RunnableConfig, this["ParsedCallOptions"]];
81
74
  /**
82
75
  * Bind tool-like objects to this chat model.
83
76
  *
@@ -99,7 +92,13 @@ export declare abstract class BaseChatModel<CallOptions extends BaseChatModelCal
99
92
  getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
100
93
  /** @ignore */
101
94
  _generateUncached(messages: BaseMessageLike[][], parsedOptions: this["ParsedCallOptions"], handledOptions: RunnableConfig): Promise<LLMResult>;
102
- _generateCached({ messages, cache, llmStringKey, parsedOptions, handledOptions, }: ChatModelGenerateCachedParameters<typeof this>): Promise<LLMResult & {
95
+ _generateCached({ messages, cache, llmStringKey, parsedOptions, handledOptions, }: {
96
+ messages: BaseMessageLike[][];
97
+ cache: BaseCache<Generation[]>;
98
+ llmStringKey: string;
99
+ parsedOptions: any;
100
+ handledOptions: RunnableConfig;
101
+ }): Promise<LLMResult & {
103
102
  missingPromptIndices: number[];
104
103
  }>;
105
104
  /**
@@ -39,11 +39,10 @@ export class BaseChatModel extends BaseLanguageModel {
39
39
  value: ["langchain", "chat_models", this._llmType()]
40
40
  });
41
41
  }
42
- _separateRunnableConfigFromCallOptions(options) {
42
+ _separateRunnableConfigFromCallOptionsCompat(options) {
43
+ // For backwards compat, keep `signal` in both runnableConfig and callOptions
43
44
  const [runnableConfig, callOptions] = super._separateRunnableConfigFromCallOptions(options);
44
- if (callOptions?.timeout && !callOptions.signal) {
45
- callOptions.signal = AbortSignal.timeout(callOptions.timeout);
46
- }
45
+ callOptions.signal = runnableConfig.signal;
47
46
  return [runnableConfig, callOptions];
48
47
  }
49
48
  /**
@@ -72,7 +71,7 @@ export class BaseChatModel extends BaseLanguageModel {
72
71
  else {
73
72
  const prompt = BaseChatModel._convertInputToPromptValue(input);
74
73
  const messages = prompt.toChatMessages();
75
- const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptions(options);
74
+ const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptionsCompat(options);
76
75
  const inheritableMetadata = {
77
76
  ...runnableConfig.metadata,
78
77
  ...this.getLsParams(callOptions),
@@ -299,7 +298,7 @@ export class BaseChatModel extends BaseLanguageModel {
299
298
  parsedOptions = options;
300
299
  }
301
300
  const baseMessages = messages.map((messageList) => messageList.map(coerceMessageLikeToMessage));
302
- const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptions(parsedOptions);
301
+ const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptionsCompat(parsedOptions);
303
302
  runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;
304
303
  if (!this.cache) {
305
304
  return this._generateUncached(baseMessages, callOptions, runnableConfig);
@@ -39,11 +39,10 @@ class BaseLLM extends base_js_1.BaseLanguageModel {
39
39
  async *_streamResponseChunks(_input, _options, _runManager) {
40
40
  throw new Error("Not implemented.");
41
41
  }
42
- _separateRunnableConfigFromCallOptions(options) {
42
+ _separateRunnableConfigFromCallOptionsCompat(options) {
43
+ // For backwards compat, keep `signal` in both runnableConfig and callOptions
43
44
  const [runnableConfig, callOptions] = super._separateRunnableConfigFromCallOptions(options);
44
- if (callOptions?.timeout && !callOptions.signal) {
45
- callOptions.signal = AbortSignal.timeout(callOptions.timeout);
46
- }
45
+ callOptions.signal = runnableConfig.signal;
47
46
  return [runnableConfig, callOptions];
48
47
  }
49
48
  async *_streamIterator(input, options) {
@@ -53,7 +52,7 @@ class BaseLLM extends base_js_1.BaseLanguageModel {
53
52
  }
54
53
  else {
55
54
  const prompt = BaseLLM._convertInputToPromptValue(input);
56
- const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptions(options);
55
+ const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptionsCompat(options);
57
56
  const callbackManager_ = await manager_js_1.CallbackManager.configure(runnableConfig.callbacks, this.callbacks, runnableConfig.tags, this.tags, runnableConfig.metadata, this.metadata, { verbose: this.verbose });
58
57
  const extra = {
59
58
  options: callOptions,
@@ -261,7 +260,7 @@ class BaseLLM extends base_js_1.BaseLanguageModel {
261
260
  else {
262
261
  parsedOptions = options;
263
262
  }
264
- const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptions(parsedOptions);
263
+ const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptionsCompat(parsedOptions);
265
264
  runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;
266
265
  if (!this.cache) {
267
266
  return this._generateUncached(prompts, callOptions, runnableConfig);
@@ -17,19 +17,11 @@ export interface BaseLLMParams extends BaseLanguageModelParams {
17
17
  }
18
18
  export interface BaseLLMCallOptions extends BaseLanguageModelCallOptions {
19
19
  }
20
- interface LLMGenerateCachedParameters<T extends BaseLLM<CallOptions>, CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions> {
21
- prompts: string[];
22
- cache: BaseCache<Generation[]>;
23
- llmStringKey: string;
24
- parsedOptions: T["ParsedCallOptions"];
25
- handledOptions: RunnableConfig;
26
- runId?: string;
27
- }
28
20
  /**
29
21
  * LLM Wrapper. Takes in a prompt (or prompts) and returns a string.
30
22
  */
31
23
  export declare abstract class BaseLLM<CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions> extends BaseLanguageModel<string, CallOptions> {
32
- ParsedCallOptions: Omit<CallOptions, keyof RunnableConfig & "timeout">;
24
+ ParsedCallOptions: Omit<CallOptions, Exclude<keyof RunnableConfig, "signal" | "timeout" | "maxConcurrency">>;
33
25
  lc_namespace: string[];
34
26
  constructor({ concurrency, ...rest }: BaseLLMParams);
35
27
  /**
@@ -42,7 +34,7 @@ export declare abstract class BaseLLM<CallOptions extends BaseLLMCallOptions = B
42
34
  */
43
35
  invoke(input: BaseLanguageModelInput, options?: CallOptions): Promise<string>;
44
36
  _streamResponseChunks(_input: string, _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): AsyncGenerator<GenerationChunk>;
45
- protected _separateRunnableConfigFromCallOptions(options?: Partial<CallOptions>): [RunnableConfig, this["ParsedCallOptions"]];
37
+ protected _separateRunnableConfigFromCallOptionsCompat(options?: Partial<CallOptions>): [RunnableConfig, this["ParsedCallOptions"]];
46
38
  _streamIterator(input: BaseLanguageModelInput, options?: CallOptions): AsyncGenerator<string>;
47
39
  /**
48
40
  * This method takes prompt values, options, and callbacks, and generates
@@ -64,7 +56,14 @@ export declare abstract class BaseLLM<CallOptions extends BaseLLMCallOptions = B
64
56
  _flattenLLMResult(llmResult: LLMResult): LLMResult[];
65
57
  /** @ignore */
66
58
  _generateUncached(prompts: string[], parsedOptions: this["ParsedCallOptions"], handledOptions: BaseCallbackConfig): Promise<LLMResult>;
67
- _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, runId, }: LLMGenerateCachedParameters<typeof this>): Promise<LLMResult & {
59
+ _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, runId, }: {
60
+ prompts: string[];
61
+ cache: BaseCache<Generation[]>;
62
+ llmStringKey: string;
63
+ parsedOptions: any;
64
+ handledOptions: RunnableConfig;
65
+ runId?: string;
66
+ }): Promise<LLMResult & {
68
67
  missingPromptIndices: number[];
69
68
  }>;
70
69
  /**
@@ -127,4 +126,3 @@ export declare abstract class LLM<CallOptions extends BaseLLMCallOptions = BaseL
127
126
  abstract _call(prompt: string, options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<string>;
128
127
  _generate(prompts: string[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<LLMResult>;
129
128
  }
130
- export {};
@@ -36,11 +36,10 @@ export class BaseLLM extends BaseLanguageModel {
36
36
  async *_streamResponseChunks(_input, _options, _runManager) {
37
37
  throw new Error("Not implemented.");
38
38
  }
39
- _separateRunnableConfigFromCallOptions(options) {
39
+ _separateRunnableConfigFromCallOptionsCompat(options) {
40
+ // For backwards compat, keep `signal` in both runnableConfig and callOptions
40
41
  const [runnableConfig, callOptions] = super._separateRunnableConfigFromCallOptions(options);
41
- if (callOptions?.timeout && !callOptions.signal) {
42
- callOptions.signal = AbortSignal.timeout(callOptions.timeout);
43
- }
42
+ callOptions.signal = runnableConfig.signal;
44
43
  return [runnableConfig, callOptions];
45
44
  }
46
45
  async *_streamIterator(input, options) {
@@ -50,7 +49,7 @@ export class BaseLLM extends BaseLanguageModel {
50
49
  }
51
50
  else {
52
51
  const prompt = BaseLLM._convertInputToPromptValue(input);
53
- const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptions(options);
52
+ const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptionsCompat(options);
54
53
  const callbackManager_ = await CallbackManager.configure(runnableConfig.callbacks, this.callbacks, runnableConfig.tags, this.tags, runnableConfig.metadata, this.metadata, { verbose: this.verbose });
55
54
  const extra = {
56
55
  options: callOptions,
@@ -258,7 +257,7 @@ export class BaseLLM extends BaseLanguageModel {
258
257
  else {
259
258
  parsedOptions = options;
260
259
  }
261
- const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptions(parsedOptions);
260
+ const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptionsCompat(parsedOptions);
262
261
  runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;
263
262
  if (!this.cache) {
264
263
  return this._generateUncached(prompts, callOptions, runnableConfig);
@@ -12,6 +12,7 @@ const log_stream_js_1 = require("../tracers/log_stream.cjs");
12
12
  const event_stream_js_1 = require("../tracers/event_stream.cjs");
13
13
  const serializable_js_1 = require("../load/serializable.cjs");
14
14
  const stream_js_1 = require("../utils/stream.cjs");
15
+ const signal_js_1 = require("../utils/signal.cjs");
15
16
  const config_js_1 = require("./config.cjs");
16
17
  const async_caller_js_1 = require("../utils/async_caller.cjs");
17
18
  const root_listener_js_1 = require("../tracers/root_listener.cjs");
@@ -194,6 +195,8 @@ class Runnable extends serializable_js_1.Serializable {
194
195
  recursionLimit: options.recursionLimit,
195
196
  maxConcurrency: options.maxConcurrency,
196
197
  runId: options.runId,
198
+ timeout: options.timeout,
199
+ signal: options.signal,
197
200
  });
198
201
  }
199
202
  const callOptions = { ...options };
@@ -205,6 +208,8 @@ class Runnable extends serializable_js_1.Serializable {
205
208
  delete callOptions.recursionLimit;
206
209
  delete callOptions.maxConcurrency;
207
210
  delete callOptions.runId;
211
+ delete callOptions.timeout;
212
+ delete callOptions.signal;
208
213
  return [runnableConfig, callOptions];
209
214
  }
210
215
  async _callWithConfig(func, input, options) {
@@ -214,7 +219,8 @@ class Runnable extends serializable_js_1.Serializable {
214
219
  delete config.runId;
215
220
  let output;
216
221
  try {
217
- output = await func.call(this, input, config, runManager);
222
+ const promise = func.call(this, input, config, runManager);
223
+ output = await (0, signal_js_1.raceWithSignal)(promise, options?.signal);
218
224
  }
219
225
  catch (e) {
220
226
  await runManager?.handleChainError(e);
@@ -242,7 +248,8 @@ class Runnable extends serializable_js_1.Serializable {
242
248
  }));
243
249
  let outputs;
244
250
  try {
245
- outputs = await func.call(this, inputs, optionsList, runManagers, batchOptions);
251
+ const promise = func.call(this, inputs, optionsList, runManagers, batchOptions);
252
+ outputs = await (0, signal_js_1.raceWithSignal)(promise, optionsList?.[0]?.signal);
246
253
  }
247
254
  catch (e) {
248
255
  await Promise.all(runManagers.map((runManager) => runManager?.handleChainError(e)));
@@ -285,7 +292,7 @@ class Runnable extends serializable_js_1.Serializable {
285
292
  }
286
293
  let runManager;
287
294
  try {
288
- const pipe = await (0, stream_js_1.pipeGeneratorWithSetup)(transformer.bind(this), wrapInputForTracing(), async () => callbackManager_?.handleChainStart(this.toJSON(), { input: "" }, config.runId, config.runType, undefined, undefined, config.runName ?? this.getName()), config);
295
+ const pipe = await (0, stream_js_1.pipeGeneratorWithSetup)(transformer.bind(this), wrapInputForTracing(), async () => callbackManager_?.handleChainStart(this.toJSON(), { input: "" }, config.runId, config.runType, undefined, undefined, config.runName ?? this.getName()), options?.signal, config);
289
296
  delete config.runId;
290
297
  runManager = pipe.setup;
291
298
  const streamEventsHandler = runManager?.handlers.find(event_stream_js_1.isStreamEventsHandler);
@@ -1141,11 +1148,15 @@ class RunnableSequence extends Runnable {
1141
1148
  const initialSteps = [this.first, ...this.middle];
1142
1149
  for (let i = 0; i < initialSteps.length; i += 1) {
1143
1150
  const step = initialSteps[i];
1144
- nextStepInput = await step.invoke(nextStepInput, (0, config_js_1.patchConfig)(config, {
1151
+ const promise = step.invoke(nextStepInput, (0, config_js_1.patchConfig)(config, {
1145
1152
  callbacks: runManager?.getChild(`seq:step:${i + 1}`),
1146
1153
  }));
1154
+ nextStepInput = await (0, signal_js_1.raceWithSignal)(promise, options?.signal);
1147
1155
  }
1148
1156
  // TypeScript can't detect that the last output of the sequence returns RunOutput, so call it out of the loop here
1157
+ if (options?.signal?.aborted) {
1158
+ throw new Error("Aborted");
1159
+ }
1149
1160
  finalOutput = await this.last.invoke(nextStepInput, (0, config_js_1.patchConfig)(config, {
1150
1161
  callbacks: runManager?.getChild(`seq:step:${this.steps.length}`),
1151
1162
  }));
@@ -1170,10 +1181,11 @@ class RunnableSequence extends Runnable {
1170
1181
  try {
1171
1182
  for (let i = 0; i < this.steps.length; i += 1) {
1172
1183
  const step = this.steps[i];
1173
- nextStepInputs = await step.batch(nextStepInputs, runManagers.map((runManager, j) => {
1184
+ const promise = step.batch(nextStepInputs, runManagers.map((runManager, j) => {
1174
1185
  const childRunManager = runManager?.getChild(`seq:step:${i + 1}`);
1175
1186
  return (0, config_js_1.patchConfig)(configList[j], { callbacks: childRunManager });
1176
1187
  }), batchOptions);
1188
+ nextStepInputs = await (0, signal_js_1.raceWithSignal)(promise, configList[0]?.signal);
1177
1189
  }
1178
1190
  }
1179
1191
  catch (e) {
@@ -1204,6 +1216,7 @@ class RunnableSequence extends Runnable {
1204
1216
  }));
1205
1217
  }
1206
1218
  for await (const chunk of finalGenerator) {
1219
+ options?.signal?.throwIfAborted();
1207
1220
  yield chunk;
1208
1221
  if (concatSupported) {
1209
1222
  if (finalOutput === undefined) {
@@ -1350,11 +1363,12 @@ class RunnableMap extends Runnable {
1350
1363
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
1351
1364
  const output = {};
1352
1365
  try {
1353
- await Promise.all(Object.entries(this.steps).map(async ([key, runnable]) => {
1366
+ const promises = Object.entries(this.steps).map(async ([key, runnable]) => {
1354
1367
  output[key] = await runnable.invoke(input, (0, config_js_1.patchConfig)(config, {
1355
1368
  callbacks: runManager?.getChild(`map:key:${key}`),
1356
1369
  }));
1357
- }));
1370
+ });
1371
+ await (0, signal_js_1.raceWithSignal)(Promise.all(promises), options?.signal);
1358
1372
  }
1359
1373
  catch (e) {
1360
1374
  await runManager?.handleChainError(e);
@@ -1379,7 +1393,8 @@ class RunnableMap extends Runnable {
1379
1393
  // starting new iterations as needed,
1380
1394
  // until all iterators are done
1381
1395
  while (tasks.size) {
1382
- const { key, result, gen } = await Promise.race(tasks.values());
1396
+ const promise = Promise.race(tasks.values());
1397
+ const { key, result, gen } = await (0, signal_js_1.raceWithSignal)(promise, options?.signal);
1383
1398
  tasks.delete(key);
1384
1399
  if (!result.done) {
1385
1400
  yield { [key]: result.value };
@@ -1436,18 +1451,22 @@ class RunnableTraceable extends Runnable {
1436
1451
  async invoke(input, options) {
1437
1452
  const [config] = this._getOptionsList(options ?? {}, 1);
1438
1453
  const callbacks = await (0, config_js_1.getCallbackManagerForConfig)(config);
1439
- return (await this.func((0, config_js_1.patchConfig)(config, { callbacks }), input));
1454
+ const promise = this.func((0, config_js_1.patchConfig)(config, { callbacks }), input);
1455
+ return (0, signal_js_1.raceWithSignal)(promise, config?.signal);
1440
1456
  }
1441
1457
  async *_streamIterator(input, options) {
1458
+ const [config] = this._getOptionsList(options ?? {}, 1);
1442
1459
  const result = await this.invoke(input, options);
1443
1460
  if ((0, iter_js_1.isAsyncIterable)(result)) {
1444
1461
  for await (const item of result) {
1462
+ config?.signal?.throwIfAborted();
1445
1463
  yield item;
1446
1464
  }
1447
1465
  return;
1448
1466
  }
1449
1467
  if ((0, iter_js_1.isIterator)(result)) {
1450
1468
  while (true) {
1469
+ config?.signal?.throwIfAborted();
1451
1470
  const state = result.next();
1452
1471
  if (state.done)
1453
1472
  break;
@@ -1524,6 +1543,7 @@ class RunnableLambda extends Runnable {
1524
1543
  else if ((0, iter_js_1.isAsyncIterable)(output)) {
1525
1544
  let finalOutput;
1526
1545
  for await (const chunk of (0, iter_js_1.consumeAsyncIterableInContext)(childConfig, output)) {
1546
+ config?.signal?.throwIfAborted();
1527
1547
  if (finalOutput === undefined) {
1528
1548
  finalOutput = chunk;
1529
1549
  }
@@ -1543,6 +1563,7 @@ class RunnableLambda extends Runnable {
1543
1563
  else if ((0, iter_js_1.isIterableIterator)(output)) {
1544
1564
  let finalOutput;
1545
1565
  for (const chunk of (0, iter_js_1.consumeIteratorInContext)(childConfig, output)) {
1566
+ config?.signal?.throwIfAborted();
1546
1567
  if (finalOutput === undefined) {
1547
1568
  finalOutput = chunk;
1548
1569
  }
@@ -1616,11 +1637,13 @@ class RunnableLambda extends Runnable {
1616
1637
  }
1617
1638
  else if ((0, iter_js_1.isAsyncIterable)(output)) {
1618
1639
  for await (const chunk of (0, iter_js_1.consumeAsyncIterableInContext)(childConfig, output)) {
1640
+ config?.signal?.throwIfAborted();
1619
1641
  yield chunk;
1620
1642
  }
1621
1643
  }
1622
1644
  else if ((0, iter_js_1.isIterableIterator)(output)) {
1623
1645
  for (const chunk of (0, iter_js_1.consumeIteratorInContext)(childConfig, output)) {
1646
+ config?.signal?.throwIfAborted();
1624
1647
  yield chunk;
1625
1648
  }
1626
1649
  }
@@ -1697,6 +1720,7 @@ class RunnableWithFallbacks extends Runnable {
1697
1720
  const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), runId, undefined, undefined, undefined, otherConfigFields?.runName);
1698
1721
  let firstError;
1699
1722
  for (const runnable of this.runnables()) {
1723
+ config?.signal?.throwIfAborted();
1700
1724
  try {
1701
1725
  const output = await runnable.invoke(input, (0, config_js_1.patchConfig)(otherConfigFields, { callbacks: runManager?.getChild() }));
1702
1726
  await runManager?.handleChainEnd(_coerceToDict(output, "output"));
@@ -1728,6 +1752,7 @@ class RunnableWithFallbacks extends Runnable {
1728
1752
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
1729
1753
  let firstError;
1730
1754
  for (const runnable of this.runnables()) {
1755
+ configList[0].signal?.throwIfAborted();
1731
1756
  try {
1732
1757
  const outputs = await runnable.batch(inputs, runManagers.map((runManager, j) => (0, config_js_1.patchConfig)(configList[j], {
1733
1758
  callbacks: runManager?.getChild(),
@@ -6,6 +6,7 @@ import { LogStreamCallbackHandler, RunLog, RunLogPatch, isLogStreamHandler, } fr
6
6
  import { EventStreamCallbackHandler, isStreamEventsHandler, } from "../tracers/event_stream.js";
7
7
  import { Serializable } from "../load/serializable.js";
8
8
  import { IterableReadableStream, concat, atee, pipeGeneratorWithSetup, AsyncGeneratorWithSetup, } from "../utils/stream.js";
9
+ import { raceWithSignal } from "../utils/signal.js";
9
10
  import { DEFAULT_RECURSION_LIMIT, ensureConfig, getCallbackManagerForConfig, mergeConfigs, patchConfig, } from "./config.js";
10
11
  import { AsyncCaller } from "../utils/async_caller.js";
11
12
  import { RootListenersTracer } from "../tracers/root_listener.js";
@@ -187,6 +188,8 @@ export class Runnable extends Serializable {
187
188
  recursionLimit: options.recursionLimit,
188
189
  maxConcurrency: options.maxConcurrency,
189
190
  runId: options.runId,
191
+ timeout: options.timeout,
192
+ signal: options.signal,
190
193
  });
191
194
  }
192
195
  const callOptions = { ...options };
@@ -198,6 +201,8 @@ export class Runnable extends Serializable {
198
201
  delete callOptions.recursionLimit;
199
202
  delete callOptions.maxConcurrency;
200
203
  delete callOptions.runId;
204
+ delete callOptions.timeout;
205
+ delete callOptions.signal;
201
206
  return [runnableConfig, callOptions];
202
207
  }
203
208
  async _callWithConfig(func, input, options) {
@@ -207,7 +212,8 @@ export class Runnable extends Serializable {
207
212
  delete config.runId;
208
213
  let output;
209
214
  try {
210
- output = await func.call(this, input, config, runManager);
215
+ const promise = func.call(this, input, config, runManager);
216
+ output = await raceWithSignal(promise, options?.signal);
211
217
  }
212
218
  catch (e) {
213
219
  await runManager?.handleChainError(e);
@@ -235,7 +241,8 @@ export class Runnable extends Serializable {
235
241
  }));
236
242
  let outputs;
237
243
  try {
238
- outputs = await func.call(this, inputs, optionsList, runManagers, batchOptions);
244
+ const promise = func.call(this, inputs, optionsList, runManagers, batchOptions);
245
+ outputs = await raceWithSignal(promise, optionsList?.[0]?.signal);
239
246
  }
240
247
  catch (e) {
241
248
  await Promise.all(runManagers.map((runManager) => runManager?.handleChainError(e)));
@@ -278,7 +285,7 @@ export class Runnable extends Serializable {
278
285
  }
279
286
  let runManager;
280
287
  try {
281
- const pipe = await pipeGeneratorWithSetup(transformer.bind(this), wrapInputForTracing(), async () => callbackManager_?.handleChainStart(this.toJSON(), { input: "" }, config.runId, config.runType, undefined, undefined, config.runName ?? this.getName()), config);
288
+ const pipe = await pipeGeneratorWithSetup(transformer.bind(this), wrapInputForTracing(), async () => callbackManager_?.handleChainStart(this.toJSON(), { input: "" }, config.runId, config.runType, undefined, undefined, config.runName ?? this.getName()), options?.signal, config);
282
289
  delete config.runId;
283
290
  runManager = pipe.setup;
284
291
  const streamEventsHandler = runManager?.handlers.find(isStreamEventsHandler);
@@ -1130,11 +1137,15 @@ export class RunnableSequence extends Runnable {
1130
1137
  const initialSteps = [this.first, ...this.middle];
1131
1138
  for (let i = 0; i < initialSteps.length; i += 1) {
1132
1139
  const step = initialSteps[i];
1133
- nextStepInput = await step.invoke(nextStepInput, patchConfig(config, {
1140
+ const promise = step.invoke(nextStepInput, patchConfig(config, {
1134
1141
  callbacks: runManager?.getChild(`seq:step:${i + 1}`),
1135
1142
  }));
1143
+ nextStepInput = await raceWithSignal(promise, options?.signal);
1136
1144
  }
1137
1145
  // TypeScript can't detect that the last output of the sequence returns RunOutput, so call it out of the loop here
1146
+ if (options?.signal?.aborted) {
1147
+ throw new Error("Aborted");
1148
+ }
1138
1149
  finalOutput = await this.last.invoke(nextStepInput, patchConfig(config, {
1139
1150
  callbacks: runManager?.getChild(`seq:step:${this.steps.length}`),
1140
1151
  }));
@@ -1159,10 +1170,11 @@ export class RunnableSequence extends Runnable {
1159
1170
  try {
1160
1171
  for (let i = 0; i < this.steps.length; i += 1) {
1161
1172
  const step = this.steps[i];
1162
- nextStepInputs = await step.batch(nextStepInputs, runManagers.map((runManager, j) => {
1173
+ const promise = step.batch(nextStepInputs, runManagers.map((runManager, j) => {
1163
1174
  const childRunManager = runManager?.getChild(`seq:step:${i + 1}`);
1164
1175
  return patchConfig(configList[j], { callbacks: childRunManager });
1165
1176
  }), batchOptions);
1177
+ nextStepInputs = await raceWithSignal(promise, configList[0]?.signal);
1166
1178
  }
1167
1179
  }
1168
1180
  catch (e) {
@@ -1193,6 +1205,7 @@ export class RunnableSequence extends Runnable {
1193
1205
  }));
1194
1206
  }
1195
1207
  for await (const chunk of finalGenerator) {
1208
+ options?.signal?.throwIfAborted();
1196
1209
  yield chunk;
1197
1210
  if (concatSupported) {
1198
1211
  if (finalOutput === undefined) {
@@ -1338,11 +1351,12 @@ export class RunnableMap extends Runnable {
1338
1351
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
1339
1352
  const output = {};
1340
1353
  try {
1341
- await Promise.all(Object.entries(this.steps).map(async ([key, runnable]) => {
1354
+ const promises = Object.entries(this.steps).map(async ([key, runnable]) => {
1342
1355
  output[key] = await runnable.invoke(input, patchConfig(config, {
1343
1356
  callbacks: runManager?.getChild(`map:key:${key}`),
1344
1357
  }));
1345
- }));
1358
+ });
1359
+ await raceWithSignal(Promise.all(promises), options?.signal);
1346
1360
  }
1347
1361
  catch (e) {
1348
1362
  await runManager?.handleChainError(e);
@@ -1367,7 +1381,8 @@ export class RunnableMap extends Runnable {
1367
1381
  // starting new iterations as needed,
1368
1382
  // until all iterators are done
1369
1383
  while (tasks.size) {
1370
- const { key, result, gen } = await Promise.race(tasks.values());
1384
+ const promise = Promise.race(tasks.values());
1385
+ const { key, result, gen } = await raceWithSignal(promise, options?.signal);
1371
1386
  tasks.delete(key);
1372
1387
  if (!result.done) {
1373
1388
  yield { [key]: result.value };
@@ -1423,18 +1438,22 @@ export class RunnableTraceable extends Runnable {
1423
1438
  async invoke(input, options) {
1424
1439
  const [config] = this._getOptionsList(options ?? {}, 1);
1425
1440
  const callbacks = await getCallbackManagerForConfig(config);
1426
- return (await this.func(patchConfig(config, { callbacks }), input));
1441
+ const promise = this.func(patchConfig(config, { callbacks }), input);
1442
+ return raceWithSignal(promise, config?.signal);
1427
1443
  }
1428
1444
  async *_streamIterator(input, options) {
1445
+ const [config] = this._getOptionsList(options ?? {}, 1);
1429
1446
  const result = await this.invoke(input, options);
1430
1447
  if (isAsyncIterable(result)) {
1431
1448
  for await (const item of result) {
1449
+ config?.signal?.throwIfAborted();
1432
1450
  yield item;
1433
1451
  }
1434
1452
  return;
1435
1453
  }
1436
1454
  if (isIterator(result)) {
1437
1455
  while (true) {
1456
+ config?.signal?.throwIfAborted();
1438
1457
  const state = result.next();
1439
1458
  if (state.done)
1440
1459
  break;
@@ -1510,6 +1529,7 @@ export class RunnableLambda extends Runnable {
1510
1529
  else if (isAsyncIterable(output)) {
1511
1530
  let finalOutput;
1512
1531
  for await (const chunk of consumeAsyncIterableInContext(childConfig, output)) {
1532
+ config?.signal?.throwIfAborted();
1513
1533
  if (finalOutput === undefined) {
1514
1534
  finalOutput = chunk;
1515
1535
  }
@@ -1529,6 +1549,7 @@ export class RunnableLambda extends Runnable {
1529
1549
  else if (isIterableIterator(output)) {
1530
1550
  let finalOutput;
1531
1551
  for (const chunk of consumeIteratorInContext(childConfig, output)) {
1552
+ config?.signal?.throwIfAborted();
1532
1553
  if (finalOutput === undefined) {
1533
1554
  finalOutput = chunk;
1534
1555
  }
@@ -1602,11 +1623,13 @@ export class RunnableLambda extends Runnable {
1602
1623
  }
1603
1624
  else if (isAsyncIterable(output)) {
1604
1625
  for await (const chunk of consumeAsyncIterableInContext(childConfig, output)) {
1626
+ config?.signal?.throwIfAborted();
1605
1627
  yield chunk;
1606
1628
  }
1607
1629
  }
1608
1630
  else if (isIterableIterator(output)) {
1609
1631
  for (const chunk of consumeIteratorInContext(childConfig, output)) {
1632
+ config?.signal?.throwIfAborted();
1610
1633
  yield chunk;
1611
1634
  }
1612
1635
  }
@@ -1681,6 +1704,7 @@ export class RunnableWithFallbacks extends Runnable {
1681
1704
  const runManager = await callbackManager_?.handleChainStart(this.toJSON(), _coerceToDict(input, "input"), runId, undefined, undefined, undefined, otherConfigFields?.runName);
1682
1705
  let firstError;
1683
1706
  for (const runnable of this.runnables()) {
1707
+ config?.signal?.throwIfAborted();
1684
1708
  try {
1685
1709
  const output = await runnable.invoke(input, patchConfig(otherConfigFields, { callbacks: runManager?.getChild() }));
1686
1710
  await runManager?.handleChainEnd(_coerceToDict(output, "output"));
@@ -1712,6 +1736,7 @@ export class RunnableWithFallbacks extends Runnable {
1712
1736
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
1713
1737
  let firstError;
1714
1738
  for (const runnable of this.runnables()) {
1739
+ configList[0].signal?.throwIfAborted();
1715
1740
  try {
1716
1741
  const outputs = await runnable.batch(inputs, runManagers.map((runManager, j) => patchConfig(configList[j], {
1717
1742
  callbacks: runManager?.getChild(),