@langchain/core 0.1.52 → 0.1.54

This diff shows the content changes between publicly available package versions released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the differences between package versions as they appear in their respective public registries.
Files changed (74)
  1. package/dist/callbacks/base.d.ts +1 -1
  2. package/dist/callbacks/manager.cjs +13 -11
  3. package/dist/callbacks/manager.d.ts +8 -3
  4. package/dist/callbacks/manager.js +13 -11
  5. package/dist/example_selectors/conditional.d.ts +1 -1
  6. package/dist/language_models/chat_models.cjs +3 -3
  7. package/dist/language_models/chat_models.js +3 -3
  8. package/dist/language_models/llms.cjs +5 -4
  9. package/dist/language_models/llms.d.ts +2 -1
  10. package/dist/language_models/llms.js +5 -4
  11. package/dist/load/import_map.cjs +2 -2
  12. package/dist/load/import_map.d.ts +2 -2
  13. package/dist/load/import_map.js +2 -2
  14. package/dist/messages/index.d.ts +10 -3
  15. package/dist/output_parsers/string.cjs +12 -5
  16. package/dist/output_parsers/string.js +12 -5
  17. package/dist/prompts/base.d.ts +1 -1
  18. package/dist/prompts/chat.cjs +32 -3
  19. package/dist/prompts/chat.d.ts +1 -1
  20. package/dist/prompts/chat.js +32 -3
  21. package/dist/prompts/few_shot.d.ts +1 -1
  22. package/dist/prompts/image.d.ts +1 -1
  23. package/dist/prompts/pipeline.d.ts +1 -1
  24. package/dist/prompts/prompt.d.ts +1 -1
  25. package/dist/prompts/string.d.ts +1 -1
  26. package/dist/prompts/structured.d.ts +1 -1
  27. package/dist/prompts/template.d.ts +1 -1
  28. package/dist/retrievers/document_compressors/base.cjs +13 -0
  29. package/dist/retrievers/document_compressors/base.d.ts +18 -0
  30. package/dist/retrievers/document_compressors/base.js +9 -0
  31. package/dist/{retrievers.cjs → retrievers/index.cjs} +4 -4
  32. package/dist/{retrievers.d.ts → retrievers/index.d.ts} +4 -4
  33. package/dist/{retrievers.js → retrievers/index.js} +4 -4
  34. package/dist/runnables/base.cjs +36 -11
  35. package/dist/runnables/base.js +36 -11
  36. package/dist/runnables/branch.cjs +1 -1
  37. package/dist/runnables/branch.js +1 -1
  38. package/dist/runnables/config.cjs +5 -1
  39. package/dist/runnables/config.d.ts +1 -1
  40. package/dist/runnables/config.js +5 -1
  41. package/dist/runnables/remote.cjs +120 -79
  42. package/dist/runnables/remote.d.ts +3 -3
  43. package/dist/runnables/remote.js +123 -82
  44. package/dist/tools.cjs +2 -1
  45. package/dist/tools.js +2 -1
  46. package/dist/tracers/base.d.ts +1 -1
  47. package/dist/tracers/tracer_langchain_v1.d.ts +1 -1
  48. package/dist/utils/testing/index.cjs +37 -3
  49. package/dist/utils/testing/index.d.ts +10 -1
  50. package/dist/utils/testing/index.js +34 -1
  51. package/dist/utils/types/index.cjs +17 -0
  52. package/dist/utils/{types.d.ts → types/index.d.ts} +1 -0
  53. package/dist/utils/types/index.js +1 -0
  54. package/dist/utils/types/is_zod_schema.cjs +16 -0
  55. package/dist/utils/types/is_zod_schema.d.ts +8 -0
  56. package/dist/utils/types/is_zod_schema.js +12 -0
  57. package/dist/vectorstores.cjs +2 -2
  58. package/dist/vectorstores.d.ts +1 -1
  59. package/dist/vectorstores.js +1 -1
  60. package/package.json +27 -14
  61. package/retrievers/document_compressors.cjs +1 -0
  62. package/retrievers/document_compressors.d.cts +1 -0
  63. package/retrievers/document_compressors.d.ts +1 -0
  64. package/retrievers/document_compressors.js +1 -0
  65. package/retrievers.cjs +1 -1
  66. package/retrievers.d.cts +1 -1
  67. package/retrievers.d.ts +1 -1
  68. package/retrievers.js +1 -1
  69. package/utils/types.cjs +1 -1
  70. package/utils/types.d.cts +1 -1
  71. package/utils/types.d.ts +1 -1
  72. package/utils/types.js +1 -1
  73. package/dist/utils/types.cjs +0 -5
  74. package/dist/utils/types.js +0 -4
@@ -1,4 +1,4 @@
1
- import type { ChainValues } from "../utils/types.js";
1
+ import type { ChainValues } from "../utils/types/index.js";
2
2
  import type { BaseMessage } from "../messages/index.js";
3
3
  import type { AgentAction, AgentFinish } from "../agents.js";
4
4
  import type { ChatGenerationChunk, GenerationChunk, LLMResult } from "../outputs.js";
@@ -372,34 +372,36 @@ class CallbackManager extends BaseCallbackManager {
372
372
  getParentRunId() {
373
373
  return this._parentRunId;
374
374
  }
375
- async handleLLMStart(llm, prompts, _runId = undefined, _parentRunId = undefined, extraParams = undefined, _tags = undefined, _metadata = undefined, runName = undefined) {
376
- return Promise.all(prompts.map(async (prompt) => {
377
- const runId = (0, uuid_1.v4)();
375
+ async handleLLMStart(llm, prompts, runId = undefined, _parentRunId = undefined, extraParams = undefined, _tags = undefined, _metadata = undefined, runName = undefined) {
376
+ return Promise.all(prompts.map(async (prompt, idx) => {
377
+ // Can't have duplicate runs with the same run ID (if provided)
378
+ const runId_ = idx === 0 && runId ? runId : (0, uuid_1.v4)();
378
379
  await Promise.all(this.handlers.map((handler) => (0, promises_js_1.consumeCallback)(async () => {
379
380
  if (!handler.ignoreLLM) {
380
381
  try {
381
- await handler.handleLLMStart?.(llm, [prompt], runId, this._parentRunId, extraParams, this.tags, this.metadata, runName);
382
+ await handler.handleLLMStart?.(llm, [prompt], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName);
382
383
  }
383
384
  catch (err) {
384
385
  console.error(`Error in handler ${handler.constructor.name}, handleLLMStart: ${err}`);
385
386
  }
386
387
  }
387
388
  }, handler.awaitHandlers)));
388
- return new CallbackManagerForLLMRun(runId, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId);
389
+ return new CallbackManagerForLLMRun(runId_, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId);
389
390
  }));
390
391
  }
391
- async handleChatModelStart(llm, messages, _runId = undefined, _parentRunId = undefined, extraParams = undefined, _tags = undefined, _metadata = undefined, runName = undefined) {
392
- return Promise.all(messages.map(async (messageGroup) => {
393
- const runId = (0, uuid_1.v4)();
392
+ async handleChatModelStart(llm, messages, runId = undefined, _parentRunId = undefined, extraParams = undefined, _tags = undefined, _metadata = undefined, runName = undefined) {
393
+ return Promise.all(messages.map(async (messageGroup, idx) => {
394
+ // Can't have duplicate runs with the same run ID (if provided)
395
+ const runId_ = idx === 0 && runId ? runId : (0, uuid_1.v4)();
394
396
  await Promise.all(this.handlers.map((handler) => (0, promises_js_1.consumeCallback)(async () => {
395
397
  if (!handler.ignoreLLM) {
396
398
  try {
397
399
  if (handler.handleChatModelStart) {
398
- await handler.handleChatModelStart?.(llm, [messageGroup], runId, this._parentRunId, extraParams, this.tags, this.metadata, runName);
400
+ await handler.handleChatModelStart?.(llm, [messageGroup], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName);
399
401
  }
400
402
  else if (handler.handleLLMStart) {
401
403
  const messageString = (0, index_js_1.getBufferString)(messageGroup);
402
- await handler.handleLLMStart?.(llm, [messageString], runId, this._parentRunId, extraParams, this.tags, this.metadata, runName);
404
+ await handler.handleLLMStart?.(llm, [messageString], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName);
403
405
  }
404
406
  }
405
407
  catch (err) {
@@ -407,7 +409,7 @@ class CallbackManager extends BaseCallbackManager {
407
409
  }
408
410
  }
409
411
  }, handler.awaitHandlers)));
410
- return new CallbackManagerForLLMRun(runId, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId);
412
+ return new CallbackManagerForLLMRun(runId_, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId);
411
413
  }));
412
414
  }
413
415
  async handleChainStart(chain, inputs, runId = (0, uuid_1.v4)(), runType = undefined, _tags = undefined, _metadata = undefined, runName = undefined) {
@@ -1,5 +1,5 @@
1
1
  import { AgentAction, AgentFinish } from "../agents.js";
2
- import type { ChainValues } from "../utils/types.js";
2
+ import type { ChainValues } from "../utils/types/index.js";
3
3
  import { LLMResult } from "../outputs.js";
4
4
  import { BaseCallbackHandler, CallbackHandlerMethods, HandleLLMNewTokenCallbackFields, NewTokenIndices } from "./base.js";
5
5
  import { type BaseMessage } from "../messages/index.js";
@@ -34,6 +34,11 @@ export interface BaseCallbackConfig {
34
34
  * Tags are passed to all callbacks, metadata is passed to handle*Start callbacks.
35
35
  */
36
36
  callbacks?: Callbacks;
37
+ /**
38
+ * Unique identifier for the tracer run for this call. If not provided, a new UUID
39
+ * will be generated.
40
+ */
41
+ runId?: string;
37
42
  }
38
43
  export declare function parseCallbackConfigArg(arg: Callbacks | BaseCallbackConfig | undefined): BaseCallbackConfig;
39
44
  /**
@@ -130,8 +135,8 @@ export declare class CallbackManager extends BaseCallbackManager implements Base
130
135
  * @returns The parent run ID.
131
136
  */
132
137
  getParentRunId(): string | undefined;
133
- handleLLMStart(llm: Serialized, prompts: string[], _runId?: string | undefined, _parentRunId?: string | undefined, extraParams?: Record<string, unknown> | undefined, _tags?: string[] | undefined, _metadata?: Record<string, unknown> | undefined, runName?: string | undefined): Promise<CallbackManagerForLLMRun[]>;
134
- handleChatModelStart(llm: Serialized, messages: BaseMessage[][], _runId?: string | undefined, _parentRunId?: string | undefined, extraParams?: Record<string, unknown> | undefined, _tags?: string[] | undefined, _metadata?: Record<string, unknown> | undefined, runName?: string | undefined): Promise<CallbackManagerForLLMRun[]>;
138
+ handleLLMStart(llm: Serialized, prompts: string[], runId?: string | undefined, _parentRunId?: string | undefined, extraParams?: Record<string, unknown> | undefined, _tags?: string[] | undefined, _metadata?: Record<string, unknown> | undefined, runName?: string | undefined): Promise<CallbackManagerForLLMRun[]>;
139
+ handleChatModelStart(llm: Serialized, messages: BaseMessage[][], runId?: string | undefined, _parentRunId?: string | undefined, extraParams?: Record<string, unknown> | undefined, _tags?: string[] | undefined, _metadata?: Record<string, unknown> | undefined, runName?: string | undefined): Promise<CallbackManagerForLLMRun[]>;
135
140
  handleChainStart(chain: Serialized, inputs: ChainValues, runId?: string, runType?: string | undefined, _tags?: string[] | undefined, _metadata?: Record<string, unknown> | undefined, runName?: string | undefined): Promise<CallbackManagerForChainRun>;
136
141
  handleToolStart(tool: Serialized, input: string, runId?: string, _parentRunId?: string | undefined, _tags?: string[] | undefined, _metadata?: Record<string, unknown> | undefined, runName?: string | undefined): Promise<CallbackManagerForToolRun>;
137
142
  handleRetrieverStart(retriever: Serialized, query: string, runId?: string, _parentRunId?: string | undefined, _tags?: string[] | undefined, _metadata?: Record<string, unknown> | undefined, runName?: string | undefined): Promise<CallbackManagerForRetrieverRun>;
@@ -363,34 +363,36 @@ export class CallbackManager extends BaseCallbackManager {
363
363
  getParentRunId() {
364
364
  return this._parentRunId;
365
365
  }
366
- async handleLLMStart(llm, prompts, _runId = undefined, _parentRunId = undefined, extraParams = undefined, _tags = undefined, _metadata = undefined, runName = undefined) {
367
- return Promise.all(prompts.map(async (prompt) => {
368
- const runId = uuidv4();
366
+ async handleLLMStart(llm, prompts, runId = undefined, _parentRunId = undefined, extraParams = undefined, _tags = undefined, _metadata = undefined, runName = undefined) {
367
+ return Promise.all(prompts.map(async (prompt, idx) => {
368
+ // Can't have duplicate runs with the same run ID (if provided)
369
+ const runId_ = idx === 0 && runId ? runId : uuidv4();
369
370
  await Promise.all(this.handlers.map((handler) => consumeCallback(async () => {
370
371
  if (!handler.ignoreLLM) {
371
372
  try {
372
- await handler.handleLLMStart?.(llm, [prompt], runId, this._parentRunId, extraParams, this.tags, this.metadata, runName);
373
+ await handler.handleLLMStart?.(llm, [prompt], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName);
373
374
  }
374
375
  catch (err) {
375
376
  console.error(`Error in handler ${handler.constructor.name}, handleLLMStart: ${err}`);
376
377
  }
377
378
  }
378
379
  }, handler.awaitHandlers)));
379
- return new CallbackManagerForLLMRun(runId, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId);
380
+ return new CallbackManagerForLLMRun(runId_, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId);
380
381
  }));
381
382
  }
382
- async handleChatModelStart(llm, messages, _runId = undefined, _parentRunId = undefined, extraParams = undefined, _tags = undefined, _metadata = undefined, runName = undefined) {
383
- return Promise.all(messages.map(async (messageGroup) => {
384
- const runId = uuidv4();
383
+ async handleChatModelStart(llm, messages, runId = undefined, _parentRunId = undefined, extraParams = undefined, _tags = undefined, _metadata = undefined, runName = undefined) {
384
+ return Promise.all(messages.map(async (messageGroup, idx) => {
385
+ // Can't have duplicate runs with the same run ID (if provided)
386
+ const runId_ = idx === 0 && runId ? runId : uuidv4();
385
387
  await Promise.all(this.handlers.map((handler) => consumeCallback(async () => {
386
388
  if (!handler.ignoreLLM) {
387
389
  try {
388
390
  if (handler.handleChatModelStart) {
389
- await handler.handleChatModelStart?.(llm, [messageGroup], runId, this._parentRunId, extraParams, this.tags, this.metadata, runName);
391
+ await handler.handleChatModelStart?.(llm, [messageGroup], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName);
390
392
  }
391
393
  else if (handler.handleLLMStart) {
392
394
  const messageString = getBufferString(messageGroup);
393
- await handler.handleLLMStart?.(llm, [messageString], runId, this._parentRunId, extraParams, this.tags, this.metadata, runName);
395
+ await handler.handleLLMStart?.(llm, [messageString], runId_, this._parentRunId, extraParams, this.tags, this.metadata, runName);
394
396
  }
395
397
  }
396
398
  catch (err) {
@@ -398,7 +400,7 @@ export class CallbackManager extends BaseCallbackManager {
398
400
  }
399
401
  }
400
402
  }, handler.awaitHandlers)));
401
- return new CallbackManagerForLLMRun(runId, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId);
403
+ return new CallbackManagerForLLMRun(runId_, this.handlers, this.inheritableHandlers, this.tags, this.inheritableTags, this.metadata, this.inheritableMetadata, this._parentRunId);
402
404
  }));
403
405
  }
404
406
  async handleChainStart(chain, inputs, runId = uuidv4(), runType = undefined, _tags = undefined, _metadata = undefined, runName = undefined) {
@@ -2,7 +2,7 @@ import type { BaseChatModel } from "../language_models/chat_models.js";
2
2
  import type { BasePromptTemplate } from "../prompts/base.js";
3
3
  import type { BaseLanguageModelInterface } from "../language_models/base.js";
4
4
  import type { BaseLLM } from "../language_models/llms.js";
5
- import type { PartialValues } from "../utils/types.js";
5
+ import type { PartialValues } from "../utils/types/index.js";
6
6
  export type BaseGetPromptAsyncOptions = {
7
7
  partialVariables?: PartialValues;
8
8
  };
@@ -76,7 +76,7 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
76
76
  invocation_params: this?.invocationParams(callOptions),
77
77
  batch_size: 1,
78
78
  };
79
- const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), [messages], undefined, undefined, extra, undefined, undefined, runnableConfig.runName);
79
+ const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), [messages], runnableConfig.runId, undefined, extra, undefined, undefined, runnableConfig.runName);
80
80
  let generationChunk;
81
81
  try {
82
82
  for await (const chunk of this._streamResponseChunks(messages, callOptions, runManagers?.[0])) {
@@ -113,7 +113,7 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
113
113
  invocation_params: this?.invocationParams(parsedOptions),
114
114
  batch_size: 1,
115
115
  };
116
- const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages, undefined, undefined, extra, undefined, undefined, handledOptions.runName);
116
+ const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages, handledOptions.runId, undefined, extra, undefined, undefined, handledOptions.runName);
117
117
  // generate results
118
118
  const results = await Promise.allSettled(baseMessages.map((messageList, i) => this._generate(messageList, { ...parsedOptions, promptIndex: i }, runManagers?.[i])));
119
119
  // handle results
@@ -172,7 +172,7 @@ class BaseChatModel extends base_js_1.BaseLanguageModel {
172
172
  batch_size: 1,
173
173
  cached: true,
174
174
  };
175
- const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages, undefined, undefined, extra, undefined, undefined, handledOptions.runName);
175
+ const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages, handledOptions.runId, undefined, extra, undefined, undefined, handledOptions.runName);
176
176
  // generate results
177
177
  const missingPromptIndices = [];
178
178
  const results = await Promise.allSettled(baseMessages.map(async (baseMessage, index) => {
@@ -72,7 +72,7 @@ export class BaseChatModel extends BaseLanguageModel {
72
72
  invocation_params: this?.invocationParams(callOptions),
73
73
  batch_size: 1,
74
74
  };
75
- const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), [messages], undefined, undefined, extra, undefined, undefined, runnableConfig.runName);
75
+ const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), [messages], runnableConfig.runId, undefined, extra, undefined, undefined, runnableConfig.runName);
76
76
  let generationChunk;
77
77
  try {
78
78
  for await (const chunk of this._streamResponseChunks(messages, callOptions, runManagers?.[0])) {
@@ -109,7 +109,7 @@ export class BaseChatModel extends BaseLanguageModel {
109
109
  invocation_params: this?.invocationParams(parsedOptions),
110
110
  batch_size: 1,
111
111
  };
112
- const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages, undefined, undefined, extra, undefined, undefined, handledOptions.runName);
112
+ const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages, handledOptions.runId, undefined, extra, undefined, undefined, handledOptions.runName);
113
113
  // generate results
114
114
  const results = await Promise.allSettled(baseMessages.map((messageList, i) => this._generate(messageList, { ...parsedOptions, promptIndex: i }, runManagers?.[i])));
115
115
  // handle results
@@ -168,7 +168,7 @@ export class BaseChatModel extends BaseLanguageModel {
168
168
  batch_size: 1,
169
169
  cached: true,
170
170
  };
171
- const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages, undefined, undefined, extra, undefined, undefined, handledOptions.runName);
171
+ const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages, handledOptions.runId, undefined, extra, undefined, undefined, handledOptions.runName);
172
172
  // generate results
173
173
  const missingPromptIndices = [];
174
174
  const results = await Promise.allSettled(baseMessages.map(async (baseMessage, index) => {
@@ -57,7 +57,7 @@ class BaseLLM extends base_js_1.BaseLanguageModel {
57
57
  invocation_params: this?.invocationParams(callOptions),
58
58
  batch_size: 1,
59
59
  };
60
- const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), [prompt.toString()], undefined, undefined, extra, undefined, undefined, runnableConfig.runName);
60
+ const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), [prompt.toString()], runnableConfig.runId, undefined, extra, undefined, undefined, runnableConfig.runName);
61
61
  let generation = new outputs_js_1.GenerationChunk({
62
62
  text: "",
63
63
  });
@@ -132,7 +132,7 @@ class BaseLLM extends base_js_1.BaseLanguageModel {
132
132
  invocation_params: this?.invocationParams(parsedOptions),
133
133
  batch_size: prompts.length,
134
134
  };
135
- const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, undefined, undefined, extra, undefined, undefined, handledOptions?.runName);
135
+ const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, handledOptions.runId, undefined, extra, undefined, undefined, handledOptions?.runName);
136
136
  let output;
137
137
  try {
138
138
  output = await this._generate(prompts, parsedOptions, runManagers?.[0]);
@@ -153,7 +153,7 @@ class BaseLLM extends base_js_1.BaseLanguageModel {
153
153
  });
154
154
  return output;
155
155
  }
156
- async _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, }) {
156
+ async _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, runId, }) {
157
157
  const callbackManager_ = await manager_js_1.CallbackManager.configure(handledOptions.callbacks, this.callbacks, handledOptions.tags, this.tags, handledOptions.metadata, this.metadata, { verbose: this.verbose });
158
158
  const extra = {
159
159
  options: parsedOptions,
@@ -161,7 +161,7 @@ class BaseLLM extends base_js_1.BaseLanguageModel {
161
161
  batch_size: prompts.length,
162
162
  cached: true,
163
163
  };
164
- const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, undefined, undefined, extra, undefined, undefined, handledOptions?.runName);
164
+ const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, runId, undefined, extra, undefined, undefined, handledOptions?.runName);
165
165
  // generate results
166
166
  const missingPromptIndices = [];
167
167
  const results = await Promise.allSettled(prompts.map(async (prompt, index) => {
@@ -238,6 +238,7 @@ class BaseLLM extends base_js_1.BaseLanguageModel {
238
238
  llmStringKey,
239
239
  parsedOptions: callOptions,
240
240
  handledOptions: runnableConfig,
241
+ runId: runnableConfig.runId,
241
242
  });
242
243
  let llmOutput = {};
243
244
  if (missingPromptIndices.length > 0) {
@@ -23,6 +23,7 @@ interface LLMGenerateCachedParameters<T extends BaseLLM<CallOptions>, CallOption
23
23
  llmStringKey: string;
24
24
  parsedOptions: T["ParsedCallOptions"];
25
25
  handledOptions: RunnableConfig;
26
+ runId?: string;
26
27
  }
27
28
  /**
28
29
  * LLM Wrapper. Takes in a prompt (or prompts) and returns a string.
@@ -63,7 +64,7 @@ export declare abstract class BaseLLM<CallOptions extends BaseLLMCallOptions = B
63
64
  _flattenLLMResult(llmResult: LLMResult): LLMResult[];
64
65
  /** @ignore */
65
66
  _generateUncached(prompts: string[], parsedOptions: this["ParsedCallOptions"], handledOptions: BaseCallbackConfig): Promise<LLMResult>;
66
- _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, }: LLMGenerateCachedParameters<typeof this>): Promise<LLMResult & {
67
+ _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, runId, }: LLMGenerateCachedParameters<typeof this>): Promise<LLMResult & {
67
68
  missingPromptIndices: number[];
68
69
  }>;
69
70
  /**
@@ -54,7 +54,7 @@ export class BaseLLM extends BaseLanguageModel {
54
54
  invocation_params: this?.invocationParams(callOptions),
55
55
  batch_size: 1,
56
56
  };
57
- const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), [prompt.toString()], undefined, undefined, extra, undefined, undefined, runnableConfig.runName);
57
+ const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), [prompt.toString()], runnableConfig.runId, undefined, extra, undefined, undefined, runnableConfig.runName);
58
58
  let generation = new GenerationChunk({
59
59
  text: "",
60
60
  });
@@ -129,7 +129,7 @@ export class BaseLLM extends BaseLanguageModel {
129
129
  invocation_params: this?.invocationParams(parsedOptions),
130
130
  batch_size: prompts.length,
131
131
  };
132
- const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, undefined, undefined, extra, undefined, undefined, handledOptions?.runName);
132
+ const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, handledOptions.runId, undefined, extra, undefined, undefined, handledOptions?.runName);
133
133
  let output;
134
134
  try {
135
135
  output = await this._generate(prompts, parsedOptions, runManagers?.[0]);
@@ -150,7 +150,7 @@ export class BaseLLM extends BaseLanguageModel {
150
150
  });
151
151
  return output;
152
152
  }
153
- async _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, }) {
153
+ async _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, runId, }) {
154
154
  const callbackManager_ = await CallbackManager.configure(handledOptions.callbacks, this.callbacks, handledOptions.tags, this.tags, handledOptions.metadata, this.metadata, { verbose: this.verbose });
155
155
  const extra = {
156
156
  options: parsedOptions,
@@ -158,7 +158,7 @@ export class BaseLLM extends BaseLanguageModel {
158
158
  batch_size: prompts.length,
159
159
  cached: true,
160
160
  };
161
- const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, undefined, undefined, extra, undefined, undefined, handledOptions?.runName);
161
+ const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, runId, undefined, extra, undefined, undefined, handledOptions?.runName);
162
162
  // generate results
163
163
  const missingPromptIndices = [];
164
164
  const results = await Promise.allSettled(prompts.map(async (prompt, index) => {
@@ -235,6 +235,7 @@ export class BaseLLM extends BaseLanguageModel {
235
235
  llmStringKey,
236
236
  parsedOptions: callOptions,
237
237
  handledOptions: runnableConfig,
238
+ runId: runnableConfig.runId,
238
239
  });
239
240
  let llmOutput = {};
240
241
  if (missingPromptIndices.length > 0) {
@@ -45,7 +45,7 @@ exports.outputs = __importStar(require("../outputs.cjs"));
45
45
  exports.prompts = __importStar(require("../prompts/index.cjs"));
46
46
  exports.prompt_values = __importStar(require("../prompt_values.cjs"));
47
47
  exports.runnables = __importStar(require("../runnables/index.cjs"));
48
- exports.retrievers = __importStar(require("../retrievers.cjs"));
48
+ exports.retrievers = __importStar(require("../retrievers/index.cjs"));
49
49
  exports.stores = __importStar(require("../stores.cjs"));
50
50
  exports.tools = __importStar(require("../tools.cjs"));
51
51
  exports.tracers__base = __importStar(require("../tracers/base.cjs"));
@@ -66,5 +66,5 @@ exports.utils__math = __importStar(require("../utils/math.cjs"));
66
66
  exports.utils__stream = __importStar(require("../utils/stream.cjs"));
67
67
  exports.utils__testing = __importStar(require("../utils/testing/index.cjs"));
68
68
  exports.utils__tiktoken = __importStar(require("../utils/tiktoken.cjs"));
69
- exports.utils__types = __importStar(require("../utils/types.cjs"));
69
+ exports.utils__types = __importStar(require("../utils/types/index.cjs"));
70
70
  exports.vectorstores = __importStar(require("../vectorstores.cjs"));
@@ -18,7 +18,7 @@ export * as outputs from "../outputs.js";
18
18
  export * as prompts from "../prompts/index.js";
19
19
  export * as prompt_values from "../prompt_values.js";
20
20
  export * as runnables from "../runnables/index.js";
21
- export * as retrievers from "../retrievers.js";
21
+ export * as retrievers from "../retrievers/index.js";
22
22
  export * as stores from "../stores.js";
23
23
  export * as tools from "../tools.js";
24
24
  export * as tracers__base from "../tracers/base.js";
@@ -39,5 +39,5 @@ export * as utils__math from "../utils/math.js";
39
39
  export * as utils__stream from "../utils/stream.js";
40
40
  export * as utils__testing from "../utils/testing/index.js";
41
41
  export * as utils__tiktoken from "../utils/tiktoken.js";
42
- export * as utils__types from "../utils/types.js";
42
+ export * as utils__types from "../utils/types/index.js";
43
43
  export * as vectorstores from "../vectorstores.js";
@@ -19,7 +19,7 @@ export * as outputs from "../outputs.js";
19
19
  export * as prompts from "../prompts/index.js";
20
20
  export * as prompt_values from "../prompt_values.js";
21
21
  export * as runnables from "../runnables/index.js";
22
- export * as retrievers from "../retrievers.js";
22
+ export * as retrievers from "../retrievers/index.js";
23
23
  export * as stores from "../stores.js";
24
24
  export * as tools from "../tools.js";
25
25
  export * as tracers__base from "../tracers/base.js";
@@ -40,5 +40,5 @@ export * as utils__math from "../utils/math.js";
40
40
  export * as utils__stream from "../utils/stream.js";
41
41
  export * as utils__testing from "../utils/testing/index.js";
42
42
  export * as utils__tiktoken from "../utils/tiktoken.js";
43
- export * as utils__types from "../utils/types.js";
43
+ export * as utils__types from "../utils/types/index.js";
44
44
  export * as vectorstores from "../vectorstores.js";
@@ -1,5 +1,5 @@
1
1
  import { Serializable } from "../load/serializable.js";
2
- import type { StringWithAutocomplete } from "../utils/types.js";
2
+ import type { StringWithAutocomplete } from "../utils/types/index.js";
3
3
  export interface StoredMessageData {
4
4
  content: string;
5
5
  role: string | undefined;
@@ -30,7 +30,11 @@ export type MessageContentImageUrl = {
30
30
  detail?: ImageDetail;
31
31
  };
32
32
  };
33
- export type MessageContentComplex = MessageContentText | MessageContentImageUrl;
33
+ export type MessageContentComplex = MessageContentText | MessageContentImageUrl | (Record<string, any> & {
34
+ type?: "text" | "image_url" | string;
35
+ }) | (Record<string, any> & {
36
+ type?: never;
37
+ });
34
38
  export type MessageContent = string | MessageContentComplex[];
35
39
  export interface FunctionCall {
36
40
  /**
@@ -225,7 +229,10 @@ export declare class ChatMessage extends BaseMessage implements ChatMessageField
225
229
  _getType(): MessageType;
226
230
  static isInstance(message: BaseMessage): message is ChatMessage;
227
231
  }
228
- export type BaseMessageLike = BaseMessage | [StringWithAutocomplete<MessageType | "user" | "assistant">, MessageContent] | string;
232
+ export type BaseMessageLike = BaseMessage | [
233
+ StringWithAutocomplete<MessageType | "user" | "assistant" | "placeholder">,
234
+ MessageContent
235
+ ] | string;
229
236
  export declare function isBaseMessage(messageLike?: unknown): messageLike is BaseMessage;
230
237
  export declare function isBaseMessageChunk(messageLike?: unknown): messageLike is BaseMessageChunk;
231
238
  export declare function coerceMessageLikeToMessage(messageLike: BaseMessageLike): BaseMessage;
@@ -62,14 +62,21 @@ class StringOutputParser extends transform_js_1.BaseTransformOutputParser {
62
62
  _messageContentComplexToString(content) {
63
63
  switch (content.type) {
64
64
  case "text":
65
- return this._textContentToString(content);
65
+ if ("text" in content) {
66
+ // Type guard for MessageContentText
67
+ return this._textContentToString(content);
68
+ }
69
+ break;
66
70
  case "image_url":
67
- return this._imageUrlContentToString(content);
71
+ if ("image_url" in content) {
72
+ // Type guard for MessageContentImageUrl
73
+ return this._imageUrlContentToString(content);
74
+ }
75
+ break;
68
76
  default:
69
- throw new Error(
70
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
71
- `Cannot coerce "${content.type}" message part into a string.`);
77
+ throw new Error(`Cannot coerce "${content.type}" message part into a string.`);
72
78
  }
79
+ throw new Error(`Invalid content type: ${content.type}`);
73
80
  }
74
81
  _baseMessageContentToString(content) {
75
82
  return content.reduce((acc, item) => acc + this._messageContentComplexToString(item), "");
@@ -59,14 +59,21 @@ export class StringOutputParser extends BaseTransformOutputParser {
59
59
  _messageContentComplexToString(content) {
60
60
  switch (content.type) {
61
61
  case "text":
62
- return this._textContentToString(content);
62
+ if ("text" in content) {
63
+ // Type guard for MessageContentText
64
+ return this._textContentToString(content);
65
+ }
66
+ break;
63
67
  case "image_url":
64
- return this._imageUrlContentToString(content);
68
+ if ("image_url" in content) {
69
+ // Type guard for MessageContentImageUrl
70
+ return this._imageUrlContentToString(content);
71
+ }
72
+ break;
65
73
  default:
66
- throw new Error(
67
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
68
- `Cannot coerce "${content.type}" message part into a string.`);
74
+ throw new Error(`Cannot coerce "${content.type}" message part into a string.`);
69
75
  }
76
+ throw new Error(`Invalid content type: ${content.type}`);
70
77
  }
71
78
  _baseMessageContentToString(content) {
72
79
  return content.reduce((acc, item) => acc + this._messageContentComplexToString(item), "");
@@ -1,4 +1,4 @@
1
- import type { InputValues, PartialValues, StringWithAutocomplete } from "../utils/types.js";
1
+ import type { InputValues, PartialValues, StringWithAutocomplete } from "../utils/types/index.js";
2
2
  import { type BasePromptValueInterface } from "../prompt_values.js";
3
3
  import { BaseOutputParser } from "../output_parsers/index.js";
4
4
  import type { SerializedFields } from "../load/map_keys.js";
@@ -451,15 +451,44 @@ function _coerceMessagePromptTemplateLike(messagePromptTemplateLike) {
451
451
  (0, index_js_1.isBaseMessage)(messagePromptTemplateLike)) {
452
452
  return messagePromptTemplateLike;
453
453
  }
454
+ if (Array.isArray(messagePromptTemplateLike) &&
455
+ messagePromptTemplateLike[0] === "placeholder") {
456
+ const messageContent = messagePromptTemplateLike[1];
457
+ if (typeof messageContent !== "string" ||
458
+ messageContent[0] !== "{" ||
459
+ messageContent[messageContent.length - 1] !== "}") {
460
+ throw new Error(`Invalid placeholder template: "${messagePromptTemplateLike[1]}". Expected a variable name surrounded by curly braces.`);
461
+ }
462
+ const variableName = messageContent.slice(1, -1);
463
+ return new MessagesPlaceholder({ variableName, optional: true });
464
+ }
454
465
  const message = (0, index_js_1.coerceMessageLikeToMessage)(messagePromptTemplateLike);
466
+ let templateData;
467
+ if (typeof message.content === "string") {
468
+ templateData = message.content;
469
+ }
470
+ else {
471
+ // Assuming message.content is an array of complex objects, transform it.
472
+ templateData = message.content.map((item) => {
473
+ if ("text" in item) {
474
+ return { text: item.text };
475
+ }
476
+ else if ("image_url" in item) {
477
+ return { image_url: item.image_url };
478
+ }
479
+ else {
480
+ throw new Error("Invalid message content");
481
+ }
482
+ });
483
+ }
455
484
  if (message._getType() === "human") {
456
- return HumanMessagePromptTemplate.fromTemplate(message.content);
485
+ return HumanMessagePromptTemplate.fromTemplate(templateData);
457
486
  }
458
487
  else if (message._getType() === "ai") {
459
- return AIMessagePromptTemplate.fromTemplate(message.content);
488
+ return AIMessagePromptTemplate.fromTemplate(templateData);
460
489
  }
461
490
  else if (message._getType() === "system") {
462
- return SystemMessagePromptTemplate.fromTemplate(message.content);
491
+ return SystemMessagePromptTemplate.fromTemplate(templateData);
463
492
  }
464
493
  else if (index_js_1.ChatMessage.isInstance(message)) {
465
494
  return ChatMessagePromptTemplate.fromTemplate(message.content, message.role);
@@ -1,7 +1,7 @@
1
1
  import type { BaseCallbackConfig } from "../callbacks/manager.js";
2
2
  import { AIMessage, HumanMessage, SystemMessage, BaseMessage, ChatMessage, type BaseMessageLike, MessageContent } from "../messages/index.js";
3
3
  import { type ChatPromptValueInterface } from "../prompt_values.js";
4
- import type { InputValues, PartialValues } from "../utils/types.js";
4
+ import type { InputValues, PartialValues } from "../utils/types/index.js";
5
5
  import { Runnable } from "../runnables/base.js";
6
6
  import { BaseStringPromptTemplate } from "./string.js";
7
7
  import { BasePromptTemplate, type BasePromptTemplateInput, type TypedPromptInputValues } from "./base.js";
@@ -440,15 +440,44 @@ function _coerceMessagePromptTemplateLike(messagePromptTemplateLike) {
440
440
  isBaseMessage(messagePromptTemplateLike)) {
441
441
  return messagePromptTemplateLike;
442
442
  }
443
+ if (Array.isArray(messagePromptTemplateLike) &&
444
+ messagePromptTemplateLike[0] === "placeholder") {
445
+ const messageContent = messagePromptTemplateLike[1];
446
+ if (typeof messageContent !== "string" ||
447
+ messageContent[0] !== "{" ||
448
+ messageContent[messageContent.length - 1] !== "}") {
449
+ throw new Error(`Invalid placeholder template: "${messagePromptTemplateLike[1]}". Expected a variable name surrounded by curly braces.`);
450
+ }
451
+ const variableName = messageContent.slice(1, -1);
452
+ return new MessagesPlaceholder({ variableName, optional: true });
453
+ }
443
454
  const message = coerceMessageLikeToMessage(messagePromptTemplateLike);
455
+ let templateData;
456
+ if (typeof message.content === "string") {
457
+ templateData = message.content;
458
+ }
459
+ else {
460
+ // Assuming message.content is an array of complex objects, transform it.
461
+ templateData = message.content.map((item) => {
462
+ if ("text" in item) {
463
+ return { text: item.text };
464
+ }
465
+ else if ("image_url" in item) {
466
+ return { image_url: item.image_url };
467
+ }
468
+ else {
469
+ throw new Error("Invalid message content");
470
+ }
471
+ });
472
+ }
444
473
  if (message._getType() === "human") {
445
- return HumanMessagePromptTemplate.fromTemplate(message.content);
474
+ return HumanMessagePromptTemplate.fromTemplate(templateData);
446
475
  }
447
476
  else if (message._getType() === "ai") {
448
- return AIMessagePromptTemplate.fromTemplate(message.content);
477
+ return AIMessagePromptTemplate.fromTemplate(templateData);
449
478
  }
450
479
  else if (message._getType() === "system") {
451
- return SystemMessagePromptTemplate.fromTemplate(message.content);
480
+ return SystemMessagePromptTemplate.fromTemplate(templateData);
452
481
  }
453
482
  else if (ChatMessage.isInstance(message)) {
454
483
  return ChatMessagePromptTemplate.fromTemplate(message.content, message.role);
@@ -4,7 +4,7 @@ import type { BaseExampleSelector } from "../example_selectors/base.js";
4
4
  import { type TemplateFormat } from "./template.js";
5
5
  import { PromptTemplate } from "./prompt.js";
6
6
  import type { SerializedFewShotTemplate } from "./serde.js";
7
- import type { InputValues, PartialValues } from "../utils/types.js";
7
+ import type { InputValues, PartialValues } from "../utils/types/index.js";
8
8
  import type { BaseMessage } from "../messages/index.js";
9
9
  import { BaseChatPromptTemplate, type BaseMessagePromptTemplate } from "./chat.js";
10
10
  export interface FewShotPromptTemplateInput extends BasePromptTemplateInput<InputValues> {
@@ -1,5 +1,5 @@
1
1
  import { ImagePromptValue, ImageContent } from "../prompt_values.js";
2
- import type { InputValues, PartialValues } from "../utils/types.js";
2
+ import type { InputValues, PartialValues } from "../utils/types/index.js";
3
3
  import { BasePromptTemplate, BasePromptTemplateInput, TypedPromptInputValues } from "./base.js";
4
4
  import { TemplateFormat } from "./template.js";
5
5
  /**
@@ -1,4 +1,4 @@
1
- import type { InputValues, PartialValues } from "../utils/types.js";
1
+ import type { InputValues, PartialValues } from "../utils/types/index.js";
2
2
  import type { SerializedBasePromptTemplate } from "./serde.js";
3
3
  import { BasePromptTemplate, type BasePromptTemplateInput } from "./base.js";
4
4
  /**
@@ -2,7 +2,7 @@ import { BaseStringPromptTemplate } from "./string.js";
2
2
  import type { BasePromptTemplateInput, TypedPromptInputValues } from "./base.js";
3
3
  import { type TemplateFormat } from "./template.js";
4
4
  import type { SerializedPromptTemplate } from "./serde.js";
5
- import type { InputValues, PartialValues } from "../utils/types.js";
5
+ import type { InputValues, PartialValues } from "../utils/types/index.js";
6
6
  import { MessageContent } from "../messages/index.js";
7
7
  /**
8
8
  * Inputs to create a {@link PromptTemplate}
@@ -1,4 +1,4 @@
1
- import type { InputValues } from "../utils/types.js";
1
+ import type { InputValues } from "../utils/types/index.js";
2
2
  import { type StringPromptValueInterface } from "../prompt_values.js";
3
3
  import { BasePromptTemplate, type TypedPromptInputValues } from "./base.js";
4
4
  /**