@langchain/core 1.0.0-alpha.2 → 1.0.0-alpha.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agents.d.ts.map +1 -1
- package/dist/caches/base.cjs +5 -18
- package/dist/caches/base.cjs.map +1 -1
- package/dist/caches/base.d.cts +4 -20
- package/dist/caches/base.d.cts.map +1 -1
- package/dist/caches/base.d.ts +4 -20
- package/dist/caches/base.d.ts.map +1 -1
- package/dist/caches/base.js +5 -18
- package/dist/caches/base.js.map +1 -1
- package/dist/callbacks/base.d.cts +3 -2
- package/dist/callbacks/base.d.cts.map +1 -1
- package/dist/callbacks/base.d.ts +3 -2
- package/dist/callbacks/base.d.ts.map +1 -1
- package/dist/callbacks/manager.d.cts +1 -1
- package/dist/callbacks/manager.d.ts +1 -1
- package/dist/language_models/base.d.cts +1 -1
- package/dist/language_models/base.d.ts +1 -1
- package/dist/language_models/chat_models.cjs +1 -0
- package/dist/language_models/chat_models.cjs.map +1 -1
- package/dist/language_models/chat_models.d.cts +2 -1
- package/dist/language_models/chat_models.d.cts.map +1 -1
- package/dist/language_models/chat_models.d.ts +2 -1
- package/dist/language_models/chat_models.d.ts.map +1 -1
- package/dist/language_models/chat_models.js +2 -1
- package/dist/language_models/chat_models.js.map +1 -1
- package/dist/messages/ai.cjs +32 -60
- package/dist/messages/ai.cjs.map +1 -1
- package/dist/messages/ai.d.cts +26 -119
- package/dist/messages/ai.d.cts.map +1 -1
- package/dist/messages/ai.d.ts +26 -119
- package/dist/messages/ai.d.ts.map +1 -1
- package/dist/messages/ai.js +32 -60
- package/dist/messages/ai.js.map +1 -1
- package/dist/messages/base.cjs +48 -28
- package/dist/messages/base.cjs.map +1 -1
- package/dist/messages/base.d.cts +37 -39
- package/dist/messages/base.d.cts.map +1 -1
- package/dist/messages/base.d.ts +37 -39
- package/dist/messages/base.d.ts.map +1 -1
- package/dist/messages/base.js +48 -28
- package/dist/messages/base.js.map +1 -1
- package/dist/messages/chat.cjs +18 -12
- package/dist/messages/chat.cjs.map +1 -1
- package/dist/messages/chat.d.cts +21 -13
- package/dist/messages/chat.d.cts.map +1 -1
- package/dist/messages/chat.d.ts +21 -13
- package/dist/messages/chat.d.ts.map +1 -1
- package/dist/messages/chat.js +18 -12
- package/dist/messages/chat.js.map +1 -1
- package/dist/messages/content/index.cjs.map +1 -1
- package/dist/messages/content/index.d.cts +1 -1
- package/dist/messages/content/index.d.cts.map +1 -1
- package/dist/messages/content/index.d.ts +1 -1
- package/dist/messages/content/index.d.ts.map +1 -1
- package/dist/messages/content/index.js.map +1 -1
- package/dist/messages/function.cjs +8 -13
- package/dist/messages/function.cjs.map +1 -1
- package/dist/messages/function.d.cts +11 -11
- package/dist/messages/function.d.cts.map +1 -1
- package/dist/messages/function.d.ts +11 -11
- package/dist/messages/function.d.ts.map +1 -1
- package/dist/messages/function.js +8 -13
- package/dist/messages/function.js.map +1 -1
- package/dist/messages/human.cjs +20 -11
- package/dist/messages/human.cjs.map +1 -1
- package/dist/messages/human.d.cts +20 -15
- package/dist/messages/human.d.cts.map +1 -1
- package/dist/messages/human.d.ts +20 -15
- package/dist/messages/human.d.ts.map +1 -1
- package/dist/messages/human.js +20 -11
- package/dist/messages/human.js.map +1 -1
- package/dist/messages/index.cjs +10 -0
- package/dist/messages/index.cjs.map +1 -1
- package/dist/messages/index.d.cts +9 -7
- package/dist/messages/index.d.ts +9 -7
- package/dist/messages/index.js +8 -2
- package/dist/messages/index.js.map +1 -1
- package/dist/messages/message.cjs +15 -0
- package/dist/messages/message.cjs.map +1 -0
- package/dist/messages/message.d.cts +598 -0
- package/dist/messages/message.d.cts.map +1 -0
- package/dist/messages/message.d.ts +598 -0
- package/dist/messages/message.d.ts.map +1 -0
- package/dist/messages/message.js +14 -0
- package/dist/messages/message.js.map +1 -0
- package/dist/messages/metadata.cjs +41 -0
- package/dist/messages/metadata.cjs.map +1 -0
- package/dist/messages/metadata.d.cts +98 -0
- package/dist/messages/metadata.d.cts.map +1 -0
- package/dist/messages/metadata.d.ts +98 -0
- package/dist/messages/metadata.d.ts.map +1 -0
- package/dist/messages/metadata.js +40 -0
- package/dist/messages/metadata.js.map +1 -0
- package/dist/messages/modifier.cjs +5 -7
- package/dist/messages/modifier.cjs.map +1 -1
- package/dist/messages/modifier.d.cts +7 -5
- package/dist/messages/modifier.d.cts.map +1 -1
- package/dist/messages/modifier.d.ts +7 -5
- package/dist/messages/modifier.d.ts.map +1 -1
- package/dist/messages/modifier.js +5 -7
- package/dist/messages/modifier.js.map +1 -1
- package/dist/messages/system.cjs +20 -11
- package/dist/messages/system.cjs.map +1 -1
- package/dist/messages/system.d.cts +20 -15
- package/dist/messages/system.d.cts.map +1 -1
- package/dist/messages/system.d.ts +20 -15
- package/dist/messages/system.d.ts.map +1 -1
- package/dist/messages/system.js +20 -11
- package/dist/messages/system.js.map +1 -1
- package/dist/messages/tool.cjs +15 -13
- package/dist/messages/tool.cjs.map +1 -1
- package/dist/messages/tool.d.cts +72 -46
- package/dist/messages/tool.d.cts.map +1 -1
- package/dist/messages/tool.d.ts +72 -46
- package/dist/messages/tool.d.ts.map +1 -1
- package/dist/messages/tool.js +15 -13
- package/dist/messages/tool.js.map +1 -1
- package/dist/messages/transformers.cjs.map +1 -1
- package/dist/messages/transformers.d.cts +2 -1
- package/dist/messages/transformers.d.cts.map +1 -1
- package/dist/messages/transformers.d.ts +2 -1
- package/dist/messages/transformers.d.ts.map +1 -1
- package/dist/messages/transformers.js.map +1 -1
- package/dist/messages/utils.cjs +8 -1
- package/dist/messages/utils.cjs.map +1 -1
- package/dist/messages/utils.d.cts +83 -3
- package/dist/messages/utils.d.cts.map +1 -1
- package/dist/messages/utils.d.ts +83 -3
- package/dist/messages/utils.d.ts.map +1 -1
- package/dist/messages/utils.js +8 -2
- package/dist/messages/utils.js.map +1 -1
- package/dist/output_parsers/openai_tools/json_output_tools_parsers.d.cts +1 -1
- package/dist/output_parsers/openai_tools/json_output_tools_parsers.d.ts +1 -1
- package/dist/prompt_values.d.cts +4 -3
- package/dist/prompt_values.d.cts.map +1 -1
- package/dist/prompt_values.d.ts +4 -3
- package/dist/prompt_values.d.ts.map +1 -1
- package/dist/prompts/chat.d.cts +3 -3
- package/dist/prompts/chat.d.cts.map +1 -1
- package/dist/prompts/chat.d.ts +3 -3
- package/dist/prompts/chat.d.ts.map +1 -1
- package/dist/prompts/few_shot.d.cts +1 -1
- package/dist/prompts/few_shot.d.ts +1 -1
- package/dist/prompts/image.d.cts +1 -1
- package/dist/prompts/image.d.ts +1 -1
- package/dist/prompts/prompt.d.cts +1 -1
- package/dist/prompts/prompt.d.ts +1 -1
- package/dist/prompts/template.d.cts +1 -1
- package/dist/prompts/template.d.ts +1 -1
- package/dist/runnables/base.d.cts +1 -1
- package/dist/runnables/base.d.ts +1 -1
- package/dist/tools/index.d.cts +1 -1
- package/dist/tools/index.d.ts +1 -1
- package/dist/tools/types.d.cts +1 -1
- package/dist/tools/types.d.ts +1 -1
- package/dist/tracers/base.d.cts +1 -1
- package/dist/tracers/base.d.ts +1 -1
- package/dist/tracers/tracer_langchain_v1.d.cts +1 -1
- package/dist/tracers/tracer_langchain_v1.d.ts +1 -1
- package/dist/utils/testing/index.d.cts +4 -3
- package/dist/utils/testing/index.d.cts.map +1 -1
- package/dist/utils/testing/index.d.ts +4 -3
- package/dist/utils/testing/index.d.ts.map +1 -1
- package/package.json +1 -1
package/dist/language_models/chat_models.js.map
@@ -1 +1 @@
-
{"version":3,"file":"chat_models.js","names":["chunk: BaseMessageChunk","messages: BaseMessage[]","messagesToTrace: BaseMessage[]","fields: BaseChatModelParams","options?: Partial<CallOptions>","input: BaseLanguageModelInput","options?: CallOptions","_messages: BaseMessage[]","_options: this[\"ParsedCallOptions\"]","_runManager?: CallbackManagerForLLMRun","generationChunk: ChatGenerationChunk | undefined","llmOutput: Record<string, any> | undefined","options: this[\"ParsedCallOptions\"]","messages: BaseMessageLike[][]","parsedOptions: this[\"ParsedCallOptions\"]","handledOptions: RunnableConfig","startedRunManagers?: CallbackManagerForLLMRun[]","runManagers: CallbackManagerForLLMRun[] | undefined","generations: ChatGeneration[][]","llmOutputs: LLMResult[\"llmOutput\"][]","output: LLMResult","missingPromptIndices: number[]","generations: Generation[][]","result","options?: string[] | CallOptions","callbacks?: Callbacks","parsedOptions: CallOptions | undefined","_options?: this[\"ParsedCallOptions\"]","promptValues: BasePromptValueInterface[]","promptMessages: BaseMessage[][]","messages: BaseMessageLike[]","promptValue: BasePromptValueInterface","promptMessages: BaseMessage[]","text: string","outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>","config?: StructuredOutputMethodOptions<boolean>","schema: Record<string, any> | InteropZodType<RunOutput>","tools: ToolDefinition[]","input: AIMessageChunk","input: any","config","runManager?: CallbackManagerForLLMRun"],"sources":["../../src/language_models/chat_models.ts"],"sourcesContent":["import type { ZodType as ZodTypeV3 } from \"zod/v3\";\nimport type { $ZodType as ZodTypeV4 } from \"zod/v4/core\";\nimport {\n AIMessage,\n type BaseMessage,\n BaseMessageChunk,\n type BaseMessageLike,\n HumanMessage,\n coerceMessageLikeToMessage,\n AIMessageChunk,\n isAIMessageChunk,\n isBaseMessage,\n isAIMessage,\n} from \"../messages/index.js\";\nimport {\n convertToOpenAIImageBlock,\n isURLContentBlock,\n isBase64ContentBlock,\n} from \"../messages/content/data.js\";\nimport type { BasePromptValueInterface } from \"../prompt_values.js\";\nimport {\n LLMResult,\n RUN_KEY,\n type ChatGeneration,\n ChatGenerationChunk,\n type ChatResult,\n type Generation,\n} from \"../outputs.js\";\nimport {\n BaseLanguageModel,\n type StructuredOutputMethodOptions,\n type ToolDefinition,\n type BaseLanguageModelCallOptions,\n type BaseLanguageModelInput,\n type BaseLanguageModelParams,\n} from \"./base.js\";\nimport {\n CallbackManager,\n type CallbackManagerForLLMRun,\n type Callbacks,\n} from \"../callbacks/manager.js\";\nimport type { RunnableConfig } from \"../runnables/config.js\";\nimport type { BaseCache } from \"../caches/base.js\";\nimport {\n StructuredToolInterface,\n StructuredToolParams,\n} from \"../tools/index.js\";\nimport {\n Runnable,\n RunnableLambda,\n RunnableSequence,\n RunnableToolLike,\n} from \"../runnables/base.js\";\nimport { concat } from \"../utils/stream.js\";\nimport { RunnablePassthrough } from \"../runnables/passthrough.js\";\nimport {\n getSchemaDescription,\n InteropZodType,\n isInteropZodSchema,\n} from \"../utils/types/zod.js\";\nimport { callbackHandlerPrefersStreaming } from \"../callbacks/base.js\";\nimport { toJsonSchema } from \"../utils/json_schema.js\";\nimport { getEnvironmentVariable } from \"../utils/env.js\";\nimport { castStandardMessageContent, iife } from \"./utils.js\";\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport 
type ToolChoice = string | Record<string, any> | \"auto\" | \"any\";\n\n/**\n * Represents a serialized chat model.\n */\nexport type SerializedChatModel = {\n _model: string;\n _type: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n} & Record<string, any>;\n\n// todo?\n/**\n * Represents a serialized large language model.\n */\nexport type SerializedLLM = {\n _model: string;\n _type: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n} & Record<string, any>;\n\n/**\n * Represents the parameters for a base chat model.\n */\nexport type BaseChatModelParams = BaseLanguageModelParams & {\n /**\n * Whether to disable streaming.\n *\n * If streaming is bypassed, then `stream()` will defer to\n * `invoke()`.\n *\n * - If true, will always bypass streaming case.\n * - If false (default), will always use streaming case if available.\n */\n disableStreaming?: boolean;\n /**\n * Version of `AIMessage` output format to store in message content.\n *\n * `AIMessage.contentBlocks` will lazily parse the contents of `content` into a\n * standard format. This flag can be used to additionally store the standard format\n * as the message content, e.g., for serialization purposes.\n *\n * - \"v0\": provider-specific format in content (can lazily parse with `.contentBlocks`)\n * - \"v1\": standardized format in content (consistent with `.contentBlocks`)\n *\n * You can also set `LC_OUTPUT_VERSION` as an environment variable to \"v1\" to\n * enable this by default.\n *\n * @default \"v0\"\n */\n outputVersion?: \"v0\" | \"v1\";\n};\n\n/**\n * Represents the call options for a base chat model.\n */\nexport type BaseChatModelCallOptions = BaseLanguageModelCallOptions & {\n /**\n * Specifies how the chat model should use tools.\n * @default undefined\n *\n * Possible values:\n * - \"auto\": The model may choose to use any of the provided tools, or none.\n * - \"any\": The model must use one of the provided tools.\n * - \"none\": The model must not use any tools.\n * - A string (not \"auto\", \"any\", or \"none\"): The name of a specific tool the model must use.\n * - An object: A custom schema specifying tool choice parameters. Specific to the provider.\n *\n * Note: Not all providers support tool_choice. An error will be thrown\n * if used with an unsupported model.\n */\n tool_choice?: ToolChoice;\n};\n\n/**\n * Creates a transform stream for encoding chat message chunks.\n * @deprecated Use {@link BytesOutputParser} instead\n * @returns A TransformStream instance that encodes chat message chunks.\n */\nexport function createChatMessageChunkEncoderStream() {\n const textEncoder = new TextEncoder();\n return new TransformStream<BaseMessageChunk>({\n transform(chunk: BaseMessageChunk, controller) {\n controller.enqueue(\n textEncoder.encode(\n typeof chunk.content === \"string\"\n ? 
chunk.content\n : JSON.stringify(chunk.content)\n )\n );\n },\n });\n}\n\nfunction _formatForTracing(messages: BaseMessage[]): BaseMessage[] {\n const messagesToTrace: BaseMessage[] = [];\n for (const message of messages) {\n let messageToTrace = message;\n if (Array.isArray(message.content)) {\n for (let idx = 0; idx < message.content.length; idx++) {\n const block = message.content[idx];\n if (isURLContentBlock(block) || isBase64ContentBlock(block)) {\n if (messageToTrace === message) {\n // Also shallow-copy content\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n messageToTrace = new (message.constructor as any)({\n ...messageToTrace,\n content: [\n ...message.content.slice(0, idx),\n convertToOpenAIImageBlock(block),\n ...message.content.slice(idx + 1),\n ],\n });\n }\n }\n }\n }\n messagesToTrace.push(messageToTrace);\n }\n return messagesToTrace;\n}\n\nexport type LangSmithParams = {\n ls_provider?: string;\n ls_model_name?: string;\n ls_model_type: \"chat\";\n ls_temperature?: number;\n ls_max_tokens?: number;\n ls_stop?: Array<string>;\n};\n\nexport type BindToolsInput =\n | StructuredToolInterface\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>\n | ToolDefinition\n | RunnableToolLike\n | StructuredToolParams;\n\n/**\n * Base class for chat models. It extends the BaseLanguageModel class and\n * provides methods for generating chat based on input messages.\n */\nexport abstract class BaseChatModel<\n CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions,\n // TODO: Fix the parameter order on the next minor version.\n OutputMessageType extends BaseMessageChunk = AIMessageChunk\n> extends BaseLanguageModel<OutputMessageType, CallOptions> {\n // Backwards compatibility since fields have been moved to RunnableConfig\n declare ParsedCallOptions: Omit<\n CallOptions,\n Exclude<keyof RunnableConfig, \"signal\" | \"timeout\" | \"maxConcurrency\">\n >;\n\n // Only ever instantiated in main LangChain\n lc_namespace = [\"langchain\", \"chat_models\", this._llmType()];\n\n disableStreaming = false;\n\n outputVersion?: \"v0\" | \"v1\";\n\n constructor(fields: BaseChatModelParams) {\n super(fields);\n this.outputVersion = iife(() => {\n const outputVersion =\n fields.outputVersion ?? 
getEnvironmentVariable(\"LC_OUTPUT_VERSION\");\n if (outputVersion && [\"v0\", \"v1\"].includes(outputVersion)) {\n return outputVersion as \"v0\" | \"v1\";\n }\n return \"v0\";\n });\n }\n\n _combineLLMOutput?(\n ...llmOutputs: LLMResult[\"llmOutput\"][]\n ): LLMResult[\"llmOutput\"];\n\n protected _separateRunnableConfigFromCallOptionsCompat(\n options?: Partial<CallOptions>\n ): [RunnableConfig, this[\"ParsedCallOptions\"]] {\n // For backwards compat, keep `signal` in both runnableConfig and callOptions\n const [runnableConfig, callOptions] =\n super._separateRunnableConfigFromCallOptions(options);\n (callOptions as this[\"ParsedCallOptions\"]).signal = runnableConfig.signal;\n return [runnableConfig, callOptions as this[\"ParsedCallOptions\"]];\n }\n\n /**\n * Bind tool-like objects to this chat model.\n *\n * @param tools A list of tool definitions to bind to this chat model.\n * Can be a structured tool, an OpenAI formatted tool, or an object\n * matching the provider's specific tool schema.\n * @param kwargs Any additional parameters to bind.\n */\n bindTools?(\n tools: BindToolsInput[],\n kwargs?: Partial<CallOptions>\n ): Runnable<BaseLanguageModelInput, OutputMessageType, CallOptions>;\n\n /**\n * Invokes the chat model with a single input.\n * @param input The input for the language model.\n * @param options The call options.\n * @returns A Promise that resolves to a BaseMessageChunk.\n */\n async invoke(\n input: BaseLanguageModelInput,\n options?: CallOptions\n ): Promise<OutputMessageType> {\n const promptValue = BaseChatModel._convertInputToPromptValue(input);\n const result = await this.generatePrompt(\n [promptValue],\n options,\n options?.callbacks\n );\n const chatGeneration = result.generations[0][0] as ChatGeneration;\n // TODO: Remove cast after figuring out inheritance\n return chatGeneration.message as OutputMessageType;\n }\n\n // eslint-disable-next-line require-yield\n async *_streamResponseChunks(\n _messages: BaseMessage[],\n _options: this[\"ParsedCallOptions\"],\n _runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n throw new Error(\"Not implemented.\");\n }\n\n async *_streamIterator(\n input: BaseLanguageModelInput,\n options?: CallOptions\n ): AsyncGenerator<OutputMessageType> {\n // Subclass check required to avoid double callbacks with default implementation\n if (\n this._streamResponseChunks ===\n BaseChatModel.prototype._streamResponseChunks ||\n this.disableStreaming\n ) {\n yield this.invoke(input, options);\n } else {\n const prompt = BaseChatModel._convertInputToPromptValue(input);\n const messages = prompt.toChatMessages();\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(options);\n\n const inheritableMetadata = {\n ...runnableConfig.metadata,\n ...this.getLsParams(callOptions),\n };\n const callbackManager_ = await CallbackManager.configure(\n runnableConfig.callbacks,\n this.callbacks,\n runnableConfig.tags,\n this.tags,\n inheritableMetadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: callOptions,\n invocation_params: this?.invocationParams(callOptions),\n batch_size: 1,\n };\n const runManagers = await callbackManager_?.handleChatModelStart(\n this.toJSON(),\n [_formatForTracing(messages)],\n runnableConfig.runId,\n undefined,\n extra,\n undefined,\n undefined,\n runnableConfig.runName\n );\n let generationChunk: ChatGenerationChunk | undefined;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let llmOutput: 
Record<string, any> | undefined;\n try {\n for await (const chunk of this._streamResponseChunks(\n messages,\n callOptions,\n runManagers?.[0]\n )) {\n if (chunk.message.id == null) {\n const runId = runManagers?.at(0)?.runId;\n if (runId != null) chunk.message._updateId(`run-${runId}`);\n }\n chunk.message.response_metadata = {\n ...chunk.generationInfo,\n ...chunk.message.response_metadata,\n };\n if (this.outputVersion === \"v1\") {\n yield castStandardMessageContent(\n chunk.message\n ) as OutputMessageType;\n } else {\n yield chunk.message as OutputMessageType;\n }\n if (!generationChunk) {\n generationChunk = chunk;\n } else {\n generationChunk = generationChunk.concat(chunk);\n }\n if (\n isAIMessageChunk(chunk.message) &&\n chunk.message.usage_metadata !== undefined\n ) {\n llmOutput = {\n tokenUsage: {\n promptTokens: chunk.message.usage_metadata.input_tokens,\n completionTokens: chunk.message.usage_metadata.output_tokens,\n totalTokens: chunk.message.usage_metadata.total_tokens,\n },\n };\n }\n }\n } catch (err) {\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMError(err)\n )\n );\n throw err;\n }\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMEnd({\n // TODO: Remove cast after figuring out inheritance\n generations: [[generationChunk as ChatGeneration]],\n llmOutput,\n })\n )\n );\n }\n }\n\n getLsParams(options: this[\"ParsedCallOptions\"]): LangSmithParams {\n const providerName = this.getName().startsWith(\"Chat\")\n ? this.getName().replace(\"Chat\", \"\")\n : this.getName();\n\n return {\n ls_model_type: \"chat\",\n ls_stop: options.stop,\n ls_provider: providerName,\n };\n }\n\n /** @ignore */\n async _generateUncached(\n messages: BaseMessageLike[][],\n parsedOptions: this[\"ParsedCallOptions\"],\n handledOptions: RunnableConfig,\n startedRunManagers?: CallbackManagerForLLMRun[]\n ): Promise<LLMResult> {\n const baseMessages = messages.map((messageList) =>\n messageList.map(coerceMessageLikeToMessage)\n );\n\n let runManagers: CallbackManagerForLLMRun[] | undefined;\n if (\n startedRunManagers !== undefined &&\n startedRunManagers.length === baseMessages.length\n ) {\n runManagers = startedRunManagers;\n } else {\n const inheritableMetadata = {\n ...handledOptions.metadata,\n ...this.getLsParams(parsedOptions),\n };\n // create callback manager and start run\n const callbackManager_ = await CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n inheritableMetadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: this?.invocationParams(parsedOptions),\n batch_size: 1,\n };\n runManagers = await callbackManager_?.handleChatModelStart(\n this.toJSON(),\n baseMessages.map(_formatForTracing),\n handledOptions.runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions.runName\n );\n }\n const generations: ChatGeneration[][] = [];\n const llmOutputs: LLMResult[\"llmOutput\"][] = [];\n // Even if stream is not explicitly called, check if model is implicitly\n // called from streamEvents() or streamLog() to get all streamed events.\n // Bail out if _streamResponseChunks not overridden\n const hasStreamingHandler = !!runManagers?.[0].handlers.find(\n callbackHandlerPrefersStreaming\n );\n if (\n hasStreamingHandler &&\n !this.disableStreaming &&\n baseMessages.length === 1 &&\n this._streamResponseChunks !==\n BaseChatModel.prototype._streamResponseChunks\n ) {\n try {\n 
const stream = await this._streamResponseChunks(\n baseMessages[0],\n parsedOptions,\n runManagers?.[0]\n );\n let aggregated;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let llmOutput: Record<string, any> | undefined;\n for await (const chunk of stream) {\n if (chunk.message.id == null) {\n const runId = runManagers?.at(0)?.runId;\n if (runId != null) chunk.message._updateId(`run-${runId}`);\n }\n if (aggregated === undefined) {\n aggregated = chunk;\n } else {\n aggregated = concat(aggregated, chunk);\n }\n if (\n isAIMessageChunk(chunk.message) &&\n chunk.message.usage_metadata !== undefined\n ) {\n llmOutput = {\n tokenUsage: {\n promptTokens: chunk.message.usage_metadata.input_tokens,\n completionTokens: chunk.message.usage_metadata.output_tokens,\n totalTokens: chunk.message.usage_metadata.total_tokens,\n },\n };\n }\n }\n if (aggregated === undefined) {\n throw new Error(\"Received empty response from chat model call.\");\n }\n generations.push([aggregated]);\n await runManagers?.[0].handleLLMEnd({\n generations,\n llmOutput,\n });\n } catch (e) {\n await runManagers?.[0].handleLLMError(e);\n throw e;\n }\n } else {\n // generate results\n const results = await Promise.allSettled(\n baseMessages.map(async (messageList, i) => {\n const generateResults = await this._generate(\n messageList,\n { ...parsedOptions, promptIndex: i },\n runManagers?.[i]\n );\n if (this.outputVersion === \"v1\") {\n for (const generation of generateResults.generations) {\n generation.message = castStandardMessageContent(\n generation.message\n );\n }\n }\n return generateResults;\n })\n );\n // handle results\n await Promise.all(\n results.map(async (pResult, i) => {\n if (pResult.status === \"fulfilled\") {\n const result = pResult.value;\n for (const generation of result.generations) {\n if (generation.message.id == null) {\n const runId = runManagers?.at(0)?.runId;\n if (runId != null) generation.message._updateId(`run-${runId}`);\n }\n generation.message.response_metadata = {\n ...generation.generationInfo,\n ...generation.message.response_metadata,\n };\n }\n if (result.generations.length === 1) {\n result.generations[0].message.response_metadata = {\n ...result.llmOutput,\n ...result.generations[0].message.response_metadata,\n };\n }\n generations[i] = result.generations;\n llmOutputs[i] = result.llmOutput;\n return runManagers?.[i]?.handleLLMEnd({\n generations: [result.generations],\n llmOutput: result.llmOutput,\n });\n } else {\n // status === \"rejected\"\n await runManagers?.[i]?.handleLLMError(pResult.reason);\n return Promise.reject(pResult.reason);\n }\n })\n );\n }\n // create combined output\n const output: LLMResult = {\n generations,\n llmOutput: llmOutputs.length\n ? this._combineLLMOutput?.(...llmOutputs)\n : undefined,\n };\n Object.defineProperty(output, RUN_KEY, {\n value: runManagers\n ? 
{ runIds: runManagers?.map((manager) => manager.runId) }\n : undefined,\n configurable: true,\n });\n return output;\n }\n\n async _generateCached({\n messages,\n cache,\n llmStringKey,\n parsedOptions,\n handledOptions,\n }: {\n messages: BaseMessageLike[][];\n cache: BaseCache<Generation[]>;\n llmStringKey: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsedOptions: any;\n handledOptions: RunnableConfig;\n }): Promise<\n LLMResult & {\n missingPromptIndices: number[];\n startedRunManagers?: CallbackManagerForLLMRun[];\n }\n > {\n const baseMessages = messages.map((messageList) =>\n messageList.map(coerceMessageLikeToMessage)\n );\n\n const inheritableMetadata = {\n ...handledOptions.metadata,\n ...this.getLsParams(parsedOptions),\n };\n // create callback manager and start run\n const callbackManager_ = await CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n inheritableMetadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: this?.invocationParams(parsedOptions),\n batch_size: 1,\n };\n const runManagers = await callbackManager_?.handleChatModelStart(\n this.toJSON(),\n baseMessages.map(_formatForTracing),\n handledOptions.runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions.runName\n );\n\n // generate results\n const missingPromptIndices: number[] = [];\n const results = await Promise.allSettled(\n baseMessages.map(async (baseMessage, index) => {\n // Join all content into one string for the prompt index\n const prompt =\n BaseChatModel._convertInputToPromptValue(baseMessage).toString();\n const result = await cache.lookup(prompt, llmStringKey);\n\n if (result == null) {\n missingPromptIndices.push(index);\n }\n\n return result;\n })\n );\n\n // Map run managers to the results before filtering out null results\n // Null results are just absent from the cache.\n const cachedResults = results\n .map((result, index) => ({ result, runManager: runManagers?.[index] }))\n .filter(\n ({ result }) =>\n (result.status === \"fulfilled\" && result.value != null) ||\n result.status === \"rejected\"\n );\n\n // Handle results and call run managers\n const generations: Generation[][] = [];\n await Promise.all(\n cachedResults.map(async ({ result: promiseResult, runManager }, i) => {\n if (promiseResult.status === \"fulfilled\") {\n const result = promiseResult.value as Generation[];\n generations[i] = result.map((result) => {\n if (\n \"message\" in result &&\n isBaseMessage(result.message) &&\n isAIMessage(result.message)\n ) {\n // eslint-disable-next-line no-param-reassign\n result.message.usage_metadata = {\n input_tokens: 0,\n output_tokens: 0,\n total_tokens: 0,\n };\n if (this.outputVersion === \"v1\") {\n // eslint-disable-next-line no-param-reassign\n result.message = castStandardMessageContent(result.message);\n }\n }\n // eslint-disable-next-line no-param-reassign\n result.generationInfo = {\n ...result.generationInfo,\n tokenUsage: {},\n };\n return result;\n });\n if (result.length) {\n await runManager?.handleLLMNewToken(result[0].text);\n }\n return runManager?.handleLLMEnd(\n {\n generations: [result],\n },\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n } else {\n // status === \"rejected\"\n await runManager?.handleLLMError(\n promiseResult.reason,\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n return Promise.reject(promiseResult.reason);\n }\n })\n );\n\n const 
output = {\n generations,\n missingPromptIndices,\n startedRunManagers: runManagers,\n };\n\n // This defines RUN_KEY as a non-enumerable property on the output object\n // so that it is not serialized when the output is stringified, and so that\n // it isnt included when listing the keys of the output object.\n Object.defineProperty(output, RUN_KEY, {\n value: runManagers\n ? { runIds: runManagers?.map((manager) => manager.runId) }\n : undefined,\n configurable: true,\n });\n\n return output;\n }\n\n /**\n * Generates chat based on the input messages.\n * @param messages An array of arrays of BaseMessage instances.\n * @param options The call options or an array of stop sequences.\n * @param callbacks The callbacks for the language model.\n * @returns A Promise that resolves to an LLMResult.\n */\n async generate(\n messages: BaseMessageLike[][],\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n // parse call options\n let parsedOptions: CallOptions | undefined;\n if (Array.isArray(options)) {\n parsedOptions = { stop: options } as CallOptions;\n } else {\n parsedOptions = options;\n }\n\n const baseMessages = messages.map((messageList) =>\n messageList.map(coerceMessageLikeToMessage)\n );\n\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(parsedOptions);\n runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;\n\n if (!this.cache) {\n return this._generateUncached(baseMessages, callOptions, runnableConfig);\n }\n\n const { cache } = this;\n const llmStringKey = this._getSerializedCacheKeyParametersForCall(\n callOptions as CallOptions\n );\n\n const { generations, missingPromptIndices, startedRunManagers } =\n await this._generateCached({\n messages: baseMessages,\n cache,\n llmStringKey,\n parsedOptions: callOptions,\n handledOptions: runnableConfig,\n });\n\n let llmOutput = {};\n if (missingPromptIndices.length > 0) {\n const results = await this._generateUncached(\n missingPromptIndices.map((i) => baseMessages[i]),\n callOptions,\n runnableConfig,\n startedRunManagers !== undefined\n ? missingPromptIndices.map((i) => startedRunManagers?.[i])\n : undefined\n );\n await Promise.all(\n results.generations.map(async (generation, index) => {\n const promptIndex = missingPromptIndices[index];\n generations[promptIndex] = generation;\n // Join all content into one string for the prompt index\n const prompt = BaseChatModel._convertInputToPromptValue(\n baseMessages[promptIndex]\n ).toString();\n return cache.update(prompt, llmStringKey, generation);\n })\n );\n llmOutput = results.llmOutput ?? 
{};\n }\n\n return { generations, llmOutput } as LLMResult;\n }\n\n /**\n * Get the parameters used to invoke the model\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n invocationParams(_options?: this[\"ParsedCallOptions\"]): any {\n return {};\n }\n\n _modelType(): string {\n return \"base_chat_model\" as const;\n }\n\n abstract _llmType(): string;\n\n /**\n * @deprecated\n * Return a json-like object representing this LLM.\n */\n serialize(): SerializedLLM {\n return {\n ...this.invocationParams(),\n _type: this._llmType(),\n _model: this._modelType(),\n };\n }\n\n /**\n * Generates a prompt based on the input prompt values.\n * @param promptValues An array of BasePromptValue instances.\n * @param options The call options or an array of stop sequences.\n * @param callbacks The callbacks for the language model.\n * @returns A Promise that resolves to an LLMResult.\n */\n async generatePrompt(\n promptValues: BasePromptValueInterface[],\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n const promptMessages: BaseMessage[][] = promptValues.map((promptValue) =>\n promptValue.toChatMessages()\n );\n return this.generate(promptMessages, options, callbacks);\n }\n\n abstract _generate(\n messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult>;\n\n /**\n * @deprecated Use .invoke() instead. Will be removed in 0.2.0.\n *\n * Makes a single call to the chat model.\n * @param messages An array of BaseMessage instances.\n * @param options The call options or an array of stop sequences.\n * @param callbacks The callbacks for the language model.\n * @returns A Promise that resolves to a BaseMessage.\n */\n async call(\n messages: BaseMessageLike[],\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<BaseMessage> {\n const result = await this.generate(\n [messages.map(coerceMessageLikeToMessage)],\n options,\n callbacks\n );\n const generations = result.generations as ChatGeneration[][];\n return generations[0][0].message;\n }\n\n /**\n * @deprecated Use .invoke() instead. Will be removed in 0.2.0.\n *\n * Makes a single call to the chat model with a prompt value.\n * @param promptValue The value of the prompt.\n * @param options The call options or an array of stop sequences.\n * @param callbacks The callbacks for the language model.\n * @returns A Promise that resolves to a BaseMessage.\n */\n async callPrompt(\n promptValue: BasePromptValueInterface,\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<BaseMessage> {\n const promptMessages: BaseMessage[] = promptValue.toChatMessages();\n return this.call(promptMessages, options, callbacks);\n }\n\n /**\n * @deprecated Use .invoke() instead. Will be removed in 0.2.0.\n *\n * Predicts the next message based on the input messages.\n * @param messages An array of BaseMessage instances.\n * @param options The call options or an array of stop sequences.\n * @param callbacks The callbacks for the language model.\n * @returns A Promise that resolves to a BaseMessage.\n */\n async predictMessages(\n messages: BaseMessage[],\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<BaseMessage> {\n return this.call(messages, options, callbacks);\n }\n\n /**\n * @deprecated Use .invoke() instead. 
Will be removed in 0.2.0.\n *\n * Predicts the next message based on a text input.\n * @param text The text input.\n * @param options The call options or an array of stop sequences.\n * @param callbacks The callbacks for the language model.\n * @returns A Promise that resolves to a string.\n */\n async predict(\n text: string,\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<string> {\n const message = new HumanMessage(text);\n const result = await this.call([message], options, callbacks);\n if (typeof result.content !== \"string\") {\n throw new Error(\"Cannot use predict when output is not a string.\");\n }\n return result.content;\n }\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | ZodTypeV4<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<false>\n ): Runnable<BaseLanguageModelInput, RunOutput>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | ZodTypeV4<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<true>\n ): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | ZodTypeV3<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<false>\n ): Runnable<BaseLanguageModelInput, RunOutput>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | ZodTypeV3<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<true>\n ): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<boolean>\n ):\n | Runnable<BaseLanguageModelInput, RunOutput>\n | Runnable<\n BaseLanguageModelInput,\n {\n raw: BaseMessage;\n parsed: RunOutput;\n }\n > {\n if (typeof this.bindTools !== \"function\") {\n throw new Error(\n `Chat model must implement \".bindTools()\" to use withStructuredOutput.`\n );\n }\n if (config?.strict) {\n throw new Error(\n `\"strict\" mode is not supported for this model by default.`\n );\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const schema: Record<string, any> | InteropZodType<RunOutput> =\n outputSchema;\n const name = config?.name;\n const description =\n getSchemaDescription(schema) ?? 
\"A function available to call.\";\n const method = config?.method;\n const includeRaw = config?.includeRaw;\n if (method === \"jsonMode\") {\n throw new Error(\n `Base withStructuredOutput implementation only supports \"functionCalling\" as a method.`\n );\n }\n\n let functionName = name ?? \"extract\";\n let tools: ToolDefinition[];\n if (isInteropZodSchema(schema)) {\n tools = [\n {\n type: \"function\",\n function: {\n name: functionName,\n description,\n parameters: toJsonSchema(schema),\n },\n },\n ];\n } else {\n if (\"name\" in schema) {\n functionName = schema.name;\n }\n tools = [\n {\n type: \"function\",\n function: {\n name: functionName,\n description,\n parameters: schema,\n },\n },\n ];\n }\n\n const llm = this.bindTools(tools);\n const outputParser = RunnableLambda.from<AIMessageChunk, RunOutput>(\n (input: AIMessageChunk): RunOutput => {\n if (!input.tool_calls || input.tool_calls.length === 0) {\n throw new Error(\"No tool calls found in the response.\");\n }\n const toolCall = input.tool_calls.find(\n (tc) => tc.name === functionName\n );\n if (!toolCall) {\n throw new Error(`No tool call found with name ${functionName}.`);\n }\n return toolCall.args as RunOutput;\n }\n );\n\n if (!includeRaw) {\n return llm.pipe(outputParser).withConfig({\n runName: \"StructuredOutput\",\n }) as Runnable<BaseLanguageModelInput, RunOutput>;\n }\n\n const parserAssign = RunnablePassthrough.assign({\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsed: (input: any, config) => outputParser.invoke(input.raw, config),\n });\n const parserNone = RunnablePassthrough.assign({\n parsed: () => null,\n });\n const parsedWithFallback = parserAssign.withFallbacks({\n fallbacks: [parserNone],\n });\n return RunnableSequence.from<\n BaseLanguageModelInput,\n { raw: BaseMessage; parsed: RunOutput }\n >([\n {\n raw: llm,\n },\n parsedWithFallback,\n ]).withConfig({\n runName: \"StructuredOutputRunnable\",\n });\n }\n}\n\n/**\n * An abstract class that extends BaseChatModel and provides a simple\n * implementation of _generate.\n */\nexport abstract class SimpleChatModel<\n CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions\n> extends BaseChatModel<CallOptions> {\n abstract _call(\n messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<string>;\n\n async _generate(\n messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n const text = await this._call(messages, options, runManager);\n const message = new AIMessage(text);\n if (typeof message.content !== \"string\") {\n throw new Error(\n \"Cannot generate with a simple chat model when output is not a string.\"\n );\n }\n return {\n generations: [\n {\n text: message.content,\n message,\n },\n ],\n };\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiJA,SAAgB,sCAAsC;CACpD,MAAM,cAAc,IAAI;AACxB,QAAO,IAAI,gBAAkC,EAC3C,UAAUA,OAAyB,YAAY;EAC7C,WAAW,QACT,YAAY,OACV,OAAO,MAAM,YAAY,WACrB,MAAM,UACN,KAAK,UAAU,MAAM,QAAQ,CAClC,CACF;CACF,EACF;AACF;AAED,SAAS,kBAAkBC,UAAwC;CACjE,MAAMC,kBAAiC,CAAE;AACzC,MAAK,MAAM,WAAW,UAAU;EAC9B,IAAI,iBAAiB;AACrB,MAAI,MAAM,QAAQ,QAAQ,QAAQ,CAChC,MAAK,IAAI,MAAM,GAAG,MAAM,QAAQ,QAAQ,QAAQ,OAAO;GACrD,MAAM,QAAQ,QAAQ,QAAQ;AAC9B,OAAI,kBAAkB,MAAM,IAAI,qBAAqB,MAAM,EACzD;QAAI,mBAAmB,SAGrB,iBAAiB,IAAK,QAAQ,YAAoB;KAChD,GAAG;KACH,SAAS;MACP,GAAG,QAAQ,QAAQ,MAAM,GAAG,IAAI;MAChC,0BAA0B,MAAM;MAChC,GAAG,QAAQ,QAAQ,MAAM,MAAM,EAAE;KAClC;IACF;GACF;EAEJ;EAEH,gBAAgB,KAAK,eAAe;CACrC;AACD,QAAO;AACR;;;;;AAuBD,IAAsB,gBAAtB,MAAsB,sBAIZ,kBAAkD;CAQ1D,eAAe;EAAC;EAAa;EAAe,KAAK,UAAU;CAAC;CAE5D,mBAAmB;CAEnB;CAEA,YAAYC,QAA6B;EACvC,MAAM,OAAO;EACb,KAAK,gBAAgB,KAAK,MAAM;GAC9B,MAAM,gBACJ,OAAO,iBAAiB,uBAAuB,oBAAoB;AACrE,OAAI,iBAAiB,CAAC,MAAM,IAAK,EAAC,SAAS,cAAc,CACvD,QAAO;AAET,UAAO;EACR,EAAC;CACH;CAMD,AAAU,6CACRC,SAC6C;EAE7C,MAAM,CAAC,gBAAgB,YAAY,GACjC,MAAM,uCAAuC,QAAQ;EACtD,YAA0C,SAAS,eAAe;AACnE,SAAO,CAAC,gBAAgB,WAAyC;CAClE;;;;;;;CAqBD,MAAM,OACJC,OACAC,SAC4B;EAC5B,MAAM,cAAc,cAAc,2BAA2B,MAAM;EACnE,MAAM,SAAS,MAAM,KAAK,eACxB,CAAC,WAAY,GACb,SACA,SAAS,UACV;EACD,MAAM,iBAAiB,OAAO,YAAY,GAAG;AAE7C,SAAO,eAAe;CACvB;CAGD,OAAO,sBACLC,WACAC,UACAC,aACqC;AACrC,QAAM,IAAI,MAAM;CACjB;CAED,OAAO,gBACLJ,OACAC,SACmC;AAEnC,MACE,KAAK,0BACH,cAAc,UAAU,yBAC1B,KAAK,kBAEL,MAAM,KAAK,OAAO,OAAO,QAAQ;OAC5B;GACL,MAAM,SAAS,cAAc,2BAA2B,MAAM;GAC9D,MAAM,WAAW,OAAO,gBAAgB;GACxC,MAAM,CAAC,gBAAgB,YAAY,GACjC,KAAK,6CAA6C,QAAQ;GAE5D,MAAM,sBAAsB;IAC1B,GAAG,eAAe;IAClB,GAAG,KAAK,YAAY,YAAY;GACjC;GACD,MAAM,mBAAmB,MAAM,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,qBACA,KAAK,UACL,EAAE,SAAS,KAAK,QAAS,EAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,mBAAmB,MAAM,iBAAiB,YAAY;IACtD,YAAY;GACb;GACD,MAAM,cAAc,MAAM,kBAAkB,qBAC1C,KAAK,QAAQ,EACb,CAAC,kBAAkB,SAAS,AAAC,GAC7B,eAAe,OACf,QACA,OACA,QACA,QACA,eAAe,QAChB;GACD,IAAII;GAEJ,IAAIC;AACJ,OAAI;AACF,eAAW,MAAM,SAAS,KAAK,sBAC7B,UACA,aACA,cAAc,GACf,EAAE;AACD,SAAI,MAAM,QAAQ,MAAM,MAAM;MAC5B,MAAM,QAAQ,aAAa,GAAG,EAAE,EAAE;AAClC,UAAI,SAAS,MAAM,MAAM,QAAQ,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC;KAC3D;KACD,MAAM,QAAQ,oBAAoB;MAChC,GAAG,MAAM;MACT,GAAG,MAAM,QAAQ;KAClB;AACD,SAAI,KAAK,kBAAkB,MACzB,MAAM,2BACJ,MAAM,QACP;UAED,MAAM,MAAM;AAEd,SAAI,CAAC,iBACH,kBAAkB;UAElB,kBAAkB,gBAAgB,OAAO,MAAM;AAEjD,SACE,iBAAiB,MAAM,QAAQ,IAC/B,MAAM,QAAQ,mBAAmB,QAEjC,YAAY,EACV,YAAY;MACV,cAAc,MAAM,QAAQ,eAAe;MAC3C,kBAAkB,MAAM,QAAQ,eAAe;MAC/C,aAAa,MAAM,QAAQ,eAAe;KAC3C,EACF;IAEJ;GACF,SAAQ,KAAK;IACZ,MAAM,QAAQ,KACX,eAAe,CAAE,GAAE,IAAI,CAAC,eACvB,YAAY,eAAe,IAAI,CAChC,CACF;AACD,UAAM;GACP;GACD,MAAM,QAAQ,KACX,eAAe,CAAE,GAAE,IAAI,CAAC,eACvB,YAAY,aAAa;IAEvB,aAAa,CAAC,CAAC,eAAkC,CAAC;IAClD;GACD,EAAC,CACH,CACF;EACF;CACF;CAED,YAAYC,SAAqD;EAC/D,MAAM,eAAe,KAAK,SAAS,CAAC,WAAW,OAAO,GAClD,KAAK,SAAS,CAAC,QAAQ,QAAQ,GAAG,GAClC,KAAK,SAAS;AAElB,SAAO;GACL,eAAe;GACf,SAAS,QAAQ;GACjB,aAAa;EACd;CACF;;CAGD,MAAM,kBACJC,UACAC,eACAC,gBACAC,oBACoB;EACpB,MAAM,eAAe,SAAS,IAAI,CAAC,gBACjC,YAAY,IAAI,2BAA2B,CAC5C;EAED,IAAIC;AACJ,MACE,uBAAuB,UACvB,mBAAmB,WAAW,aAAa,QAE3C,cAAc;OACT;GACL,MAAM,sBAAsB;IAC1B,GAAG,eAAe;IAClB,GAAG,KAAK,YAAY,cAAc;GACnC;GAED,MAAM,mBAAmB,MAAM,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,qBACA,KAAK,UACL,EAAE,SAAS,KAAK,QAAS,EAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,mBAAmB,MAAM,iBAAiB,cAAc;IACxD,YAAY;GACb;GACD,cAAc,MAAM,kBAAkB,qBACpC,KAAK,QAAQ,EACb,aAAa,IAAI,kBAAkB,EACnC,eAAe,OACf,QACA,OACA,QACA,QACA,eAAe,QAChB;EACF;EACD,MAAMC,cAAkC,CAAE;EAC1C,MAAMC,aAAuC,CAAE;EAI/C,MAAM,sBAAsB,CAAC,CAAC,cAAc,GA
AG,SAAS,KACtD,gCACD;AACD,MACE,uBACA,CAAC,KAAK,oBACN,aAAa,WAAW,KACxB,KAAK,0BACH,cAAc,UAAU,sBAE1B,KAAI;GACF,MAAM,SAAS,MAAM,KAAK,sBACxB,aAAa,IACb,eACA,cAAc,GACf;GACD,IAAI;GAEJ,IAAIR;AACJ,cAAW,MAAM,SAAS,QAAQ;AAChC,QAAI,MAAM,QAAQ,MAAM,MAAM;KAC5B,MAAM,QAAQ,aAAa,GAAG,EAAE,EAAE;AAClC,SAAI,SAAS,MAAM,MAAM,QAAQ,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC;IAC3D;AACD,QAAI,eAAe,QACjB,aAAa;SAEb,aAAa,OAAO,YAAY,MAAM;AAExC,QACE,iBAAiB,MAAM,QAAQ,IAC/B,MAAM,QAAQ,mBAAmB,QAEjC,YAAY,EACV,YAAY;KACV,cAAc,MAAM,QAAQ,eAAe;KAC3C,kBAAkB,MAAM,QAAQ,eAAe;KAC/C,aAAa,MAAM,QAAQ,eAAe;IAC3C,EACF;GAEJ;AACD,OAAI,eAAe,OACjB,OAAM,IAAI,MAAM;GAElB,YAAY,KAAK,CAAC,UAAW,EAAC;GAC9B,MAAM,cAAc,GAAG,aAAa;IAClC;IACA;GACD,EAAC;EACH,SAAQ,GAAG;GACV,MAAM,cAAc,GAAG,eAAe,EAAE;AACxC,SAAM;EACP;OACI;GAEL,MAAM,UAAU,MAAM,QAAQ,WAC5B,aAAa,IAAI,OAAO,aAAa,MAAM;IACzC,MAAM,kBAAkB,MAAM,KAAK,UACjC,aACA;KAAE,GAAG;KAAe,aAAa;IAAG,GACpC,cAAc,GACf;AACD,QAAI,KAAK,kBAAkB,KACzB,MAAK,MAAM,cAAc,gBAAgB,aACvC,WAAW,UAAU,2BACnB,WAAW,QACZ;AAGL,WAAO;GACR,EAAC,CACH;GAED,MAAM,QAAQ,IACZ,QAAQ,IAAI,OAAO,SAAS,MAAM;AAChC,QAAI,QAAQ,WAAW,aAAa;KAClC,MAAM,SAAS,QAAQ;AACvB,UAAK,MAAM,cAAc,OAAO,aAAa;AAC3C,UAAI,WAAW,QAAQ,MAAM,MAAM;OACjC,MAAM,QAAQ,aAAa,GAAG,EAAE,EAAE;AAClC,WAAI,SAAS,MAAM,WAAW,QAAQ,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC;MAChE;MACD,WAAW,QAAQ,oBAAoB;OACrC,GAAG,WAAW;OACd,GAAG,WAAW,QAAQ;MACvB;KACF;AACD,SAAI,OAAO,YAAY,WAAW,GAChC,OAAO,YAAY,GAAG,QAAQ,oBAAoB;MAChD,GAAG,OAAO;MACV,GAAG,OAAO,YAAY,GAAG,QAAQ;KAClC;KAEH,YAAY,KAAK,OAAO;KACxB,WAAW,KAAK,OAAO;AACvB,YAAO,cAAc,IAAI,aAAa;MACpC,aAAa,CAAC,OAAO,WAAY;MACjC,WAAW,OAAO;KACnB,EAAC;IACH,OAAM;KAEL,MAAM,cAAc,IAAI,eAAe,QAAQ,OAAO;AACtD,YAAO,QAAQ,OAAO,QAAQ,OAAO;IACtC;GACF,EAAC,CACH;EACF;EAED,MAAMS,SAAoB;GACxB;GACA,WAAW,WAAW,SAClB,KAAK,oBAAoB,GAAG,WAAW,GACvC;EACL;EACD,OAAO,eAAe,QAAQ,SAAS;GACrC,OAAO,cACH,EAAE,QAAQ,aAAa,IAAI,CAAC,YAAY,QAAQ,MAAM,CAAE,IACxD;GACJ,cAAc;EACf,EAAC;AACF,SAAO;CACR;CAED,MAAM,gBAAgB,EACpB,UACA,OACA,cACA,eACA,gBAQD,EAKC;EACA,MAAM,eAAe,SAAS,IAAI,CAAC,gBACjC,YAAY,IAAI,2BAA2B,CAC5C;EAED,MAAM,sBAAsB;GAC1B,GAAG,eAAe;GAClB,GAAG,KAAK,YAAY,cAAc;EACnC;EAED,MAAM,mBAAmB,MAAM,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,qBACA,KAAK,UACL,EAAE,SAAS,KAAK,QAAS,EAC1B;EACD,MAAM,QAAQ;GACZ,SAAS;GACT,mBAAmB,MAAM,iBAAiB,cAAc;GACxD,YAAY;EACb;EACD,MAAM,cAAc,MAAM,kBAAkB,qBAC1C,KAAK,QAAQ,EACb,aAAa,IAAI,kBAAkB,EACnC,eAAe,OACf,QACA,OACA,QACA,QACA,eAAe,QAChB;EAGD,MAAMC,uBAAiC,CAAE;EACzC,MAAM,UAAU,MAAM,QAAQ,WAC5B,aAAa,IAAI,OAAO,aAAa,UAAU;GAE7C,MAAM,SACJ,cAAc,2BAA2B,YAAY,CAAC,UAAU;GAClE,MAAM,SAAS,MAAM,MAAM,OAAO,QAAQ,aAAa;AAEvD,OAAI,UAAU,MACZ,qBAAqB,KAAK,MAAM;AAGlC,UAAO;EACR,EAAC,CACH;EAID,MAAM,gBAAgB,QACnB,IAAI,CAAC,QAAQ,WAAW;GAAE;GAAQ,YAAY,cAAc;EAAQ,GAAE,CACtE,OACC,CAAC,EAAE,QAAQ,KACR,OAAO,WAAW,eAAe,OAAO,SAAS,QAClD,OAAO,WAAW,WACrB;EAGH,MAAMC,cAA8B,CAAE;EACtC,MAAM,QAAQ,IACZ,cAAc,IAAI,OAAO,EAAE,QAAQ,eAAe,YAAY,EAAE,MAAM;AACpE,OAAI,cAAc,WAAW,aAAa;IACxC,MAAM,SAAS,cAAc;IAC7B,YAAY,KAAK,OAAO,IAAI,CAACC,aAAW;AACtC,SACE,aAAaA,YACb,cAAcA,SAAO,QAAQ,IAC7B,YAAYA,SAAO,QAAQ,EAC3B;MAEAA,SAAO,QAAQ,iBAAiB;OAC9B,cAAc;OACd,eAAe;OACf,cAAc;MACf;AACD,UAAI,KAAK,kBAAkB,MAEzBA,SAAO,UAAU,2BAA2BA,SAAO,QAAQ;KAE9D;KAEDA,SAAO,iBAAiB;MACtB,GAAGA,SAAO;MACV,YAAY,CAAE;KACf;AACD,YAAOA;IACR,EAAC;AACF,QAAI,OAAO,QACT,MAAM,YAAY,kBAAkB,OAAO,GAAG,KAAK;AAErD,WAAO,YAAY,aACjB,EACE,aAAa,CAAC,MAAO,EACtB,GACD,QACA,QACA,QACA,EACE,QAAQ,KACT,EACF;GACF,OAAM;IAEL,MAAM,YAAY,eAChB,cAAc,QACd,QACA,QACA,QACA,EACE,QAAQ,KACT,EACF;AACD,WAAO,QAAQ,OAAO,cAAc,OAAO;GAC5C;EACF,EAAC,CACH;EAED,MAAM,SAAS;GACb;GACA;GACA,oBAAoB;EACrB;EAKD,OAAO,eAAe,QAAQ,SAAS;GACrC,OAAO,cACH,EAAE,QAAQ,aAAa,IAAI,CAAC,YAAY,QAAQ,MAAM,CAAE,IACxD;GACJ,cAAc;EACf,EA
AC;AAEF,SAAO;CACR;;;;;;;;CASD,MAAM,SACJV,UACAW,SACAC,WACoB;EAEpB,IAAIC;AACJ,MAAI,MAAM,QAAQ,QAAQ,EACxB,gBAAgB,EAAE,MAAM,QAAS;OAEjC,gBAAgB;EAGlB,MAAM,eAAe,SAAS,IAAI,CAAC,gBACjC,YAAY,IAAI,2BAA2B,CAC5C;EAED,MAAM,CAAC,gBAAgB,YAAY,GACjC,KAAK,6CAA6C,cAAc;EAClE,eAAe,YAAY,eAAe,aAAa;AAEvD,MAAI,CAAC,KAAK,MACR,QAAO,KAAK,kBAAkB,cAAc,aAAa,eAAe;EAG1E,MAAM,EAAE,OAAO,GAAG;EAClB,MAAM,eAAe,KAAK,wCACxB,YACD;EAED,MAAM,EAAE,aAAa,sBAAsB,oBAAoB,GAC7D,MAAM,KAAK,gBAAgB;GACzB,UAAU;GACV;GACA;GACA,eAAe;GACf,gBAAgB;EACjB,EAAC;EAEJ,IAAI,YAAY,CAAE;AAClB,MAAI,qBAAqB,SAAS,GAAG;GACnC,MAAM,UAAU,MAAM,KAAK,kBACzB,qBAAqB,IAAI,CAAC,MAAM,aAAa,GAAG,EAChD,aACA,gBACA,uBAAuB,SACnB,qBAAqB,IAAI,CAAC,MAAM,qBAAqB,GAAG,GACxD,OACL;GACD,MAAM,QAAQ,IACZ,QAAQ,YAAY,IAAI,OAAO,YAAY,UAAU;IACnD,MAAM,cAAc,qBAAqB;IACzC,YAAY,eAAe;IAE3B,MAAM,SAAS,cAAc,2BAC3B,aAAa,aACd,CAAC,UAAU;AACZ,WAAO,MAAM,OAAO,QAAQ,cAAc,WAAW;GACtD,EAAC,CACH;GACD,YAAY,QAAQ,aAAa,CAAE;EACpC;AAED,SAAO;GAAE;GAAa;EAAW;CAClC;;;;CAMD,iBAAiBC,UAA2C;AAC1D,SAAO,CAAE;CACV;CAED,aAAqB;AACnB,SAAO;CACR;;;;;CAQD,YAA2B;AACzB,SAAO;GACL,GAAG,KAAK,kBAAkB;GAC1B,OAAO,KAAK,UAAU;GACtB,QAAQ,KAAK,YAAY;EAC1B;CACF;;;;;;;;CASD,MAAM,eACJC,cACAJ,SACAC,WACoB;EACpB,MAAMI,iBAAkC,aAAa,IAAI,CAAC,gBACxD,YAAY,gBAAgB,CAC7B;AACD,SAAO,KAAK,SAAS,gBAAgB,SAAS,UAAU;CACzD;;;;;;;;;;CAiBD,MAAM,KACJC,UACAN,SACAC,WACsB;EACtB,MAAM,SAAS,MAAM,KAAK,SACxB,CAAC,SAAS,IAAI,2BAA2B,AAAC,GAC1C,SACA,UACD;EACD,MAAM,cAAc,OAAO;AAC3B,SAAO,YAAY,GAAG,GAAG;CAC1B;;;;;;;;;;CAWD,MAAM,WACJM,aACAP,SACAC,WACsB;EACtB,MAAMO,iBAAgC,YAAY,gBAAgB;AAClE,SAAO,KAAK,KAAK,gBAAgB,SAAS,UAAU;CACrD;;;;;;;;;;CAWD,MAAM,gBACJ/B,UACAuB,SACAC,WACsB;AACtB,SAAO,KAAK,KAAK,UAAU,SAAS,UAAU;CAC/C;;;;;;;;;;CAWD,MAAM,QACJQ,MACAT,SACAC,WACiB;EACjB,MAAM,UAAU,IAAI,aAAa;EACjC,MAAM,SAAS,MAAM,KAAK,KAAK,CAAC,OAAQ,GAAE,SAAS,UAAU;AAC7D,MAAI,OAAO,OAAO,YAAY,SAC5B,OAAM,IAAI,MAAM;AAElB,SAAO,OAAO;CACf;CA8CD,qBAIES,cAIAC,QASI;AACJ,MAAI,OAAO,KAAK,cAAc,WAC5B,OAAM,IAAI,MACR,CAAC,qEAAqE,CAAC;AAG3E,MAAI,QAAQ,OACV,OAAM,IAAI,MACR,CAAC,yDAAyD,CAAC;EAI/D,MAAMC,SACJ;EACF,MAAM,OAAO,QAAQ;EACrB,MAAM,cACJ,qBAAqB,OAAO,IAAI;EAClC,MAAM,SAAS,QAAQ;EACvB,MAAM,aAAa,QAAQ;AAC3B,MAAI,WAAW,WACb,OAAM,IAAI,MACR,CAAC,qFAAqF,CAAC;EAI3F,IAAI,eAAe,QAAQ;EAC3B,IAAIC;AACJ,MAAI,mBAAmB,OAAO,EAC5B,QAAQ,CACN;GACE,MAAM;GACN,UAAU;IACR,MAAM;IACN;IACA,YAAY,aAAa,OAAO;GACjC;EACF,CACF;OACI;AACL,OAAI,UAAU,QACZ,eAAe,OAAO;GAExB,QAAQ,CACN;IACE,MAAM;IACN,UAAU;KACR,MAAM;KACN;KACA,YAAY;IACb;GACF,CACF;EACF;EAED,MAAM,MAAM,KAAK,UAAU,MAAM;EACjC,MAAM,eAAe,eAAe,KAClC,CAACC,UAAqC;AACpC,OAAI,CAAC,MAAM,cAAc,MAAM,WAAW,WAAW,EACnD,OAAM,IAAI,MAAM;GAElB,MAAM,WAAW,MAAM,WAAW,KAChC,CAAC,OAAO,GAAG,SAAS,aACrB;AACD,OAAI,CAAC,SACH,OAAM,IAAI,MAAM,CAAC,6BAA6B,EAAE,aAAa,CAAC,CAAC;AAEjE,UAAO,SAAS;EACjB,EACF;AAED,MAAI,CAAC,WACH,QAAO,IAAI,KAAK,aAAa,CAAC,WAAW,EACvC,SAAS,mBACV,EAAC;EAGJ,MAAM,eAAe,oBAAoB,OAAO,EAE9C,QAAQ,CAACC,OAAYC,aAAW,aAAa,OAAO,MAAM,KAAKA,SAAO,CACvE,EAAC;EACF,MAAM,aAAa,oBAAoB,OAAO,EAC5C,QAAQ,MAAM,KACf,EAAC;EACF,MAAM,qBAAqB,aAAa,cAAc,EACpD,WAAW,CAAC,UAAW,EACxB,EAAC;AACF,SAAO,iBAAiB,KAGtB,CACA,EACE,KAAK,IACN,GACD,kBACD,EAAC,CAAC,WAAW,EACZ,SAAS,2BACV,EAAC;CACH;AACF;;;;;AAMD,IAAsB,kBAAtB,cAEU,cAA2B;CAOnC,MAAM,UACJvC,UACAW,SACA6B,YACqB;EACrB,MAAM,OAAO,MAAM,KAAK,MAAM,UAAU,SAAS,WAAW;EAC5D,MAAM,UAAU,IAAI,UAAU;AAC9B,MAAI,OAAO,QAAQ,YAAY,SAC7B,OAAM,IAAI,MACR;AAGJ,SAAO,EACL,aAAa,CACX;GACE,MAAM,QAAQ;GACd;EACD,CACF,EACF;CACF;AACF"}
+
{"version":3,"file":"chat_models.js","names":["chunk: BaseMessageChunk","messages: BaseMessage[]","messagesToTrace: BaseMessage[]","fields: BaseChatModelParams","options?: Partial<CallOptions>","input: BaseLanguageModelInput","options?: CallOptions","_messages: BaseMessage[]","_options: this[\"ParsedCallOptions\"]","_runManager?: CallbackManagerForLLMRun","generationChunk: ChatGenerationChunk | undefined","llmOutput: Record<string, any> | undefined","options: this[\"ParsedCallOptions\"]","messages: BaseMessageLike[][]","parsedOptions: this[\"ParsedCallOptions\"]","handledOptions: RunnableConfig","startedRunManagers?: CallbackManagerForLLMRun[]","runManagers: CallbackManagerForLLMRun[] | undefined","generations: ChatGeneration[][]","llmOutputs: LLMResult[\"llmOutput\"][]","output: LLMResult","missingPromptIndices: number[]","generations: Generation[][]","result","options?: string[] | CallOptions","callbacks?: Callbacks","parsedOptions: CallOptions | undefined","_options?: this[\"ParsedCallOptions\"]","promptValues: BasePromptValueInterface[]","promptMessages: BaseMessage[][]","messages: BaseMessageLike[]","promptValue: BasePromptValueInterface","promptMessages: BaseMessage[]","text: string","outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>","config?: StructuredOutputMethodOptions<boolean>","schema: Record<string, any> | InteropZodType<RunOutput>","tools: ToolDefinition[]","input: BaseMessageChunk","input: any","config","runManager?: CallbackManagerForLLMRun"],"sources":["../../src/language_models/chat_models.ts"],"sourcesContent":["import type { ZodType as ZodTypeV3 } from \"zod/v3\";\nimport type { $ZodType as ZodTypeV4 } from \"zod/v4/core\";\nimport {\n AIMessage,\n type BaseMessage,\n BaseMessageChunk,\n type BaseMessageLike,\n HumanMessage,\n coerceMessageLikeToMessage,\n AIMessageChunk,\n isAIMessageChunk,\n isBaseMessage,\n isAIMessage,\n} from \"../messages/index.js\";\nimport {\n convertToOpenAIImageBlock,\n isURLContentBlock,\n isBase64ContentBlock,\n} from \"../messages/content/data.js\";\nimport type { BasePromptValueInterface } from \"../prompt_values.js\";\nimport {\n LLMResult,\n RUN_KEY,\n type ChatGeneration,\n ChatGenerationChunk,\n type ChatResult,\n type Generation,\n} from \"../outputs.js\";\nimport {\n BaseLanguageModel,\n type StructuredOutputMethodOptions,\n type ToolDefinition,\n type BaseLanguageModelCallOptions,\n type BaseLanguageModelInput,\n type BaseLanguageModelParams,\n} from \"./base.js\";\nimport {\n CallbackManager,\n type CallbackManagerForLLMRun,\n type Callbacks,\n} from \"../callbacks/manager.js\";\nimport type { RunnableConfig } from \"../runnables/config.js\";\nimport type { BaseCache } from \"../caches/base.js\";\nimport {\n StructuredToolInterface,\n StructuredToolParams,\n} from \"../tools/index.js\";\nimport {\n Runnable,\n RunnableLambda,\n RunnableSequence,\n RunnableToolLike,\n} from \"../runnables/base.js\";\nimport { concat } from \"../utils/stream.js\";\nimport { RunnablePassthrough } from \"../runnables/passthrough.js\";\nimport {\n getSchemaDescription,\n InteropZodType,\n isInteropZodSchema,\n} from \"../utils/types/zod.js\";\nimport { callbackHandlerPrefersStreaming } from \"../callbacks/base.js\";\nimport { toJsonSchema } from \"../utils/json_schema.js\";\nimport { getEnvironmentVariable } from \"../utils/env.js\";\nimport { castStandardMessageContent, iife } from \"./utils.js\";\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport 
type ToolChoice = string | Record<string, any> | \"auto\" | \"any\";\n\n/**\n * Represents a serialized chat model.\n */\nexport type SerializedChatModel = {\n _model: string;\n _type: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n} & Record<string, any>;\n\n// todo?\n/**\n * Represents a serialized large language model.\n */\nexport type SerializedLLM = {\n _model: string;\n _type: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n} & Record<string, any>;\n\n/**\n * Represents the parameters for a base chat model.\n */\nexport type BaseChatModelParams = BaseLanguageModelParams & {\n /**\n * Whether to disable streaming.\n *\n * If streaming is bypassed, then `stream()` will defer to\n * `invoke()`.\n *\n * - If true, will always bypass streaming case.\n * - If false (default), will always use streaming case if available.\n */\n disableStreaming?: boolean;\n /**\n * Version of `AIMessage` output format to store in message content.\n *\n * `AIMessage.contentBlocks` will lazily parse the contents of `content` into a\n * standard format. This flag can be used to additionally store the standard format\n * as the message content, e.g., for serialization purposes.\n *\n * - \"v0\": provider-specific format in content (can lazily parse with `.contentBlocks`)\n * - \"v1\": standardized format in content (consistent with `.contentBlocks`)\n *\n * You can also set `LC_OUTPUT_VERSION` as an environment variable to \"v1\" to\n * enable this by default.\n *\n * @default \"v0\"\n */\n outputVersion?: \"v0\" | \"v1\";\n};\n\n/**\n * Represents the call options for a base chat model.\n */\nexport type BaseChatModelCallOptions = BaseLanguageModelCallOptions & {\n /**\n * Specifies how the chat model should use tools.\n * @default undefined\n *\n * Possible values:\n * - \"auto\": The model may choose to use any of the provided tools, or none.\n * - \"any\": The model must use one of the provided tools.\n * - \"none\": The model must not use any tools.\n * - A string (not \"auto\", \"any\", or \"none\"): The name of a specific tool the model must use.\n * - An object: A custom schema specifying tool choice parameters. Specific to the provider.\n *\n * Note: Not all providers support tool_choice. An error will be thrown\n * if used with an unsupported model.\n */\n tool_choice?: ToolChoice;\n};\n\n/**\n * Creates a transform stream for encoding chat message chunks.\n * @deprecated Use {@link BytesOutputParser} instead\n * @returns A TransformStream instance that encodes chat message chunks.\n */\nexport function createChatMessageChunkEncoderStream() {\n const textEncoder = new TextEncoder();\n return new TransformStream<BaseMessageChunk>({\n transform(chunk: BaseMessageChunk, controller) {\n controller.enqueue(\n textEncoder.encode(\n typeof chunk.content === \"string\"\n ? 
chunk.content\n : JSON.stringify(chunk.content)\n )\n );\n },\n });\n}\n\nfunction _formatForTracing(messages: BaseMessage[]): BaseMessage[] {\n const messagesToTrace: BaseMessage[] = [];\n for (const message of messages) {\n let messageToTrace = message;\n if (Array.isArray(message.content)) {\n for (let idx = 0; idx < message.content.length; idx++) {\n const block = message.content[idx];\n if (isURLContentBlock(block) || isBase64ContentBlock(block)) {\n if (messageToTrace === message) {\n // Also shallow-copy content\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n messageToTrace = new (message.constructor as any)({\n ...messageToTrace,\n content: [\n ...message.content.slice(0, idx),\n convertToOpenAIImageBlock(block),\n ...message.content.slice(idx + 1),\n ],\n });\n }\n }\n }\n }\n messagesToTrace.push(messageToTrace);\n }\n return messagesToTrace;\n}\n\nexport type LangSmithParams = {\n ls_provider?: string;\n ls_model_name?: string;\n ls_model_type: \"chat\";\n ls_temperature?: number;\n ls_max_tokens?: number;\n ls_stop?: Array<string>;\n};\n\nexport type BindToolsInput =\n | StructuredToolInterface\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>\n | ToolDefinition\n | RunnableToolLike\n | StructuredToolParams;\n\n/**\n * Base class for chat models. It extends the BaseLanguageModel class and\n * provides methods for generating chat based on input messages.\n */\nexport abstract class BaseChatModel<\n CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions,\n // TODO: Fix the parameter order on the next minor version.\n OutputMessageType extends BaseMessageChunk = AIMessageChunk\n> extends BaseLanguageModel<OutputMessageType, CallOptions> {\n // Backwards compatibility since fields have been moved to RunnableConfig\n declare ParsedCallOptions: Omit<\n CallOptions,\n Exclude<keyof RunnableConfig, \"signal\" | \"timeout\" | \"maxConcurrency\">\n >;\n\n // Only ever instantiated in main LangChain\n lc_namespace = [\"langchain\", \"chat_models\", this._llmType()];\n\n disableStreaming = false;\n\n outputVersion?: \"v0\" | \"v1\";\n\n constructor(fields: BaseChatModelParams) {\n super(fields);\n this.outputVersion = iife(() => {\n const outputVersion =\n fields.outputVersion ?? 
getEnvironmentVariable(\"LC_OUTPUT_VERSION\");\n if (outputVersion && [\"v0\", \"v1\"].includes(outputVersion)) {\n return outputVersion as \"v0\" | \"v1\";\n }\n return \"v0\";\n });\n }\n\n _combineLLMOutput?(\n ...llmOutputs: LLMResult[\"llmOutput\"][]\n ): LLMResult[\"llmOutput\"];\n\n protected _separateRunnableConfigFromCallOptionsCompat(\n options?: Partial<CallOptions>\n ): [RunnableConfig, this[\"ParsedCallOptions\"]] {\n // For backwards compat, keep `signal` in both runnableConfig and callOptions\n const [runnableConfig, callOptions] =\n super._separateRunnableConfigFromCallOptions(options);\n (callOptions as this[\"ParsedCallOptions\"]).signal = runnableConfig.signal;\n return [runnableConfig, callOptions as this[\"ParsedCallOptions\"]];\n }\n\n /**\n * Bind tool-like objects to this chat model.\n *\n * @param tools A list of tool definitions to bind to this chat model.\n * Can be a structured tool, an OpenAI formatted tool, or an object\n * matching the provider's specific tool schema.\n * @param kwargs Any additional parameters to bind.\n */\n bindTools?(\n tools: BindToolsInput[],\n kwargs?: Partial<CallOptions>\n ): Runnable<BaseLanguageModelInput, OutputMessageType, CallOptions>;\n\n /**\n * Invokes the chat model with a single input.\n * @param input The input for the language model.\n * @param options The call options.\n * @returns A Promise that resolves to a BaseMessageChunk.\n */\n async invoke(\n input: BaseLanguageModelInput,\n options?: CallOptions\n ): Promise<OutputMessageType> {\n const promptValue = BaseChatModel._convertInputToPromptValue(input);\n const result = await this.generatePrompt(\n [promptValue],\n options,\n options?.callbacks\n );\n const chatGeneration = result.generations[0][0] as ChatGeneration;\n // TODO: Remove cast after figuring out inheritance\n return chatGeneration.message as OutputMessageType;\n }\n\n // eslint-disable-next-line require-yield\n async *_streamResponseChunks(\n _messages: BaseMessage[],\n _options: this[\"ParsedCallOptions\"],\n _runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n throw new Error(\"Not implemented.\");\n }\n\n async *_streamIterator(\n input: BaseLanguageModelInput,\n options?: CallOptions\n ): AsyncGenerator<OutputMessageType> {\n // Subclass check required to avoid double callbacks with default implementation\n if (\n this._streamResponseChunks ===\n BaseChatModel.prototype._streamResponseChunks ||\n this.disableStreaming\n ) {\n yield this.invoke(input, options);\n } else {\n const prompt = BaseChatModel._convertInputToPromptValue(input);\n const messages = prompt.toChatMessages();\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(options);\n\n const inheritableMetadata = {\n ...runnableConfig.metadata,\n ...this.getLsParams(callOptions),\n };\n const callbackManager_ = await CallbackManager.configure(\n runnableConfig.callbacks,\n this.callbacks,\n runnableConfig.tags,\n this.tags,\n inheritableMetadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: callOptions,\n invocation_params: this?.invocationParams(callOptions),\n batch_size: 1,\n };\n const runManagers = await callbackManager_?.handleChatModelStart(\n this.toJSON(),\n [_formatForTracing(messages)],\n runnableConfig.runId,\n undefined,\n extra,\n undefined,\n undefined,\n runnableConfig.runName\n );\n let generationChunk: ChatGenerationChunk | undefined;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let llmOutput: 
Record<string, any> | undefined;\n try {\n for await (const chunk of this._streamResponseChunks(\n messages,\n callOptions,\n runManagers?.[0]\n )) {\n if (chunk.message.id == null) {\n const runId = runManagers?.at(0)?.runId;\n if (runId != null) chunk.message._updateId(`run-${runId}`);\n }\n chunk.message.response_metadata = {\n ...chunk.generationInfo,\n ...chunk.message.response_metadata,\n };\n if (this.outputVersion === \"v1\") {\n yield castStandardMessageContent(\n chunk.message\n ) as OutputMessageType;\n } else {\n yield chunk.message as OutputMessageType;\n }\n if (!generationChunk) {\n generationChunk = chunk;\n } else {\n generationChunk = generationChunk.concat(chunk);\n }\n if (\n isAIMessageChunk(chunk.message) &&\n chunk.message.usage_metadata !== undefined\n ) {\n llmOutput = {\n tokenUsage: {\n promptTokens: chunk.message.usage_metadata.input_tokens,\n completionTokens: chunk.message.usage_metadata.output_tokens,\n totalTokens: chunk.message.usage_metadata.total_tokens,\n },\n };\n }\n }\n } catch (err) {\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMError(err)\n )\n );\n throw err;\n }\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMEnd({\n // TODO: Remove cast after figuring out inheritance\n generations: [[generationChunk as ChatGeneration]],\n llmOutput,\n })\n )\n );\n }\n }\n\n getLsParams(options: this[\"ParsedCallOptions\"]): LangSmithParams {\n const providerName = this.getName().startsWith(\"Chat\")\n ? this.getName().replace(\"Chat\", \"\")\n : this.getName();\n\n return {\n ls_model_type: \"chat\",\n ls_stop: options.stop,\n ls_provider: providerName,\n };\n }\n\n /** @ignore */\n async _generateUncached(\n messages: BaseMessageLike[][],\n parsedOptions: this[\"ParsedCallOptions\"],\n handledOptions: RunnableConfig,\n startedRunManagers?: CallbackManagerForLLMRun[]\n ): Promise<LLMResult> {\n const baseMessages = messages.map((messageList) =>\n messageList.map(coerceMessageLikeToMessage)\n );\n\n let runManagers: CallbackManagerForLLMRun[] | undefined;\n if (\n startedRunManagers !== undefined &&\n startedRunManagers.length === baseMessages.length\n ) {\n runManagers = startedRunManagers;\n } else {\n const inheritableMetadata = {\n ...handledOptions.metadata,\n ...this.getLsParams(parsedOptions),\n };\n // create callback manager and start run\n const callbackManager_ = await CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n inheritableMetadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: this?.invocationParams(parsedOptions),\n batch_size: 1,\n };\n runManagers = await callbackManager_?.handleChatModelStart(\n this.toJSON(),\n baseMessages.map(_formatForTracing),\n handledOptions.runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions.runName\n );\n }\n const generations: ChatGeneration[][] = [];\n const llmOutputs: LLMResult[\"llmOutput\"][] = [];\n // Even if stream is not explicitly called, check if model is implicitly\n // called from streamEvents() or streamLog() to get all streamed events.\n // Bail out if _streamResponseChunks not overridden\n const hasStreamingHandler = !!runManagers?.[0].handlers.find(\n callbackHandlerPrefersStreaming\n );\n if (\n hasStreamingHandler &&\n !this.disableStreaming &&\n baseMessages.length === 1 &&\n this._streamResponseChunks !==\n BaseChatModel.prototype._streamResponseChunks\n ) {\n try {\n 
const stream = await this._streamResponseChunks(\n baseMessages[0],\n parsedOptions,\n runManagers?.[0]\n );\n let aggregated;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let llmOutput: Record<string, any> | undefined;\n for await (const chunk of stream) {\n if (chunk.message.id == null) {\n const runId = runManagers?.at(0)?.runId;\n if (runId != null) chunk.message._updateId(`run-${runId}`);\n }\n if (aggregated === undefined) {\n aggregated = chunk;\n } else {\n aggregated = concat(aggregated, chunk);\n }\n if (\n isAIMessageChunk(chunk.message) &&\n chunk.message.usage_metadata !== undefined\n ) {\n llmOutput = {\n tokenUsage: {\n promptTokens: chunk.message.usage_metadata.input_tokens,\n completionTokens: chunk.message.usage_metadata.output_tokens,\n totalTokens: chunk.message.usage_metadata.total_tokens,\n },\n };\n }\n }\n if (aggregated === undefined) {\n throw new Error(\"Received empty response from chat model call.\");\n }\n generations.push([aggregated]);\n await runManagers?.[0].handleLLMEnd({\n generations,\n llmOutput,\n });\n } catch (e) {\n await runManagers?.[0].handleLLMError(e);\n throw e;\n }\n } else {\n // generate results\n const results = await Promise.allSettled(\n baseMessages.map(async (messageList, i) => {\n const generateResults = await this._generate(\n messageList,\n { ...parsedOptions, promptIndex: i },\n runManagers?.[i]\n );\n if (this.outputVersion === \"v1\") {\n for (const generation of generateResults.generations) {\n generation.message = castStandardMessageContent(\n generation.message\n );\n }\n }\n return generateResults;\n })\n );\n // handle results\n await Promise.all(\n results.map(async (pResult, i) => {\n if (pResult.status === \"fulfilled\") {\n const result = pResult.value;\n for (const generation of result.generations) {\n if (generation.message.id == null) {\n const runId = runManagers?.at(0)?.runId;\n if (runId != null) generation.message._updateId(`run-${runId}`);\n }\n generation.message.response_metadata = {\n ...generation.generationInfo,\n ...generation.message.response_metadata,\n };\n }\n if (result.generations.length === 1) {\n result.generations[0].message.response_metadata = {\n ...result.llmOutput,\n ...result.generations[0].message.response_metadata,\n };\n }\n generations[i] = result.generations;\n llmOutputs[i] = result.llmOutput;\n return runManagers?.[i]?.handleLLMEnd({\n generations: [result.generations],\n llmOutput: result.llmOutput,\n });\n } else {\n // status === \"rejected\"\n await runManagers?.[i]?.handleLLMError(pResult.reason);\n return Promise.reject(pResult.reason);\n }\n })\n );\n }\n // create combined output\n const output: LLMResult = {\n generations,\n llmOutput: llmOutputs.length\n ? this._combineLLMOutput?.(...llmOutputs)\n : undefined,\n };\n Object.defineProperty(output, RUN_KEY, {\n value: runManagers\n ? 
{ runIds: runManagers?.map((manager) => manager.runId) }\n : undefined,\n configurable: true,\n });\n return output;\n }\n\n async _generateCached({\n messages,\n cache,\n llmStringKey,\n parsedOptions,\n handledOptions,\n }: {\n messages: BaseMessageLike[][];\n cache: BaseCache<Generation[]>;\n llmStringKey: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsedOptions: any;\n handledOptions: RunnableConfig;\n }): Promise<\n LLMResult & {\n missingPromptIndices: number[];\n startedRunManagers?: CallbackManagerForLLMRun[];\n }\n > {\n const baseMessages = messages.map((messageList) =>\n messageList.map(coerceMessageLikeToMessage)\n );\n\n const inheritableMetadata = {\n ...handledOptions.metadata,\n ...this.getLsParams(parsedOptions),\n };\n // create callback manager and start run\n const callbackManager_ = await CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n inheritableMetadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: this?.invocationParams(parsedOptions),\n batch_size: 1,\n };\n const runManagers = await callbackManager_?.handleChatModelStart(\n this.toJSON(),\n baseMessages.map(_formatForTracing),\n handledOptions.runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions.runName\n );\n\n // generate results\n const missingPromptIndices: number[] = [];\n const results = await Promise.allSettled(\n baseMessages.map(async (baseMessage, index) => {\n // Join all content into one string for the prompt index\n const prompt =\n BaseChatModel._convertInputToPromptValue(baseMessage).toString();\n const result = await cache.lookup(prompt, llmStringKey);\n\n if (result == null) {\n missingPromptIndices.push(index);\n }\n\n return result;\n })\n );\n\n // Map run managers to the results before filtering out null results\n // Null results are just absent from the cache.\n const cachedResults = results\n .map((result, index) => ({ result, runManager: runManagers?.[index] }))\n .filter(\n ({ result }) =>\n (result.status === \"fulfilled\" && result.value != null) ||\n result.status === \"rejected\"\n );\n\n // Handle results and call run managers\n const generations: Generation[][] = [];\n await Promise.all(\n cachedResults.map(async ({ result: promiseResult, runManager }, i) => {\n if (promiseResult.status === \"fulfilled\") {\n const result = promiseResult.value as Generation[];\n generations[i] = result.map((result) => {\n if (\n \"message\" in result &&\n isBaseMessage(result.message) &&\n isAIMessage(result.message)\n ) {\n // eslint-disable-next-line no-param-reassign\n result.message.usage_metadata = {\n input_tokens: 0,\n output_tokens: 0,\n total_tokens: 0,\n };\n if (this.outputVersion === \"v1\") {\n // eslint-disable-next-line no-param-reassign\n result.message = castStandardMessageContent(result.message);\n }\n }\n // eslint-disable-next-line no-param-reassign\n result.generationInfo = {\n ...result.generationInfo,\n tokenUsage: {},\n };\n return result;\n });\n if (result.length) {\n await runManager?.handleLLMNewToken(result[0].text);\n }\n return runManager?.handleLLMEnd(\n {\n generations: [result],\n },\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n } else {\n // status === \"rejected\"\n await runManager?.handleLLMError(\n promiseResult.reason,\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n return Promise.reject(promiseResult.reason);\n }\n })\n );\n\n const 
output = {\n generations,\n missingPromptIndices,\n startedRunManagers: runManagers,\n };\n\n // This defines RUN_KEY as a non-enumerable property on the output object\n // so that it is not serialized when the output is stringified, and so that\n // it isnt included when listing the keys of the output object.\n Object.defineProperty(output, RUN_KEY, {\n value: runManagers\n ? { runIds: runManagers?.map((manager) => manager.runId) }\n : undefined,\n configurable: true,\n });\n\n return output;\n }\n\n /**\n * Generates chat based on the input messages.\n * @param messages An array of arrays of BaseMessage instances.\n * @param options The call options or an array of stop sequences.\n * @param callbacks The callbacks for the language model.\n * @returns A Promise that resolves to an LLMResult.\n */\n async generate(\n messages: BaseMessageLike[][],\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n // parse call options\n let parsedOptions: CallOptions | undefined;\n if (Array.isArray(options)) {\n parsedOptions = { stop: options } as CallOptions;\n } else {\n parsedOptions = options;\n }\n\n const baseMessages = messages.map((messageList) =>\n messageList.map(coerceMessageLikeToMessage)\n );\n\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(parsedOptions);\n runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;\n\n if (!this.cache) {\n return this._generateUncached(baseMessages, callOptions, runnableConfig);\n }\n\n const { cache } = this;\n const llmStringKey = this._getSerializedCacheKeyParametersForCall(\n callOptions as CallOptions\n );\n\n const { generations, missingPromptIndices, startedRunManagers } =\n await this._generateCached({\n messages: baseMessages,\n cache,\n llmStringKey,\n parsedOptions: callOptions,\n handledOptions: runnableConfig,\n });\n\n let llmOutput = {};\n if (missingPromptIndices.length > 0) {\n const results = await this._generateUncached(\n missingPromptIndices.map((i) => baseMessages[i]),\n callOptions,\n runnableConfig,\n startedRunManagers !== undefined\n ? missingPromptIndices.map((i) => startedRunManagers?.[i])\n : undefined\n );\n await Promise.all(\n results.generations.map(async (generation, index) => {\n const promptIndex = missingPromptIndices[index];\n generations[promptIndex] = generation;\n // Join all content into one string for the prompt index\n const prompt = BaseChatModel._convertInputToPromptValue(\n baseMessages[promptIndex]\n ).toString();\n return cache.update(prompt, llmStringKey, generation);\n })\n );\n llmOutput = results.llmOutput ?? 
{};\n }\n\n return { generations, llmOutput } as LLMResult;\n }\n\n /**\n * Get the parameters used to invoke the model\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n invocationParams(_options?: this[\"ParsedCallOptions\"]): any {\n return {};\n }\n\n _modelType(): string {\n return \"base_chat_model\" as const;\n }\n\n abstract _llmType(): string;\n\n /**\n * @deprecated\n * Return a json-like object representing this LLM.\n */\n serialize(): SerializedLLM {\n return {\n ...this.invocationParams(),\n _type: this._llmType(),\n _model: this._modelType(),\n };\n }\n\n /**\n * Generates a prompt based on the input prompt values.\n * @param promptValues An array of BasePromptValue instances.\n * @param options The call options or an array of stop sequences.\n * @param callbacks The callbacks for the language model.\n * @returns A Promise that resolves to an LLMResult.\n */\n async generatePrompt(\n promptValues: BasePromptValueInterface[],\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n const promptMessages: BaseMessage[][] = promptValues.map((promptValue) =>\n promptValue.toChatMessages()\n );\n return this.generate(promptMessages, options, callbacks);\n }\n\n abstract _generate(\n messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult>;\n\n /**\n * @deprecated Use .invoke() instead. Will be removed in 0.2.0.\n *\n * Makes a single call to the chat model.\n * @param messages An array of BaseMessage instances.\n * @param options The call options or an array of stop sequences.\n * @param callbacks The callbacks for the language model.\n * @returns A Promise that resolves to a BaseMessage.\n */\n async call(\n messages: BaseMessageLike[],\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<BaseMessage> {\n const result = await this.generate(\n [messages.map(coerceMessageLikeToMessage)],\n options,\n callbacks\n );\n const generations = result.generations as ChatGeneration[][];\n return generations[0][0].message;\n }\n\n /**\n * @deprecated Use .invoke() instead. Will be removed in 0.2.0.\n *\n * Makes a single call to the chat model with a prompt value.\n * @param promptValue The value of the prompt.\n * @param options The call options or an array of stop sequences.\n * @param callbacks The callbacks for the language model.\n * @returns A Promise that resolves to a BaseMessage.\n */\n async callPrompt(\n promptValue: BasePromptValueInterface,\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<BaseMessage> {\n const promptMessages: BaseMessage[] = promptValue.toChatMessages();\n return this.call(promptMessages, options, callbacks);\n }\n\n /**\n * @deprecated Use .invoke() instead. Will be removed in 0.2.0.\n *\n * Predicts the next message based on the input messages.\n * @param messages An array of BaseMessage instances.\n * @param options The call options or an array of stop sequences.\n * @param callbacks The callbacks for the language model.\n * @returns A Promise that resolves to a BaseMessage.\n */\n async predictMessages(\n messages: BaseMessage[],\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<BaseMessage> {\n return this.call(messages, options, callbacks);\n }\n\n /**\n * @deprecated Use .invoke() instead. 
Will be removed in 0.2.0.\n *\n * Predicts the next message based on a text input.\n * @param text The text input.\n * @param options The call options or an array of stop sequences.\n * @param callbacks The callbacks for the language model.\n * @returns A Promise that resolves to a string.\n */\n async predict(\n text: string,\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<string> {\n const message = new HumanMessage(text);\n const result = await this.call([message], options, callbacks);\n if (typeof result.content !== \"string\") {\n throw new Error(\"Cannot use predict when output is not a string.\");\n }\n return result.content;\n }\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | ZodTypeV4<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<false>\n ): Runnable<BaseLanguageModelInput, RunOutput>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | ZodTypeV4<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<true>\n ): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | ZodTypeV3<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<false>\n ): Runnable<BaseLanguageModelInput, RunOutput>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | ZodTypeV3<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<true>\n ): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<boolean>\n ):\n | Runnable<BaseLanguageModelInput, RunOutput>\n | Runnable<\n BaseLanguageModelInput,\n {\n raw: BaseMessage;\n parsed: RunOutput;\n }\n > {\n if (typeof this.bindTools !== \"function\") {\n throw new Error(\n `Chat model must implement \".bindTools()\" to use withStructuredOutput.`\n );\n }\n if (config?.strict) {\n throw new Error(\n `\"strict\" mode is not supported for this model by default.`\n );\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const schema: Record<string, any> | InteropZodType<RunOutput> =\n outputSchema;\n const name = config?.name;\n const description =\n getSchemaDescription(schema) ?? 
\"A function available to call.\";\n const method = config?.method;\n const includeRaw = config?.includeRaw;\n if (method === \"jsonMode\") {\n throw new Error(\n `Base withStructuredOutput implementation only supports \"functionCalling\" as a method.`\n );\n }\n\n let functionName = name ?? \"extract\";\n let tools: ToolDefinition[];\n if (isInteropZodSchema(schema)) {\n tools = [\n {\n type: \"function\",\n function: {\n name: functionName,\n description,\n parameters: toJsonSchema(schema),\n },\n },\n ];\n } else {\n if (\"name\" in schema) {\n functionName = schema.name;\n }\n tools = [\n {\n type: \"function\",\n function: {\n name: functionName,\n description,\n parameters: schema,\n },\n },\n ];\n }\n\n const llm = this.bindTools(tools);\n const outputParser = RunnableLambda.from<OutputMessageType, RunOutput>(\n (input: BaseMessageChunk): RunOutput => {\n if (!AIMessageChunk.isInstance(input)) {\n throw new Error(\"Input is not an AIMessageChunk.\");\n }\n if (!input.tool_calls || input.tool_calls.length === 0) {\n throw new Error(\"No tool calls found in the response.\");\n }\n const toolCall = input.tool_calls.find(\n (tc) => tc.name === functionName\n );\n if (!toolCall) {\n throw new Error(`No tool call found with name ${functionName}.`);\n }\n return toolCall.args as RunOutput;\n }\n );\n\n if (!includeRaw) {\n return llm.pipe(outputParser).withConfig({\n runName: \"StructuredOutput\",\n }) as Runnable<BaseLanguageModelInput, RunOutput>;\n }\n\n const parserAssign = RunnablePassthrough.assign({\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsed: (input: any, config) => outputParser.invoke(input.raw, config),\n });\n const parserNone = RunnablePassthrough.assign({\n parsed: () => null,\n });\n const parsedWithFallback = parserAssign.withFallbacks({\n fallbacks: [parserNone],\n });\n return RunnableSequence.from<\n BaseLanguageModelInput,\n { raw: BaseMessage; parsed: RunOutput }\n >([\n {\n raw: llm,\n },\n parsedWithFallback,\n ]).withConfig({\n runName: \"StructuredOutputRunnable\",\n });\n }\n}\n\n/**\n * An abstract class that extends BaseChatModel and provides a simple\n * implementation of _generate.\n */\nexport abstract class SimpleChatModel<\n CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions\n> extends BaseChatModel<CallOptions> {\n abstract _call(\n messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<string>;\n\n async _generate(\n messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n const text = await this._call(messages, options, runManager);\n const message = new AIMessage(text);\n if (typeof message.content !== \"string\") {\n throw new Error(\n \"Cannot generate with a simple chat model when output is not a string.\"\n );\n }\n return {\n generations: [\n {\n text: message.content,\n message,\n },\n ],\n };\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiJA,SAAgB,sCAAsC;CACpD,MAAM,cAAc,IAAI;AACxB,QAAO,IAAI,gBAAkC,EAC3C,UAAUA,OAAyB,YAAY;EAC7C,WAAW,QACT,YAAY,OACV,OAAO,MAAM,YAAY,WACrB,MAAM,UACN,KAAK,UAAU,MAAM,QAAQ,CAClC,CACF;CACF,EACF;AACF;AAED,SAAS,kBAAkBC,UAAwC;CACjE,MAAMC,kBAAiC,CAAE;AACzC,MAAK,MAAM,WAAW,UAAU;EAC9B,IAAI,iBAAiB;AACrB,MAAI,MAAM,QAAQ,QAAQ,QAAQ,CAChC,MAAK,IAAI,MAAM,GAAG,MAAM,QAAQ,QAAQ,QAAQ,OAAO;GACrD,MAAM,QAAQ,QAAQ,QAAQ;AAC9B,OAAI,kBAAkB,MAAM,IAAI,qBAAqB,MAAM,EACzD;QAAI,mBAAmB,SAGrB,iBAAiB,IAAK,QAAQ,YAAoB;KAChD,GAAG;KACH,SAAS;MACP,GAAG,QAAQ,QAAQ,MAAM,GAAG,IAAI;MAChC,0BAA0B,MAAM;MAChC,GAAG,QAAQ,QAAQ,MAAM,MAAM,EAAE;KAClC;IACF;GACF;EAEJ;EAEH,gBAAgB,KAAK,eAAe;CACrC;AACD,QAAO;AACR;;;;;AAuBD,IAAsB,gBAAtB,MAAsB,sBAIZ,kBAAkD;CAQ1D,eAAe;EAAC;EAAa;EAAe,KAAK,UAAU;CAAC;CAE5D,mBAAmB;CAEnB;CAEA,YAAYC,QAA6B;EACvC,MAAM,OAAO;EACb,KAAK,gBAAgB,KAAK,MAAM;GAC9B,MAAM,gBACJ,OAAO,iBAAiB,uBAAuB,oBAAoB;AACrE,OAAI,iBAAiB,CAAC,MAAM,IAAK,EAAC,SAAS,cAAc,CACvD,QAAO;AAET,UAAO;EACR,EAAC;CACH;CAMD,AAAU,6CACRC,SAC6C;EAE7C,MAAM,CAAC,gBAAgB,YAAY,GACjC,MAAM,uCAAuC,QAAQ;EACtD,YAA0C,SAAS,eAAe;AACnE,SAAO,CAAC,gBAAgB,WAAyC;CAClE;;;;;;;CAqBD,MAAM,OACJC,OACAC,SAC4B;EAC5B,MAAM,cAAc,cAAc,2BAA2B,MAAM;EACnE,MAAM,SAAS,MAAM,KAAK,eACxB,CAAC,WAAY,GACb,SACA,SAAS,UACV;EACD,MAAM,iBAAiB,OAAO,YAAY,GAAG;AAE7C,SAAO,eAAe;CACvB;CAGD,OAAO,sBACLC,WACAC,UACAC,aACqC;AACrC,QAAM,IAAI,MAAM;CACjB;CAED,OAAO,gBACLJ,OACAC,SACmC;AAEnC,MACE,KAAK,0BACH,cAAc,UAAU,yBAC1B,KAAK,kBAEL,MAAM,KAAK,OAAO,OAAO,QAAQ;OAC5B;GACL,MAAM,SAAS,cAAc,2BAA2B,MAAM;GAC9D,MAAM,WAAW,OAAO,gBAAgB;GACxC,MAAM,CAAC,gBAAgB,YAAY,GACjC,KAAK,6CAA6C,QAAQ;GAE5D,MAAM,sBAAsB;IAC1B,GAAG,eAAe;IAClB,GAAG,KAAK,YAAY,YAAY;GACjC;GACD,MAAM,mBAAmB,MAAM,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,qBACA,KAAK,UACL,EAAE,SAAS,KAAK,QAAS,EAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,mBAAmB,MAAM,iBAAiB,YAAY;IACtD,YAAY;GACb;GACD,MAAM,cAAc,MAAM,kBAAkB,qBAC1C,KAAK,QAAQ,EACb,CAAC,kBAAkB,SAAS,AAAC,GAC7B,eAAe,OACf,QACA,OACA,QACA,QACA,eAAe,QAChB;GACD,IAAII;GAEJ,IAAIC;AACJ,OAAI;AACF,eAAW,MAAM,SAAS,KAAK,sBAC7B,UACA,aACA,cAAc,GACf,EAAE;AACD,SAAI,MAAM,QAAQ,MAAM,MAAM;MAC5B,MAAM,QAAQ,aAAa,GAAG,EAAE,EAAE;AAClC,UAAI,SAAS,MAAM,MAAM,QAAQ,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC;KAC3D;KACD,MAAM,QAAQ,oBAAoB;MAChC,GAAG,MAAM;MACT,GAAG,MAAM,QAAQ;KAClB;AACD,SAAI,KAAK,kBAAkB,MACzB,MAAM,2BACJ,MAAM,QACP;UAED,MAAM,MAAM;AAEd,SAAI,CAAC,iBACH,kBAAkB;UAElB,kBAAkB,gBAAgB,OAAO,MAAM;AAEjD,SACE,iBAAiB,MAAM,QAAQ,IAC/B,MAAM,QAAQ,mBAAmB,QAEjC,YAAY,EACV,YAAY;MACV,cAAc,MAAM,QAAQ,eAAe;MAC3C,kBAAkB,MAAM,QAAQ,eAAe;MAC/C,aAAa,MAAM,QAAQ,eAAe;KAC3C,EACF;IAEJ;GACF,SAAQ,KAAK;IACZ,MAAM,QAAQ,KACX,eAAe,CAAE,GAAE,IAAI,CAAC,eACvB,YAAY,eAAe,IAAI,CAChC,CACF;AACD,UAAM;GACP;GACD,MAAM,QAAQ,KACX,eAAe,CAAE,GAAE,IAAI,CAAC,eACvB,YAAY,aAAa;IAEvB,aAAa,CAAC,CAAC,eAAkC,CAAC;IAClD;GACD,EAAC,CACH,CACF;EACF;CACF;CAED,YAAYC,SAAqD;EAC/D,MAAM,eAAe,KAAK,SAAS,CAAC,WAAW,OAAO,GAClD,KAAK,SAAS,CAAC,QAAQ,QAAQ,GAAG,GAClC,KAAK,SAAS;AAElB,SAAO;GACL,eAAe;GACf,SAAS,QAAQ;GACjB,aAAa;EACd;CACF;;CAGD,MAAM,kBACJC,UACAC,eACAC,gBACAC,oBACoB;EACpB,MAAM,eAAe,SAAS,IAAI,CAAC,gBACjC,YAAY,IAAI,2BAA2B,CAC5C;EAED,IAAIC;AACJ,MACE,uBAAuB,UACvB,mBAAmB,WAAW,aAAa,QAE3C,cAAc;OACT;GACL,MAAM,sBAAsB;IAC1B,GAAG,eAAe;IAClB,GAAG,KAAK,YAAY,cAAc;GACnC;GAED,MAAM,mBAAmB,MAAM,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,qBACA,KAAK,UACL,EAAE,SAAS,KAAK,QAAS,EAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,mBAAmB,MAAM,iBAAiB,cAAc;IACxD,YAAY;GACb;GACD,cAAc,MAAM,kBAAkB,qBACpC,KAAK,QAAQ,EACb,aAAa,IAAI,kBAAkB,EACnC,eAAe,OACf,QACA,OACA,QACA,QACA,eAAe,QAChB;EACF;EACD,MAAMC,cAAkC,CAAE;EAC1C,MAAMC,aAAuC,CAAE;EAI/C,MAAM,sBAAsB,CAAC,CAAC,cAAc,GA
AG,SAAS,KACtD,gCACD;AACD,MACE,uBACA,CAAC,KAAK,oBACN,aAAa,WAAW,KACxB,KAAK,0BACH,cAAc,UAAU,sBAE1B,KAAI;GACF,MAAM,SAAS,MAAM,KAAK,sBACxB,aAAa,IACb,eACA,cAAc,GACf;GACD,IAAI;GAEJ,IAAIR;AACJ,cAAW,MAAM,SAAS,QAAQ;AAChC,QAAI,MAAM,QAAQ,MAAM,MAAM;KAC5B,MAAM,QAAQ,aAAa,GAAG,EAAE,EAAE;AAClC,SAAI,SAAS,MAAM,MAAM,QAAQ,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC;IAC3D;AACD,QAAI,eAAe,QACjB,aAAa;SAEb,aAAa,OAAO,YAAY,MAAM;AAExC,QACE,iBAAiB,MAAM,QAAQ,IAC/B,MAAM,QAAQ,mBAAmB,QAEjC,YAAY,EACV,YAAY;KACV,cAAc,MAAM,QAAQ,eAAe;KAC3C,kBAAkB,MAAM,QAAQ,eAAe;KAC/C,aAAa,MAAM,QAAQ,eAAe;IAC3C,EACF;GAEJ;AACD,OAAI,eAAe,OACjB,OAAM,IAAI,MAAM;GAElB,YAAY,KAAK,CAAC,UAAW,EAAC;GAC9B,MAAM,cAAc,GAAG,aAAa;IAClC;IACA;GACD,EAAC;EACH,SAAQ,GAAG;GACV,MAAM,cAAc,GAAG,eAAe,EAAE;AACxC,SAAM;EACP;OACI;GAEL,MAAM,UAAU,MAAM,QAAQ,WAC5B,aAAa,IAAI,OAAO,aAAa,MAAM;IACzC,MAAM,kBAAkB,MAAM,KAAK,UACjC,aACA;KAAE,GAAG;KAAe,aAAa;IAAG,GACpC,cAAc,GACf;AACD,QAAI,KAAK,kBAAkB,KACzB,MAAK,MAAM,cAAc,gBAAgB,aACvC,WAAW,UAAU,2BACnB,WAAW,QACZ;AAGL,WAAO;GACR,EAAC,CACH;GAED,MAAM,QAAQ,IACZ,QAAQ,IAAI,OAAO,SAAS,MAAM;AAChC,QAAI,QAAQ,WAAW,aAAa;KAClC,MAAM,SAAS,QAAQ;AACvB,UAAK,MAAM,cAAc,OAAO,aAAa;AAC3C,UAAI,WAAW,QAAQ,MAAM,MAAM;OACjC,MAAM,QAAQ,aAAa,GAAG,EAAE,EAAE;AAClC,WAAI,SAAS,MAAM,WAAW,QAAQ,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC;MAChE;MACD,WAAW,QAAQ,oBAAoB;OACrC,GAAG,WAAW;OACd,GAAG,WAAW,QAAQ;MACvB;KACF;AACD,SAAI,OAAO,YAAY,WAAW,GAChC,OAAO,YAAY,GAAG,QAAQ,oBAAoB;MAChD,GAAG,OAAO;MACV,GAAG,OAAO,YAAY,GAAG,QAAQ;KAClC;KAEH,YAAY,KAAK,OAAO;KACxB,WAAW,KAAK,OAAO;AACvB,YAAO,cAAc,IAAI,aAAa;MACpC,aAAa,CAAC,OAAO,WAAY;MACjC,WAAW,OAAO;KACnB,EAAC;IACH,OAAM;KAEL,MAAM,cAAc,IAAI,eAAe,QAAQ,OAAO;AACtD,YAAO,QAAQ,OAAO,QAAQ,OAAO;IACtC;GACF,EAAC,CACH;EACF;EAED,MAAMS,SAAoB;GACxB;GACA,WAAW,WAAW,SAClB,KAAK,oBAAoB,GAAG,WAAW,GACvC;EACL;EACD,OAAO,eAAe,QAAQ,SAAS;GACrC,OAAO,cACH,EAAE,QAAQ,aAAa,IAAI,CAAC,YAAY,QAAQ,MAAM,CAAE,IACxD;GACJ,cAAc;EACf,EAAC;AACF,SAAO;CACR;CAED,MAAM,gBAAgB,EACpB,UACA,OACA,cACA,eACA,gBAQD,EAKC;EACA,MAAM,eAAe,SAAS,IAAI,CAAC,gBACjC,YAAY,IAAI,2BAA2B,CAC5C;EAED,MAAM,sBAAsB;GAC1B,GAAG,eAAe;GAClB,GAAG,KAAK,YAAY,cAAc;EACnC;EAED,MAAM,mBAAmB,MAAM,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,qBACA,KAAK,UACL,EAAE,SAAS,KAAK,QAAS,EAC1B;EACD,MAAM,QAAQ;GACZ,SAAS;GACT,mBAAmB,MAAM,iBAAiB,cAAc;GACxD,YAAY;EACb;EACD,MAAM,cAAc,MAAM,kBAAkB,qBAC1C,KAAK,QAAQ,EACb,aAAa,IAAI,kBAAkB,EACnC,eAAe,OACf,QACA,OACA,QACA,QACA,eAAe,QAChB;EAGD,MAAMC,uBAAiC,CAAE;EACzC,MAAM,UAAU,MAAM,QAAQ,WAC5B,aAAa,IAAI,OAAO,aAAa,UAAU;GAE7C,MAAM,SACJ,cAAc,2BAA2B,YAAY,CAAC,UAAU;GAClE,MAAM,SAAS,MAAM,MAAM,OAAO,QAAQ,aAAa;AAEvD,OAAI,UAAU,MACZ,qBAAqB,KAAK,MAAM;AAGlC,UAAO;EACR,EAAC,CACH;EAID,MAAM,gBAAgB,QACnB,IAAI,CAAC,QAAQ,WAAW;GAAE;GAAQ,YAAY,cAAc;EAAQ,GAAE,CACtE,OACC,CAAC,EAAE,QAAQ,KACR,OAAO,WAAW,eAAe,OAAO,SAAS,QAClD,OAAO,WAAW,WACrB;EAGH,MAAMC,cAA8B,CAAE;EACtC,MAAM,QAAQ,IACZ,cAAc,IAAI,OAAO,EAAE,QAAQ,eAAe,YAAY,EAAE,MAAM;AACpE,OAAI,cAAc,WAAW,aAAa;IACxC,MAAM,SAAS,cAAc;IAC7B,YAAY,KAAK,OAAO,IAAI,CAACC,aAAW;AACtC,SACE,aAAaA,YACb,cAAcA,SAAO,QAAQ,IAC7B,YAAYA,SAAO,QAAQ,EAC3B;MAEAA,SAAO,QAAQ,iBAAiB;OAC9B,cAAc;OACd,eAAe;OACf,cAAc;MACf;AACD,UAAI,KAAK,kBAAkB,MAEzBA,SAAO,UAAU,2BAA2BA,SAAO,QAAQ;KAE9D;KAEDA,SAAO,iBAAiB;MACtB,GAAGA,SAAO;MACV,YAAY,CAAE;KACf;AACD,YAAOA;IACR,EAAC;AACF,QAAI,OAAO,QACT,MAAM,YAAY,kBAAkB,OAAO,GAAG,KAAK;AAErD,WAAO,YAAY,aACjB,EACE,aAAa,CAAC,MAAO,EACtB,GACD,QACA,QACA,QACA,EACE,QAAQ,KACT,EACF;GACF,OAAM;IAEL,MAAM,YAAY,eAChB,cAAc,QACd,QACA,QACA,QACA,EACE,QAAQ,KACT,EACF;AACD,WAAO,QAAQ,OAAO,cAAc,OAAO;GAC5C;EACF,EAAC,CACH;EAED,MAAM,SAAS;GACb;GACA;GACA,oBAAoB;EACrB;EAKD,OAAO,eAAe,QAAQ,SAAS;GACrC,OAAO,cACH,EAAE,QAAQ,aAAa,IAAI,CAAC,YAAY,QAAQ,MAAM,CAAE,IACxD;GACJ,cAAc;EACf,EA
AC;AAEF,SAAO;CACR;;;;;;;;CASD,MAAM,SACJV,UACAW,SACAC,WACoB;EAEpB,IAAIC;AACJ,MAAI,MAAM,QAAQ,QAAQ,EACxB,gBAAgB,EAAE,MAAM,QAAS;OAEjC,gBAAgB;EAGlB,MAAM,eAAe,SAAS,IAAI,CAAC,gBACjC,YAAY,IAAI,2BAA2B,CAC5C;EAED,MAAM,CAAC,gBAAgB,YAAY,GACjC,KAAK,6CAA6C,cAAc;EAClE,eAAe,YAAY,eAAe,aAAa;AAEvD,MAAI,CAAC,KAAK,MACR,QAAO,KAAK,kBAAkB,cAAc,aAAa,eAAe;EAG1E,MAAM,EAAE,OAAO,GAAG;EAClB,MAAM,eAAe,KAAK,wCACxB,YACD;EAED,MAAM,EAAE,aAAa,sBAAsB,oBAAoB,GAC7D,MAAM,KAAK,gBAAgB;GACzB,UAAU;GACV;GACA;GACA,eAAe;GACf,gBAAgB;EACjB,EAAC;EAEJ,IAAI,YAAY,CAAE;AAClB,MAAI,qBAAqB,SAAS,GAAG;GACnC,MAAM,UAAU,MAAM,KAAK,kBACzB,qBAAqB,IAAI,CAAC,MAAM,aAAa,GAAG,EAChD,aACA,gBACA,uBAAuB,SACnB,qBAAqB,IAAI,CAAC,MAAM,qBAAqB,GAAG,GACxD,OACL;GACD,MAAM,QAAQ,IACZ,QAAQ,YAAY,IAAI,OAAO,YAAY,UAAU;IACnD,MAAM,cAAc,qBAAqB;IACzC,YAAY,eAAe;IAE3B,MAAM,SAAS,cAAc,2BAC3B,aAAa,aACd,CAAC,UAAU;AACZ,WAAO,MAAM,OAAO,QAAQ,cAAc,WAAW;GACtD,EAAC,CACH;GACD,YAAY,QAAQ,aAAa,CAAE;EACpC;AAED,SAAO;GAAE;GAAa;EAAW;CAClC;;;;CAMD,iBAAiBC,UAA2C;AAC1D,SAAO,CAAE;CACV;CAED,aAAqB;AACnB,SAAO;CACR;;;;;CAQD,YAA2B;AACzB,SAAO;GACL,GAAG,KAAK,kBAAkB;GAC1B,OAAO,KAAK,UAAU;GACtB,QAAQ,KAAK,YAAY;EAC1B;CACF;;;;;;;;CASD,MAAM,eACJC,cACAJ,SACAC,WACoB;EACpB,MAAMI,iBAAkC,aAAa,IAAI,CAAC,gBACxD,YAAY,gBAAgB,CAC7B;AACD,SAAO,KAAK,SAAS,gBAAgB,SAAS,UAAU;CACzD;;;;;;;;;;CAiBD,MAAM,KACJC,UACAN,SACAC,WACsB;EACtB,MAAM,SAAS,MAAM,KAAK,SACxB,CAAC,SAAS,IAAI,2BAA2B,AAAC,GAC1C,SACA,UACD;EACD,MAAM,cAAc,OAAO;AAC3B,SAAO,YAAY,GAAG,GAAG;CAC1B;;;;;;;;;;CAWD,MAAM,WACJM,aACAP,SACAC,WACsB;EACtB,MAAMO,iBAAgC,YAAY,gBAAgB;AAClE,SAAO,KAAK,KAAK,gBAAgB,SAAS,UAAU;CACrD;;;;;;;;;;CAWD,MAAM,gBACJ/B,UACAuB,SACAC,WACsB;AACtB,SAAO,KAAK,KAAK,UAAU,SAAS,UAAU;CAC/C;;;;;;;;;;CAWD,MAAM,QACJQ,MACAT,SACAC,WACiB;EACjB,MAAM,UAAU,IAAI,aAAa;EACjC,MAAM,SAAS,MAAM,KAAK,KAAK,CAAC,OAAQ,GAAE,SAAS,UAAU;AAC7D,MAAI,OAAO,OAAO,YAAY,SAC5B,OAAM,IAAI,MAAM;AAElB,SAAO,OAAO;CACf;CA8CD,qBAIES,cAIAC,QASI;AACJ,MAAI,OAAO,KAAK,cAAc,WAC5B,OAAM,IAAI,MACR,CAAC,qEAAqE,CAAC;AAG3E,MAAI,QAAQ,OACV,OAAM,IAAI,MACR,CAAC,yDAAyD,CAAC;EAI/D,MAAMC,SACJ;EACF,MAAM,OAAO,QAAQ;EACrB,MAAM,cACJ,qBAAqB,OAAO,IAAI;EAClC,MAAM,SAAS,QAAQ;EACvB,MAAM,aAAa,QAAQ;AAC3B,MAAI,WAAW,WACb,OAAM,IAAI,MACR,CAAC,qFAAqF,CAAC;EAI3F,IAAI,eAAe,QAAQ;EAC3B,IAAIC;AACJ,MAAI,mBAAmB,OAAO,EAC5B,QAAQ,CACN;GACE,MAAM;GACN,UAAU;IACR,MAAM;IACN;IACA,YAAY,aAAa,OAAO;GACjC;EACF,CACF;OACI;AACL,OAAI,UAAU,QACZ,eAAe,OAAO;GAExB,QAAQ,CACN;IACE,MAAM;IACN,UAAU;KACR,MAAM;KACN;KACA,YAAY;IACb;GACF,CACF;EACF;EAED,MAAM,MAAM,KAAK,UAAU,MAAM;EACjC,MAAM,eAAe,eAAe,KAClC,CAACC,UAAuC;AACtC,OAAI,CAAC,eAAe,WAAW,MAAM,CACnC,OAAM,IAAI,MAAM;AAElB,OAAI,CAAC,MAAM,cAAc,MAAM,WAAW,WAAW,EACnD,OAAM,IAAI,MAAM;GAElB,MAAM,WAAW,MAAM,WAAW,KAChC,CAAC,OAAO,GAAG,SAAS,aACrB;AACD,OAAI,CAAC,SACH,OAAM,IAAI,MAAM,CAAC,6BAA6B,EAAE,aAAa,CAAC,CAAC;AAEjE,UAAO,SAAS;EACjB,EACF;AAED,MAAI,CAAC,WACH,QAAO,IAAI,KAAK,aAAa,CAAC,WAAW,EACvC,SAAS,mBACV,EAAC;EAGJ,MAAM,eAAe,oBAAoB,OAAO,EAE9C,QAAQ,CAACC,OAAYC,aAAW,aAAa,OAAO,MAAM,KAAKA,SAAO,CACvE,EAAC;EACF,MAAM,aAAa,oBAAoB,OAAO,EAC5C,QAAQ,MAAM,KACf,EAAC;EACF,MAAM,qBAAqB,aAAa,cAAc,EACpD,WAAW,CAAC,UAAW,EACxB,EAAC;AACF,SAAO,iBAAiB,KAGtB,CACA,EACE,KAAK,IACN,GACD,kBACD,EAAC,CAAC,WAAW,EACZ,SAAS,2BACV,EAAC;CACH;AACF;;;;;AAMD,IAAsB,kBAAtB,cAEU,cAA2B;CAOnC,MAAM,UACJvC,UACAW,SACA6B,YACqB;EACrB,MAAM,OAAO,MAAM,KAAK,MAAM,UAAU,SAAS,WAAW;EAC5D,MAAM,UAAU,IAAI,UAAU;AAC9B,MAAI,OAAO,QAAQ,YAAY,SAC7B,OAAM,IAAI,MACR;AAGJ,SAAO,EACL,aAAa,CACX;GACE,MAAM,QAAQ;GACd;EACD,CACF,EACF;CACF;AACF"}
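Note: the chat_models source embedded in the map above adds an `outputVersion` option ("v0" | "v1") to `BaseChatModelParams`, falling back to the `LC_OUTPUT_VERSION` environment variable and defaulting to "v0". A minimal sketch of how a consumer might opt in; `EchoChatModel` is a hypothetical model used only for illustration, not part of the package:

  import { SimpleChatModel } from "@langchain/core/language_models/chat_models";
  import type { BaseMessage } from "@langchain/core/messages";

  // Hypothetical subclass: echoes the last message instead of calling a provider.
  class EchoChatModel extends SimpleChatModel {
    _llmType(): string {
      return "echo";
    }

    async _call(messages: BaseMessage[]): Promise<string> {
      const last = messages[messages.length - 1];
      return typeof last.content === "string"
        ? last.content
        : JSON.stringify(last.content);
    }
  }

  // "v1" stores the standardized content-block format in message content
  // (consistent with `.contentBlocks`); "v0" (the default) keeps the
  // provider-specific format. Setting LC_OUTPUT_VERSION=v1 changes the default.
  const model = new EchoChatModel({ outputVersion: "v1" });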
package/dist/messages/ai.cjs
CHANGED
@@ -1,18 +1,14 @@
 const require_json = require('../utils/json.cjs');
 const require_base = require('./base.cjs');
 const require_index = require('./block_translators/index.cjs');
+const require_metadata = require('./metadata.cjs');
 const require_messages_tool = require('./tool.cjs');
 
 //#region src/messages/ai.ts
-/**
-* Represents an AI message in a conversation.
-*/
 var AIMessage = class extends require_base.BaseMessage {
+type = "ai";
 tool_calls = [];
 invalid_tool_calls = [];
-/**
-* If provided, token usage information associated with the message.
-*/
 usage_metadata;
 get lc_aliases() {
 return {
@@ -21,13 +17,13 @@ var AIMessage = class extends require_base.BaseMessage {
 invalid_tool_calls: "invalid_tool_calls"
 };
 }
-constructor(fields
+constructor(fields) {
 let initParams;
-if (typeof fields === "string") initParams = {
+if (typeof fields === "string" || Array.isArray(fields)) initParams = {
 content: fields,
 tool_calls: [],
 invalid_tool_calls: [],
-additional_kwargs:
+additional_kwargs: {}
 };
 else {
 initParams = fields;
@@ -61,6 +57,7 @@ var AIMessage = class extends require_base.BaseMessage {
 })));
 const missingToolCalls = initParams.contentBlocks.filter((block) => block.type === "tool_call").filter((block) => !initParams.tool_calls?.some((toolCall) => toolCall.id === block.id && toolCall.name === block.name));
 if (missingToolCalls.length > 0) initParams.tool_calls = missingToolCalls.map((block) => ({
+type: "tool_call",
 id: block.id,
 name: block.name,
 args: block.args
@@ -77,14 +74,10 @@ var AIMessage = class extends require_base.BaseMessage {
 static lc_name() {
 return "AIMessage";
 }
-_getType() {
-return "ai";
-}
 get contentBlocks() {
-if (this.response_metadata
-
-
-const translator = require_index.getTranslator(modelProvider);
+if ("output_version" in this.response_metadata && this.response_metadata.output_version === "v1") return this.content;
+if ("model_provider" in this.response_metadata && typeof this.response_metadata.model_provider === "string") {
+const translator = require_index.getTranslator(this.response_metadata.model_provider);
 if (translator) return translator.translateContent(this);
 }
 const blocks = super.contentBlocks;
@@ -108,10 +101,19 @@ var AIMessage = class extends require_base.BaseMessage {
 usage_metadata: this.usage_metadata
 };
 }
+static isInstance(obj) {
+return super.isInstance(obj) && obj.type === "ai";
+}
 };
+/**
+* @deprecated Use {@link AIMessage.isInstance} instead
+*/
 function isAIMessage(x) {
 return x._getType() === "ai";
 }
+/**
+* @deprecated Use {@link AIMessageChunk.isInstance} instead
+*/
 function isAIMessageChunk(x) {
 return x._getType() === "ai";
 }
@@ -119,17 +121,15 @@ function isAIMessageChunk(x) {
 * Represents a chunk of an AI message, which can be concatenated with
 * other AI message chunks.
 */
-var AIMessageChunk = class
+var AIMessageChunk = class extends require_base.BaseMessageChunk {
+type = "ai";
 tool_calls = [];
 invalid_tool_calls = [];
 tool_call_chunks = [];
-/**
-* If provided, token usage information associated with the message.
-*/
 usage_metadata;
 constructor(fields) {
 let initParams;
-if (typeof fields === "string") initParams = {
+if (typeof fields === "string" || Array.isArray(fields)) initParams = {
 content: fields,
 tool_calls: [],
 invalid_tool_calls: [],
@@ -152,7 +152,7 @@ var AIMessageChunk = class AIMessageChunk extends require_base.BaseMessageChunk
 const toolCalls = [];
 const invalidToolCalls = [];
 for (const [id, chunks] of Object.entries(groupedToolCallChunk)) {
-let parsedArgs =
+let parsedArgs = null;
 const name = chunks[0]?.name ?? "";
 const joinedArgs = chunks.map((c) => c.args || "").join("");
 const argsStr = joinedArgs.length ? joinedArgs : "{}";
@@ -200,14 +200,10 @@ var AIMessageChunk = class AIMessageChunk extends require_base.BaseMessageChunk
 static lc_name() {
 return "AIMessageChunk";
 }
-_getType() {
-return "ai";
-}
 get contentBlocks() {
-if (this.response_metadata
-
-
-const translator = require_index.getTranslator(modelProvider);
+if ("output_version" in this.response_metadata && this.response_metadata.output_version === "v1") return this.content;
+if ("model_provider" in this.response_metadata && typeof this.response_metadata.model_provider === "string") {
+const translator = require_index.getTranslator(this.response_metadata.model_provider);
 if (translator) return translator.translateContent(this);
 }
 const blocks = super.contentBlocks;
@@ -238,7 +234,7 @@ var AIMessageChunk = class AIMessageChunk extends require_base.BaseMessageChunk
 const combinedFields = {
 content: require_base.mergeContent(this.content, chunk.content),
 additional_kwargs: require_base._mergeDicts(this.additional_kwargs, chunk.additional_kwargs),
-response_metadata:
+response_metadata: require_metadata.mergeResponseMetadata(this.response_metadata, chunk.response_metadata),
 tool_call_chunks: [],
 id: this.id ?? chunk.id
 };
@@ -246,36 +242,12 @@ var AIMessageChunk = class AIMessageChunk extends require_base.BaseMessageChunk
 const rawToolCalls = require_base._mergeLists(this.tool_call_chunks, chunk.tool_call_chunks);
 if (rawToolCalls !== void 0 && rawToolCalls.length > 0) combinedFields.tool_call_chunks = rawToolCalls;
 }
-if (this.usage_metadata !== void 0 || chunk.usage_metadata !== void 0)
-
-
-
-
-
-const outputTokenDetails = {
-...(this.usage_metadata?.output_token_details?.audio !== void 0 || chunk.usage_metadata?.output_token_details?.audio !== void 0) && { audio: (this.usage_metadata?.output_token_details?.audio ?? 0) + (chunk.usage_metadata?.output_token_details?.audio ?? 0) },
-...(this.usage_metadata?.output_token_details?.reasoning !== void 0 || chunk.usage_metadata?.output_token_details?.reasoning !== void 0) && { reasoning: (this.usage_metadata?.output_token_details?.reasoning ?? 0) + (chunk.usage_metadata?.output_token_details?.reasoning ?? 0) }
-};
-const left = this.usage_metadata ?? {
-input_tokens: 0,
-output_tokens: 0,
-total_tokens: 0
-};
-const right = chunk.usage_metadata ?? {
-input_tokens: 0,
-output_tokens: 0,
-total_tokens: 0
-};
-const usage_metadata = {
-input_tokens: left.input_tokens + right.input_tokens,
-output_tokens: left.output_tokens + right.output_tokens,
-total_tokens: left.total_tokens + right.total_tokens,
-...Object.keys(inputTokenDetails).length > 0 && { input_token_details: inputTokenDetails },
-...Object.keys(outputTokenDetails).length > 0 && { output_token_details: outputTokenDetails }
-};
-combinedFields.usage_metadata = usage_metadata;
-}
-return new AIMessageChunk(combinedFields);
+if (this.usage_metadata !== void 0 || chunk.usage_metadata !== void 0) combinedFields.usage_metadata = require_metadata.mergeUsageMetadata(this.usage_metadata, chunk.usage_metadata);
+const Cls = this.constructor;
+return new Cls(combinedFields);
+}
+static isInstance(obj) {
+return super.isInstance(obj) && obj.type === "ai";
 }
 };
 
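Note: the ai.cjs changes above give `AIMessage` and `AIMessageChunk` a `type` field set to "ai" and a static `isInstance()` check, let the constructor accept array content directly, and deprecate the standalone `isAIMessage` / `isAIMessageChunk` guards. A small usage sketch based on those changes (illustrative only, not an official migration example):

  import { AIMessage } from "@langchain/core/messages";

  // Array content is now accepted directly by the constructor
  // (previously only a string or a fields object).
  const msg = new AIMessage([{ type: "text", text: "Hello!" }]);

  // Preferred runtime check in this release; the standalone
  // isAIMessage / isAIMessageChunk guards are marked @deprecated.
  if (AIMessage.isInstance(msg)) {
    console.log(msg.type); // "ai"
  }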
package/dist/messages/ai.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"ai.cjs","names":["BaseMessage","fields: string | AIMessageFields","kwargs?: Record<string, unknown>","initParams: AIMessageFields","toolCalls","defaultToolCallParser","getTranslator","x: BaseMessage","x: BaseMessageChunk","BaseMessageChunk","fields: string | AIMessageChunkFields","initParams: AIMessageChunkFields","toolCalls: ToolCall[]","invalidToolCalls: InvalidToolCall[]","parsePartialJson","chunk: AIMessageChunk","combinedFields: AIMessageChunkFields","mergeContent","_mergeDicts","_mergeLists","inputTokenDetails: InputTokenDetails","outputTokenDetails: OutputTokenDetails","left: UsageMetadata","right: UsageMetadata","usage_metadata: UsageMetadata"],"sources":["../../src/messages/ai.ts"],"sourcesContent":["import { parsePartialJson } from \"../utils/json.js\";\nimport {\n BaseMessage,\n BaseMessageChunk,\n mergeContent,\n _mergeDicts,\n type MessageType,\n BaseMessageFields,\n _mergeLists,\n} from \"./base.js\";\nimport { getTranslator } from \"./block_translators/index.js\";\nimport { ContentBlock } from \"./content/index.js\";\nimport {\n InvalidToolCall,\n ToolCall,\n ToolCallChunk,\n defaultToolCallParser,\n} from \"./tool.js\";\n\nexport type AIMessageFields = BaseMessageFields & {\n tool_calls?: ToolCall[];\n invalid_tool_calls?: InvalidToolCall[];\n usage_metadata?: UsageMetadata;\n};\n\nexport type ModalitiesTokenDetails = {\n /**\n * Text tokens.\n * Does not need to be reported, but some models will do so.\n */\n text?: number;\n\n /**\n * Image (non-video) tokens.\n */\n image?: number;\n\n /**\n * Audio tokens.\n */\n audio?: number;\n\n /**\n * Video tokens.\n */\n video?: number;\n\n /**\n * Document tokens.\n * e.g. PDF\n */\n document?: number;\n};\n\n/**\n * Breakdown of input token counts.\n *\n * Does not *need* to sum to full input token count. Does *not* need to have all keys.\n */\nexport type InputTokenDetails = ModalitiesTokenDetails & {\n /**\n * Input tokens that were cached and there was a cache hit.\n *\n * Since there was a cache hit, the tokens were read from the cache.\n * More precisely, the model state given these tokens was read from the cache.\n */\n cache_read?: number;\n\n /**\n * Input tokens that were cached and there was a cache miss.\n *\n * Since there was a cache miss, the cache was created from these tokens.\n */\n cache_creation?: number;\n};\n\n/**\n * Breakdown of output token counts.\n *\n * Does *not* need to sum to full output token count. Does *not* need to have all keys.\n */\nexport type OutputTokenDetails = ModalitiesTokenDetails & {\n /**\n * Reasoning output tokens.\n *\n * Tokens generated by the model in a chain of thought process (i.e. by\n * OpenAI's o1 models) that are not returned as part of model output.\n */\n reasoning?: number;\n};\n\n/**\n * Usage metadata for a message, such as token counts.\n */\nexport type UsageMetadata = {\n /**\n * Count of input (or prompt) tokens. Sum of all input token types.\n */\n input_tokens: number;\n /**\n * Count of output (or completion) tokens. Sum of all output token types.\n */\n output_tokens: number;\n /**\n * Total token count. Sum of input_tokens + output_tokens.\n */\n total_tokens: number;\n\n /**\n * Breakdown of input token counts.\n *\n * Does *not* need to sum to full input token count. Does *not* need to have all keys.\n */\n input_token_details?: InputTokenDetails;\n\n /**\n * Breakdown of output token counts.\n *\n * Does *not* need to sum to full output token count. 
Does *not* need to have all keys.\n */\n output_token_details?: OutputTokenDetails;\n};\n\n/**\n * Represents an AI message in a conversation.\n */\nexport class AIMessage extends BaseMessage implements AIMessageFields {\n // These are typed as optional to avoid breaking changes and allow for casting\n // from BaseMessage.\n tool_calls?: ToolCall[] = [];\n\n invalid_tool_calls?: InvalidToolCall[] = [];\n\n /**\n * If provided, token usage information associated with the message.\n */\n usage_metadata?: UsageMetadata;\n\n get lc_aliases(): Record<string, string> {\n // exclude snake case conversion to pascal case\n return {\n ...super.lc_aliases,\n tool_calls: \"tool_calls\",\n invalid_tool_calls: \"invalid_tool_calls\",\n };\n }\n\n constructor(\n fields: string | AIMessageFields,\n /** @deprecated */\n kwargs?: Record<string, unknown>\n ) {\n let initParams: AIMessageFields;\n if (typeof fields === \"string\") {\n initParams = {\n content: fields,\n tool_calls: [],\n invalid_tool_calls: [],\n additional_kwargs: kwargs ?? {},\n };\n } else {\n initParams = fields;\n const rawToolCalls = initParams.additional_kwargs?.tool_calls;\n const toolCalls = initParams.tool_calls;\n if (\n !(rawToolCalls == null) &&\n rawToolCalls.length > 0 &&\n (toolCalls === undefined || toolCalls.length === 0)\n ) {\n console.warn(\n [\n \"New LangChain packages are available that more efficiently handle\",\n \"tool calling.\\n\\nPlease upgrade your packages to versions that set\",\n \"message tool calls. e.g., `pnpm install @langchain/anthropic`,\",\n \"pnpm install @langchain/openai`, etc.\",\n ].join(\" \")\n );\n }\n try {\n if (!(rawToolCalls == null) && toolCalls === undefined) {\n const [toolCalls, invalidToolCalls] =\n defaultToolCallParser(rawToolCalls);\n initParams.tool_calls = toolCalls ?? [];\n initParams.invalid_tool_calls = invalidToolCalls ?? [];\n } else {\n initParams.tool_calls = initParams.tool_calls ?? [];\n initParams.invalid_tool_calls = initParams.invalid_tool_calls ?? [];\n }\n } catch (e) {\n // Do nothing if parsing fails\n initParams.tool_calls = [];\n initParams.invalid_tool_calls = [];\n }\n if (initParams.contentBlocks !== undefined) {\n // Add constructor tool calls as content blocks\n initParams.contentBlocks.push(\n ...initParams.tool_calls.map((toolCall) => ({\n type: \"tool_call\" as const,\n id: toolCall.id,\n name: toolCall.name,\n args: toolCall.args,\n }))\n );\n // Add content block tool calls that aren't in the constructor tool calls\n const missingToolCalls = initParams.contentBlocks\n .filter<ContentBlock.Tools.ToolCall>(\n (block) => block.type === \"tool_call\"\n )\n .filter(\n (block) =>\n !initParams.tool_calls?.some(\n (toolCall) =>\n toolCall.id === block.id && toolCall.name === block.name\n )\n );\n if (missingToolCalls.length > 0) {\n initParams.tool_calls = missingToolCalls.map((block) => ({\n id: block.id!,\n name: block.name,\n args: block.args as Record<string, unknown>,\n }));\n }\n }\n }\n // Sadly, TypeScript only allows super() calls at root if the class has\n // properties with initializers, so we have to check types twice.\n super(initParams);\n if (typeof initParams !== \"string\") {\n this.tool_calls = initParams.tool_calls ?? this.tool_calls;\n this.invalid_tool_calls =\n initParams.invalid_tool_calls ?? 
this.invalid_tool_calls;\n }\n this.usage_metadata = initParams.usage_metadata;\n }\n\n static lc_name() {\n return \"AIMessage\";\n }\n\n _getType(): MessageType {\n return \"ai\";\n }\n\n get contentBlocks(): Array<ContentBlock.Standard> {\n if (this.response_metadata?.output_version === \"v1\") {\n return this.content as Array<ContentBlock.Standard>;\n }\n\n const modelProvider = this.response_metadata?.model_provider;\n if (modelProvider) {\n const translator = getTranslator(modelProvider);\n if (translator) {\n return translator.translateContent(this);\n }\n }\n\n const blocks = super.contentBlocks;\n\n if (this.tool_calls) {\n const missingToolCalls = this.tool_calls.filter(\n (block) =>\n !blocks.some((b) => b.id === block.id && b.name === block.name)\n );\n blocks.push(\n ...missingToolCalls.map((block) => ({\n ...block,\n type: \"tool_call\" as const,\n id: block.id,\n name: block.name,\n args: block.args,\n }))\n );\n }\n\n return blocks;\n }\n\n override get _printableFields(): Record<string, unknown> {\n return {\n ...super._printableFields,\n tool_calls: this.tool_calls,\n invalid_tool_calls: this.invalid_tool_calls,\n usage_metadata: this.usage_metadata,\n };\n }\n}\n\nexport function isAIMessage(x: BaseMessage): x is AIMessage {\n return x._getType() === \"ai\";\n}\n\nexport function isAIMessageChunk(x: BaseMessageChunk): x is AIMessageChunk {\n return x._getType() === \"ai\";\n}\n\nexport type AIMessageChunkFields = AIMessageFields & {\n tool_call_chunks?: ToolCallChunk[];\n};\n\n/**\n * Represents a chunk of an AI message, which can be concatenated with\n * other AI message chunks.\n */\nexport class AIMessageChunk extends BaseMessageChunk {\n // Must redeclare tool call fields since there is no multiple inheritance in JS.\n // These are typed as optional to avoid breaking changes and allow for casting\n // from BaseMessage.\n tool_calls?: ToolCall[] = [];\n\n invalid_tool_calls?: InvalidToolCall[] = [];\n\n tool_call_chunks?: ToolCallChunk[] = [];\n\n /**\n * If provided, token usage information associated with the message.\n */\n usage_metadata?: UsageMetadata;\n\n constructor(fields: string | AIMessageChunkFields) {\n let initParams: AIMessageChunkFields;\n if (typeof fields === \"string\") {\n initParams = {\n content: fields,\n tool_calls: [],\n invalid_tool_calls: [],\n tool_call_chunks: [],\n };\n } else if (fields.tool_call_chunks === undefined) {\n initParams = {\n ...fields,\n tool_calls: fields.tool_calls ?? [],\n invalid_tool_calls: [],\n tool_call_chunks: [],\n usage_metadata:\n fields.usage_metadata !== undefined\n ? fields.usage_metadata\n : undefined,\n };\n } else {\n const groupedToolCallChunk = fields.tool_call_chunks.reduce(\n (acc, chunk) => {\n // Assign a fallback ID if the chunk doesn't have one\n // This can happen with tools that have empty schemas\n const chunkId = chunk.id || `fallback-${chunk.index || 0}`;\n acc[chunkId] = acc[chunkId] ?? [];\n acc[chunkId].push(chunk);\n return acc;\n },\n {} as Record<string, ToolCallChunk[]>\n );\n\n const toolCalls: ToolCall[] = [];\n const invalidToolCalls: InvalidToolCall[] = [];\n for (const [id, chunks] of Object.entries(groupedToolCallChunk)) {\n let parsedArgs = {};\n const name = chunks[0]?.name ?? \"\";\n const joinedArgs = chunks.map((c) => c.args || \"\").join(\"\");\n const argsStr = joinedArgs.length ? 
joinedArgs : \"{}\";\n // Use the original ID from the first chunk if it exists, otherwise use the grouped ID\n const originalId = chunks[0]?.id || id;\n try {\n parsedArgs = parsePartialJson(argsStr);\n if (\n parsedArgs === null ||\n typeof parsedArgs !== \"object\" ||\n Array.isArray(parsedArgs)\n ) {\n throw new Error(\"Malformed tool call chunk args.\");\n }\n toolCalls.push({\n name,\n args: parsedArgs,\n id: originalId,\n type: \"tool_call\",\n });\n } catch (e) {\n invalidToolCalls.push({\n name,\n args: argsStr,\n id: originalId,\n error: \"Malformed args.\",\n type: \"invalid_tool_call\",\n });\n }\n }\n initParams = {\n ...fields,\n tool_calls: toolCalls,\n invalid_tool_calls: invalidToolCalls,\n usage_metadata:\n fields.usage_metadata !== undefined\n ? fields.usage_metadata\n : undefined,\n };\n }\n // Sadly, TypeScript only allows super() calls at root if the class has\n // properties with initializers, so we have to check types twice.\n super(initParams);\n this.tool_call_chunks =\n initParams.tool_call_chunks ?? this.tool_call_chunks;\n this.tool_calls = initParams.tool_calls ?? this.tool_calls;\n this.invalid_tool_calls =\n initParams.invalid_tool_calls ?? this.invalid_tool_calls;\n this.usage_metadata = initParams.usage_metadata;\n }\n\n get lc_aliases(): Record<string, string> {\n // exclude snake case conversion to pascal case\n return {\n ...super.lc_aliases,\n tool_calls: \"tool_calls\",\n invalid_tool_calls: \"invalid_tool_calls\",\n tool_call_chunks: \"tool_call_chunks\",\n };\n }\n\n static lc_name() {\n return \"AIMessageChunk\";\n }\n\n _getType(): MessageType {\n return \"ai\";\n }\n\n get contentBlocks(): Array<ContentBlock.Standard> {\n if (this.response_metadata?.output_version === \"v1\") {\n return this.content as Array<ContentBlock.Standard>;\n }\n\n const modelProvider = this.response_metadata?.model_provider;\n if (modelProvider) {\n const translator = getTranslator(modelProvider);\n if (translator) {\n return translator.translateContent(this);\n }\n }\n\n const blocks = super.contentBlocks;\n\n if (this.tool_calls) {\n if (typeof this.content !== \"string\") {\n const contentToolCalls = this.content\n .filter((block) => block.type === \"tool_call\")\n .map((block) => block.id);\n for (const toolCall of this.tool_calls) {\n if (toolCall.id && !contentToolCalls.includes(toolCall.id)) {\n blocks.push({\n ...toolCall,\n type: \"tool_call\",\n id: toolCall.id,\n name: toolCall.name,\n args: toolCall.args,\n });\n }\n }\n }\n }\n\n return blocks;\n }\n\n override get _printableFields(): Record<string, unknown> {\n return {\n ...super._printableFields,\n tool_calls: this.tool_calls,\n tool_call_chunks: this.tool_call_chunks,\n invalid_tool_calls: this.invalid_tool_calls,\n usage_metadata: this.usage_metadata,\n };\n }\n\n concat(chunk: AIMessageChunk) {\n const combinedFields: AIMessageChunkFields = {\n content: mergeContent(this.content, chunk.content),\n additional_kwargs: _mergeDicts(\n this.additional_kwargs,\n chunk.additional_kwargs\n ),\n response_metadata: _mergeDicts(\n this.response_metadata,\n chunk.response_metadata\n ),\n tool_call_chunks: [],\n id: this.id ?? 
chunk.id,\n };\n if (\n this.tool_call_chunks !== undefined ||\n chunk.tool_call_chunks !== undefined\n ) {\n const rawToolCalls = _mergeLists(\n this.tool_call_chunks,\n chunk.tool_call_chunks\n );\n if (rawToolCalls !== undefined && rawToolCalls.length > 0) {\n combinedFields.tool_call_chunks = rawToolCalls;\n }\n }\n if (\n this.usage_metadata !== undefined ||\n chunk.usage_metadata !== undefined\n ) {\n const inputTokenDetails: InputTokenDetails = {\n ...((this.usage_metadata?.input_token_details?.audio !== undefined ||\n chunk.usage_metadata?.input_token_details?.audio !== undefined) && {\n audio:\n (this.usage_metadata?.input_token_details?.audio ?? 0) +\n (chunk.usage_metadata?.input_token_details?.audio ?? 0),\n }),\n ...((this.usage_metadata?.input_token_details?.cache_read !==\n undefined ||\n chunk.usage_metadata?.input_token_details?.cache_read !==\n undefined) && {\n cache_read:\n (this.usage_metadata?.input_token_details?.cache_read ?? 0) +\n (chunk.usage_metadata?.input_token_details?.cache_read ?? 0),\n }),\n ...((this.usage_metadata?.input_token_details?.cache_creation !==\n undefined ||\n chunk.usage_metadata?.input_token_details?.cache_creation !==\n undefined) && {\n cache_creation:\n (this.usage_metadata?.input_token_details?.cache_creation ?? 0) +\n (chunk.usage_metadata?.input_token_details?.cache_creation ?? 0),\n }),\n };\n\n const outputTokenDetails: OutputTokenDetails = {\n ...((this.usage_metadata?.output_token_details?.audio !== undefined ||\n chunk.usage_metadata?.output_token_details?.audio !== undefined) && {\n audio:\n (this.usage_metadata?.output_token_details?.audio ?? 0) +\n (chunk.usage_metadata?.output_token_details?.audio ?? 0),\n }),\n ...((this.usage_metadata?.output_token_details?.reasoning !==\n undefined ||\n chunk.usage_metadata?.output_token_details?.reasoning !==\n undefined) && {\n reasoning:\n (this.usage_metadata?.output_token_details?.reasoning ?? 0) +\n (chunk.usage_metadata?.output_token_details?.reasoning ?? 0),\n }),\n };\n\n const left: UsageMetadata = this.usage_metadata ?? {\n input_tokens: 0,\n output_tokens: 0,\n total_tokens: 0,\n };\n const right: UsageMetadata = chunk.usage_metadata ?? 
{\n input_tokens: 0,\n output_tokens: 0,\n total_tokens: 0,\n };\n const usage_metadata: UsageMetadata = {\n input_tokens: left.input_tokens + right.input_tokens,\n output_tokens: left.output_tokens + right.output_tokens,\n total_tokens: left.total_tokens + right.total_tokens,\n // Do not include `input_token_details` / `output_token_details` keys in combined fields\n // unless their values are defined.\n ...(Object.keys(inputTokenDetails).length > 0 && {\n input_token_details: inputTokenDetails,\n }),\n ...(Object.keys(outputTokenDetails).length > 0 && {\n output_token_details: outputTokenDetails,\n }),\n };\n combinedFields.usage_metadata = usage_metadata;\n }\n return new AIMessageChunk(combinedFields);\n }\n}\n"],"mappings":";;;;;;;;;AA8HA,IAAa,YAAb,cAA+BA,yBAAuC;CAGpE,aAA0B,CAAE;CAE5B,qBAAyC,CAAE;;;;CAK3C;CAEA,IAAI,aAAqC;AAEvC,SAAO;GACL,GAAG,MAAM;GACT,YAAY;GACZ,oBAAoB;EACrB;CACF;CAED,YACEC,QAEAC,QACA;EACA,IAAIC;AACJ,MAAI,OAAO,WAAW,UACpB,aAAa;GACX,SAAS;GACT,YAAY,CAAE;GACd,oBAAoB,CAAE;GACtB,mBAAmB,UAAU,CAAE;EAChC;OACI;GACL,aAAa;GACb,MAAM,eAAe,WAAW,mBAAmB;GACnD,MAAM,YAAY,WAAW;AAC7B,OACE,EAAE,gBAAgB,SAClB,aAAa,SAAS,MACrB,cAAc,UAAa,UAAU,WAAW,IAEjD,QAAQ,KACN;IACE;IACA;IACA;IACA;GACD,EAAC,KAAK,IAAI,CACZ;AAEH,OAAI;AACF,QAAI,EAAE,gBAAgB,SAAS,cAAc,QAAW;KACtD,MAAM,CAACC,aAAW,iBAAiB,GACjCC,4CAAsB,aAAa;KACrC,WAAW,aAAaD,eAAa,CAAE;KACvC,WAAW,qBAAqB,oBAAoB,CAAE;IACvD,OAAM;KACL,WAAW,aAAa,WAAW,cAAc,CAAE;KACnD,WAAW,qBAAqB,WAAW,sBAAsB,CAAE;IACpE;GACF,SAAQ,GAAG;IAEV,WAAW,aAAa,CAAE;IAC1B,WAAW,qBAAqB,CAAE;GACnC;AACD,OAAI,WAAW,kBAAkB,QAAW;IAE1C,WAAW,cAAc,KACvB,GAAG,WAAW,WAAW,IAAI,CAAC,cAAc;KAC1C,MAAM;KACN,IAAI,SAAS;KACb,MAAM,SAAS;KACf,MAAM,SAAS;IAChB,GAAE,CACJ;IAED,MAAM,mBAAmB,WAAW,cACjC,OACC,CAAC,UAAU,MAAM,SAAS,YAC3B,CACA,OACC,CAAC,UACC,CAAC,WAAW,YAAY,KACtB,CAAC,aACC,SAAS,OAAO,MAAM,MAAM,SAAS,SAAS,MAAM,KACvD,CACJ;AACH,QAAI,iBAAiB,SAAS,GAC5B,WAAW,aAAa,iBAAiB,IAAI,CAAC,WAAW;KACvD,IAAI,MAAM;KACV,MAAM,MAAM;KACZ,MAAM,MAAM;IACb,GAAE;GAEN;EACF;EAGD,MAAM,WAAW;AACjB,MAAI,OAAO,eAAe,UAAU;GAClC,KAAK,aAAa,WAAW,cAAc,KAAK;GAChD,KAAK,qBACH,WAAW,sBAAsB,KAAK;EACzC;EACD,KAAK,iBAAiB,WAAW;CAClC;CAED,OAAO,UAAU;AACf,SAAO;CACR;CAED,WAAwB;AACtB,SAAO;CACR;CAED,IAAI,gBAA8C;AAChD,MAAI,KAAK,mBAAmB,mBAAmB,KAC7C,QAAO,KAAK;EAGd,MAAM,gBAAgB,KAAK,mBAAmB;AAC9C,MAAI,eAAe;GACjB,MAAM,aAAaE,4BAAc,cAAc;AAC/C,OAAI,WACF,QAAO,WAAW,iBAAiB,KAAK;EAE3C;EAED,MAAM,SAAS,MAAM;AAErB,MAAI,KAAK,YAAY;GACnB,MAAM,mBAAmB,KAAK,WAAW,OACvC,CAAC,UACC,CAAC,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,MAAM,MAAM,EAAE,SAAS,MAAM,KAAK,CAClE;GACD,OAAO,KACL,GAAG,iBAAiB,IAAI,CAAC,WAAW;IAClC,GAAG;IACH,MAAM;IACN,IAAI,MAAM;IACV,MAAM,MAAM;IACZ,MAAM,MAAM;GACb,GAAE,CACJ;EACF;AAED,SAAO;CACR;CAED,IAAa,mBAA4C;AACvD,SAAO;GACL,GAAG,MAAM;GACT,YAAY,KAAK;GACjB,oBAAoB,KAAK;GACzB,gBAAgB,KAAK;EACtB;CACF;AACF;AAED,SAAgB,YAAYC,GAAgC;AAC1D,QAAO,EAAE,UAAU,KAAK;AACzB;AAED,SAAgB,iBAAiBC,GAA0C;AACzE,QAAO,EAAE,UAAU,KAAK;AACzB;;;;;AAUD,IAAa,iBAAb,MAAa,uBAAuBC,8BAAiB;CAInD,aAA0B,CAAE;CAE5B,qBAAyC,CAAE;CAE3C,mBAAqC,CAAE;;;;CAKvC;CAEA,YAAYC,QAAuC;EACjD,IAAIC;AACJ,MAAI,OAAO,WAAW,UACpB,aAAa;GACX,SAAS;GACT,YAAY,CAAE;GACd,oBAAoB,CAAE;GACtB,kBAAkB,CAAE;EACrB;WACQ,OAAO,qBAAqB,QACrC,aAAa;GACX,GAAG;GACH,YAAY,OAAO,cAAc,CAAE;GACnC,oBAAoB,CAAE;GACtB,kBAAkB,CAAE;GACpB,gBACE,OAAO,mBAAmB,SACtB,OAAO,iBACP;EACP;OACI;GACL,MAAM,uBAAuB,OAAO,iBAAiB,OACnD,CAAC,KAAK,UAAU;IAGd,MAAM,UAAU,MAAM,MAAM,CAAC,SAAS,EAAE,MAAM,SAAS,GAAG;IAC1D,IAAI,WAAW,IAAI,YAAY,CAAE;IACjC,IAAI,SAAS,KAAK,MAAM;AACxB,WAAO;GACR,GACD,CAAE,EACH;GAED,MAAMC,YAAwB,CAAE;GAChC,MAAMC,mBAAsC,CAAE;AAC9C,QAAK,MAAM,CAAC,IAAI,OAAO,IAAI,OAAO,QAAQ,qBAAqB,EAAE;IAC/D,IAAI,aAAa,CAAE;IACnB,MAA
M,OAAO,OAAO,IAAI,QAAQ;IAChC,MAAM,aAAa,OAAO,IAAI,CAAC,MAAM,EAAE,QAAQ,GAAG,CAAC,KAAK,GAAG;IAC3D,MAAM,UAAU,WAAW,SAAS,aAAa;IAEjD,MAAM,aAAa,OAAO,IAAI,MAAM;AACpC,QAAI;KACF,aAAaC,8BAAiB,QAAQ;AACtC,SACE,eAAe,QACf,OAAO,eAAe,YACtB,MAAM,QAAQ,WAAW,CAEzB,OAAM,IAAI,MAAM;KAElB,UAAU,KAAK;MACb;MACA,MAAM;MACN,IAAI;MACJ,MAAM;KACP,EAAC;IACH,SAAQ,GAAG;KACV,iBAAiB,KAAK;MACpB;MACA,MAAM;MACN,IAAI;MACJ,OAAO;MACP,MAAM;KACP,EAAC;IACH;GACF;GACD,aAAa;IACX,GAAG;IACH,YAAY;IACZ,oBAAoB;IACpB,gBACE,OAAO,mBAAmB,SACtB,OAAO,iBACP;GACP;EACF;EAGD,MAAM,WAAW;EACjB,KAAK,mBACH,WAAW,oBAAoB,KAAK;EACtC,KAAK,aAAa,WAAW,cAAc,KAAK;EAChD,KAAK,qBACH,WAAW,sBAAsB,KAAK;EACxC,KAAK,iBAAiB,WAAW;CAClC;CAED,IAAI,aAAqC;AAEvC,SAAO;GACL,GAAG,MAAM;GACT,YAAY;GACZ,oBAAoB;GACpB,kBAAkB;EACnB;CACF;CAED,OAAO,UAAU;AACf,SAAO;CACR;CAED,WAAwB;AACtB,SAAO;CACR;CAED,IAAI,gBAA8C;AAChD,MAAI,KAAK,mBAAmB,mBAAmB,KAC7C,QAAO,KAAK;EAGd,MAAM,gBAAgB,KAAK,mBAAmB;AAC9C,MAAI,eAAe;GACjB,MAAM,aAAaR,4BAAc,cAAc;AAC/C,OAAI,WACF,QAAO,WAAW,iBAAiB,KAAK;EAE3C;EAED,MAAM,SAAS,MAAM;AAErB,MAAI,KAAK,YACP;OAAI,OAAO,KAAK,YAAY,UAAU;IACpC,MAAM,mBAAmB,KAAK,QAC3B,OAAO,CAAC,UAAU,MAAM,SAAS,YAAY,CAC7C,IAAI,CAAC,UAAU,MAAM,GAAG;AAC3B,SAAK,MAAM,YAAY,KAAK,WAC1B,KAAI,SAAS,MAAM,CAAC,iBAAiB,SAAS,SAAS,GAAG,EACxD,OAAO,KAAK;KACV,GAAG;KACH,MAAM;KACN,IAAI,SAAS;KACb,MAAM,SAAS;KACf,MAAM,SAAS;IAChB,EAAC;GAGP;;AAGH,SAAO;CACR;CAED,IAAa,mBAA4C;AACvD,SAAO;GACL,GAAG,MAAM;GACT,YAAY,KAAK;GACjB,kBAAkB,KAAK;GACvB,oBAAoB,KAAK;GACzB,gBAAgB,KAAK;EACtB;CACF;CAED,OAAOS,OAAuB;EAC5B,MAAMC,iBAAuC;GAC3C,SAASC,0BAAa,KAAK,SAAS,MAAM,QAAQ;GAClD,mBAAmBC,yBACjB,KAAK,mBACL,MAAM,kBACP;GACD,mBAAmBA,yBACjB,KAAK,mBACL,MAAM,kBACP;GACD,kBAAkB,CAAE;GACpB,IAAI,KAAK,MAAM,MAAM;EACtB;AACD,MACE,KAAK,qBAAqB,UAC1B,MAAM,qBAAqB,QAC3B;GACA,MAAM,eAAeC,yBACnB,KAAK,kBACL,MAAM,iBACP;AACD,OAAI,iBAAiB,UAAa,aAAa,SAAS,GACtD,eAAe,mBAAmB;EAErC;AACD,MACE,KAAK,mBAAmB,UACxB,MAAM,mBAAmB,QACzB;GACA,MAAMC,oBAAuC;IAC3C,IAAK,KAAK,gBAAgB,qBAAqB,UAAU,UACvD,MAAM,gBAAgB,qBAAqB,UAAU,WAAc,EACnE,QACG,KAAK,gBAAgB,qBAAqB,SAAS,MACnD,MAAM,gBAAgB,qBAAqB,SAAS,GACxD;IACD,IAAK,KAAK,gBAAgB,qBAAqB,eAC7C,UACA,MAAM,gBAAgB,qBAAqB,eACzC,WAAc,EAChB,aACG,KAAK,gBAAgB,qBAAqB,cAAc,MACxD,MAAM,gBAAgB,qBAAqB,cAAc,GAC7D;IACD,IAAK,KAAK,gBAAgB,qBAAqB,mBAC7C,UACA,MAAM,gBAAgB,qBAAqB,mBACzC,WAAc,EAChB,iBACG,KAAK,gBAAgB,qBAAqB,kBAAkB,MAC5D,MAAM,gBAAgB,qBAAqB,kBAAkB,GACjE;GACF;GAED,MAAMC,qBAAyC;IAC7C,IAAK,KAAK,gBAAgB,sBAAsB,UAAU,UACxD,MAAM,gBAAgB,sBAAsB,UAAU,WAAc,EACpE,QACG,KAAK,gBAAgB,sBAAsB,SAAS,MACpD,MAAM,gBAAgB,sBAAsB,SAAS,GACzD;IACD,IAAK,KAAK,gBAAgB,sBAAsB,cAC9C,UACA,MAAM,gBAAgB,sBAAsB,cAC1C,WAAc,EAChB,YACG,KAAK,gBAAgB,sBAAsB,aAAa,MACxD,MAAM,gBAAgB,sBAAsB,aAAa,GAC7D;GACF;GAED,MAAMC,OAAsB,KAAK,kBAAkB;IACjD,cAAc;IACd,eAAe;IACf,cAAc;GACf;GACD,MAAMC,QAAuB,MAAM,kBAAkB;IACnD,cAAc;IACd,eAAe;IACf,cAAc;GACf;GACD,MAAMC,iBAAgC;IACpC,cAAc,KAAK,eAAe,MAAM;IACxC,eAAe,KAAK,gBAAgB,MAAM;IAC1C,cAAc,KAAK,eAAe,MAAM;IAGxC,GAAI,OAAO,KAAK,kBAAkB,CAAC,SAAS,KAAK,EAC/C,qBAAqB,kBACtB;IACD,GAAI,OAAO,KAAK,mBAAmB,CAAC,SAAS,KAAK,EAChD,sBAAsB,mBACvB;GACF;GACD,eAAe,iBAAiB;EACjC;AACD,SAAO,IAAI,eAAe;CAC3B;AACF"}
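The alpha.2 source embedded in the map above shows `AIMessageChunk.concat` merging content, tool call chunks, and `usage_metadata`, summing `input_tokens` / `output_tokens` / `total_tokens` (plus any token-detail fields) entry by entry. A minimal sketch of that behavior, assuming the standard `@langchain/core/messages` entry point; the values are illustrative, not taken from the diff:

```ts
import { AIMessageChunk } from "@langchain/core/messages";

// Two streamed chunks, as a chat model integration might emit them.
const first = new AIMessageChunk({
  content: "Hello",
  usage_metadata: { input_tokens: 5, output_tokens: 1, total_tokens: 6 },
});
const second = new AIMessageChunk({
  content: ", world",
  usage_metadata: { input_tokens: 0, output_tokens: 2, total_tokens: 2 },
});

// concat() merges the string content and sums the usage counters field by
// field, per the alpha.2 implementation shown above.
const merged = first.concat(second);
// merged.content === "Hello, world"
// merged.usage_metadata => { input_tokens: 5, output_tokens: 3, total_tokens: 8 }
```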
+
{"version":3,"file":"ai.cjs","names":["BaseMessage","fields: $InferMessageContent<TStructure, \"ai\"> | AIMessageFields<TStructure>","initParams: AIMessageFields<TStructure>","toolCalls","defaultToolCallParser","getTranslator","obj: unknown","x: BaseMessage","x: BaseMessageChunk","BaseMessageChunk","fields:\n | $InferMessageContent<TStructure, \"ai\">\n | AIMessageChunkFields<TStructure>","initParams: AIMessageChunkFields<TStructure>","toolCalls: ToolCall[]","invalidToolCalls: InvalidToolCall[]","parsedArgs: Record<string, unknown> | null","parsePartialJson","chunk: AIMessageChunk<TStructure>","combinedFields: AIMessageChunkFields","mergeContent","_mergeDicts","mergeResponseMetadata","_mergeLists","mergeUsageMetadata"],"sources":["../../src/messages/ai.ts"],"sourcesContent":["import { parsePartialJson } from \"../utils/json.js\";\nimport {\n BaseMessage,\n BaseMessageChunk,\n mergeContent,\n _mergeDicts,\n BaseMessageFields,\n _mergeLists,\n} from \"./base.js\";\nimport { getTranslator } from \"./block_translators/index.js\";\nimport { ContentBlock } from \"./content/index.js\";\nimport {\n $InferMessageContent,\n $InferMessageProperty,\n MessageStructure,\n} from \"./message.js\";\nimport { mergeResponseMetadata, mergeUsageMetadata } from \"./metadata.js\";\nimport {\n InvalidToolCall,\n ToolCall,\n ToolCallChunk,\n defaultToolCallParser,\n} from \"./tool.js\";\nimport { Constructor } from \"./utils.js\";\n\nexport interface AIMessageFields<\n TStructure extends MessageStructure = MessageStructure\n> extends BaseMessageFields<TStructure, \"ai\"> {\n tool_calls?: ToolCall[];\n invalid_tool_calls?: InvalidToolCall[];\n usage_metadata?: $InferMessageProperty<TStructure, \"ai\", \"usage_metadata\">;\n}\n\nexport class AIMessage<TStructure extends MessageStructure = MessageStructure>\n extends BaseMessage<TStructure, \"ai\">\n implements AIMessageFields<TStructure>\n{\n readonly type = \"ai\" as const;\n\n tool_calls?: ToolCall[] = [];\n\n invalid_tool_calls?: InvalidToolCall[] = [];\n\n usage_metadata?: AIMessageFields<TStructure>[\"usage_metadata\"];\n\n get lc_aliases(): Record<string, string> {\n // exclude snake case conversion to pascal case\n return {\n ...super.lc_aliases,\n tool_calls: \"tool_calls\",\n invalid_tool_calls: \"invalid_tool_calls\",\n };\n }\n\n constructor(\n fields: $InferMessageContent<TStructure, \"ai\"> | AIMessageFields<TStructure>\n ) {\n let initParams: AIMessageFields<TStructure>;\n if (typeof fields === \"string\" || Array.isArray(fields)) {\n initParams = {\n content: fields,\n tool_calls: [],\n invalid_tool_calls: [],\n additional_kwargs: {},\n };\n } else {\n initParams = fields;\n const rawToolCalls = initParams.additional_kwargs?.tool_calls;\n const toolCalls = initParams.tool_calls;\n if (\n !(rawToolCalls == null) &&\n rawToolCalls.length > 0 &&\n (toolCalls === undefined || toolCalls.length === 0)\n ) {\n console.warn(\n [\n \"New LangChain packages are available that more efficiently handle\",\n \"tool calling.\\n\\nPlease upgrade your packages to versions that set\",\n \"message tool calls. e.g., `pnpm install @langchain/anthropic`,\",\n \"pnpm install @langchain/openai`, etc.\",\n ].join(\" \")\n );\n }\n try {\n if (!(rawToolCalls == null) && toolCalls === undefined) {\n const [toolCalls, invalidToolCalls] =\n defaultToolCallParser(rawToolCalls);\n initParams.tool_calls = toolCalls ?? [];\n initParams.invalid_tool_calls = invalidToolCalls ?? [];\n } else {\n initParams.tool_calls = initParams.tool_calls ?? 
[];\n initParams.invalid_tool_calls = initParams.invalid_tool_calls ?? [];\n }\n } catch (e) {\n // Do nothing if parsing fails\n initParams.tool_calls = [];\n initParams.invalid_tool_calls = [];\n }\n if (initParams.contentBlocks !== undefined) {\n // Add constructor tool calls as content blocks\n initParams.contentBlocks.push(\n ...initParams.tool_calls.map((toolCall) => ({\n type: \"tool_call\" as const,\n id: toolCall.id,\n name: toolCall.name,\n args: toolCall.args,\n }))\n );\n // Add content block tool calls that aren't in the constructor tool calls\n const missingToolCalls = initParams.contentBlocks\n .filter<ContentBlock.Tools.ToolCall>(\n (block) => block.type === \"tool_call\"\n )\n .filter(\n (block) =>\n !initParams.tool_calls?.some(\n (toolCall) =>\n toolCall.id === block.id && toolCall.name === block.name\n )\n );\n if (missingToolCalls.length > 0) {\n initParams.tool_calls = missingToolCalls.map((block) => ({\n type: \"tool_call\" as const,\n id: block.id!,\n name: block.name,\n args: block.args as Record<string, unknown>,\n }));\n }\n }\n }\n // Sadly, TypeScript only allows super() calls at root if the class has\n // properties with initializers, so we have to check types twice.\n super(initParams);\n if (typeof initParams !== \"string\") {\n this.tool_calls = initParams.tool_calls ?? this.tool_calls;\n this.invalid_tool_calls =\n initParams.invalid_tool_calls ?? this.invalid_tool_calls;\n }\n this.usage_metadata = initParams.usage_metadata;\n }\n\n static lc_name() {\n return \"AIMessage\";\n }\n\n get contentBlocks(): Array<ContentBlock.Standard> {\n if (\n \"output_version\" in this.response_metadata &&\n this.response_metadata.output_version === \"v1\"\n ) {\n return this.content as Array<ContentBlock.Standard>;\n }\n\n if (\n \"model_provider\" in this.response_metadata &&\n typeof this.response_metadata.model_provider === \"string\"\n ) {\n const translator = getTranslator(this.response_metadata.model_provider);\n if (translator) {\n return translator.translateContent(this);\n }\n }\n\n const blocks = super.contentBlocks;\n\n if (this.tool_calls) {\n const missingToolCalls = this.tool_calls.filter(\n (block) =>\n !blocks.some((b) => b.id === block.id && b.name === block.name)\n );\n blocks.push(\n ...missingToolCalls.map((block) => ({\n ...block,\n type: \"tool_call\" as const,\n id: block.id,\n name: block.name,\n args: block.args,\n }))\n );\n }\n\n return blocks;\n }\n\n override get _printableFields(): Record<string, unknown> {\n return {\n ...super._printableFields,\n tool_calls: this.tool_calls,\n invalid_tool_calls: this.invalid_tool_calls,\n usage_metadata: this.usage_metadata,\n };\n }\n\n static isInstance(obj: unknown): obj is AIMessage {\n return super.isInstance(obj) && obj.type === \"ai\";\n }\n}\n\n/**\n * @deprecated Use {@link AIMessage.isInstance} instead\n */\nexport function isAIMessage<TStructure extends MessageStructure>(\n x: BaseMessage\n): x is AIMessage<TStructure> {\n return x._getType() === \"ai\";\n}\n\n/**\n * @deprecated Use {@link AIMessageChunk.isInstance} instead\n */\nexport function isAIMessageChunk<TStructure extends MessageStructure>(\n x: BaseMessageChunk\n): x is AIMessageChunk<TStructure> {\n return x._getType() === \"ai\";\n}\n\nexport type AIMessageChunkFields<\n TStructure extends MessageStructure = MessageStructure\n> = AIMessageFields<TStructure> & {\n tool_call_chunks?: ToolCallChunk[];\n};\n\n/**\n * Represents a chunk of an AI message, which can be concatenated with\n * other AI message chunks.\n */\nexport class 
AIMessageChunk<\n TStructure extends MessageStructure = MessageStructure\n >\n extends BaseMessageChunk<TStructure, \"ai\">\n implements AIMessage<TStructure>, AIMessageChunkFields<TStructure>\n{\n readonly type = \"ai\" as const;\n\n tool_calls?: ToolCall[] = [];\n\n invalid_tool_calls?: InvalidToolCall[] = [];\n\n tool_call_chunks?: ToolCallChunk[] = [];\n\n usage_metadata?: AIMessageChunkFields<TStructure>[\"usage_metadata\"];\n\n constructor(\n fields:\n | $InferMessageContent<TStructure, \"ai\">\n | AIMessageChunkFields<TStructure>\n ) {\n let initParams: AIMessageChunkFields<TStructure>;\n if (typeof fields === \"string\" || Array.isArray(fields)) {\n initParams = {\n content: fields,\n tool_calls: [],\n invalid_tool_calls: [],\n tool_call_chunks: [],\n };\n } else if (fields.tool_call_chunks === undefined) {\n initParams = {\n ...fields,\n tool_calls: fields.tool_calls ?? [],\n invalid_tool_calls: [],\n tool_call_chunks: [],\n usage_metadata:\n fields.usage_metadata !== undefined\n ? fields.usage_metadata\n : undefined,\n };\n } else {\n const groupedToolCallChunk = fields.tool_call_chunks.reduce(\n (acc, chunk) => {\n // Assign a fallback ID if the chunk doesn't have one\n // This can happen with tools that have empty schemas\n const chunkId = chunk.id || `fallback-${chunk.index || 0}`;\n acc[chunkId] = acc[chunkId] ?? [];\n acc[chunkId].push(chunk);\n return acc;\n },\n {} as Record<string, ToolCallChunk[]>\n );\n\n const toolCalls: ToolCall[] = [];\n const invalidToolCalls: InvalidToolCall[] = [];\n for (const [id, chunks] of Object.entries(groupedToolCallChunk)) {\n let parsedArgs: Record<string, unknown> | null = null;\n const name = chunks[0]?.name ?? \"\";\n const joinedArgs = chunks.map((c) => c.args || \"\").join(\"\");\n const argsStr = joinedArgs.length ? joinedArgs : \"{}\";\n // Use the original ID from the first chunk if it exists, otherwise use the grouped ID\n const originalId = chunks[0]?.id || id;\n try {\n parsedArgs = parsePartialJson(argsStr);\n if (\n parsedArgs === null ||\n typeof parsedArgs !== \"object\" ||\n Array.isArray(parsedArgs)\n ) {\n throw new Error(\"Malformed tool call chunk args.\");\n }\n toolCalls.push({\n name,\n args: parsedArgs,\n id: originalId,\n type: \"tool_call\",\n });\n } catch (e) {\n invalidToolCalls.push({\n name,\n args: argsStr,\n id: originalId,\n error: \"Malformed args.\",\n type: \"invalid_tool_call\",\n });\n }\n }\n initParams = {\n ...fields,\n tool_calls: toolCalls,\n invalid_tool_calls: invalidToolCalls,\n usage_metadata:\n fields.usage_metadata !== undefined\n ? fields.usage_metadata\n : undefined,\n };\n }\n // Sadly, TypeScript only allows super() calls at root if the class has\n // properties with initializers, so we have to check types twice.\n super(initParams);\n this.tool_call_chunks =\n initParams.tool_call_chunks ?? this.tool_call_chunks;\n this.tool_calls = initParams.tool_calls ?? this.tool_calls;\n this.invalid_tool_calls =\n initParams.invalid_tool_calls ?? 
this.invalid_tool_calls;\n this.usage_metadata = initParams.usage_metadata;\n }\n\n get lc_aliases(): Record<string, string> {\n // exclude snake case conversion to pascal case\n return {\n ...super.lc_aliases,\n tool_calls: \"tool_calls\",\n invalid_tool_calls: \"invalid_tool_calls\",\n tool_call_chunks: \"tool_call_chunks\",\n };\n }\n\n static lc_name() {\n return \"AIMessageChunk\";\n }\n\n get contentBlocks(): Array<ContentBlock.Standard> {\n if (\n \"output_version\" in this.response_metadata &&\n this.response_metadata.output_version === \"v1\"\n ) {\n return this.content as Array<ContentBlock.Standard>;\n }\n\n if (\n \"model_provider\" in this.response_metadata &&\n typeof this.response_metadata.model_provider === \"string\"\n ) {\n const translator = getTranslator(this.response_metadata.model_provider);\n if (translator) {\n return translator.translateContent(this);\n }\n }\n\n const blocks = super.contentBlocks;\n\n if (this.tool_calls) {\n if (typeof this.content !== \"string\") {\n const contentToolCalls = this.content\n .filter((block) => block.type === \"tool_call\")\n .map((block) => block.id);\n for (const toolCall of this.tool_calls) {\n if (toolCall.id && !contentToolCalls.includes(toolCall.id)) {\n blocks.push({\n ...toolCall,\n type: \"tool_call\",\n id: toolCall.id,\n name: toolCall.name,\n args: toolCall.args,\n });\n }\n }\n }\n }\n\n return blocks;\n }\n\n override get _printableFields(): Record<string, unknown> {\n return {\n ...super._printableFields,\n tool_calls: this.tool_calls,\n tool_call_chunks: this.tool_call_chunks,\n invalid_tool_calls: this.invalid_tool_calls,\n usage_metadata: this.usage_metadata,\n };\n }\n\n concat(chunk: AIMessageChunk<TStructure>) {\n const combinedFields: AIMessageChunkFields = {\n content: mergeContent(this.content, chunk.content),\n additional_kwargs: _mergeDicts(\n this.additional_kwargs,\n chunk.additional_kwargs\n ),\n response_metadata: mergeResponseMetadata(\n this.response_metadata,\n chunk.response_metadata\n ),\n tool_call_chunks: [],\n id: this.id ?? 
chunk.id,\n };\n if (\n this.tool_call_chunks !== undefined ||\n chunk.tool_call_chunks !== undefined\n ) {\n const rawToolCalls = _mergeLists(\n this.tool_call_chunks,\n chunk.tool_call_chunks\n );\n if (rawToolCalls !== undefined && rawToolCalls.length > 0) {\n combinedFields.tool_call_chunks = rawToolCalls;\n }\n }\n if (\n this.usage_metadata !== undefined ||\n chunk.usage_metadata !== undefined\n ) {\n combinedFields.usage_metadata = mergeUsageMetadata(\n this.usage_metadata,\n chunk.usage_metadata\n );\n }\n const Cls = this.constructor as Constructor<this>;\n return new Cls(combinedFields);\n }\n\n static isInstance(obj: unknown): obj is AIMessageChunk {\n return super.isInstance(obj) && obj.type === \"ai\";\n }\n}\n"],"mappings":";;;;;;;AAiCA,IAAa,YAAb,cACUA,yBAEV;CACE,AAAS,OAAO;CAEhB,aAA0B,CAAE;CAE5B,qBAAyC,CAAE;CAE3C;CAEA,IAAI,aAAqC;AAEvC,SAAO;GACL,GAAG,MAAM;GACT,YAAY;GACZ,oBAAoB;EACrB;CACF;CAED,YACEC,QACA;EACA,IAAIC;AACJ,MAAI,OAAO,WAAW,YAAY,MAAM,QAAQ,OAAO,EACrD,aAAa;GACX,SAAS;GACT,YAAY,CAAE;GACd,oBAAoB,CAAE;GACtB,mBAAmB,CAAE;EACtB;OACI;GACL,aAAa;GACb,MAAM,eAAe,WAAW,mBAAmB;GACnD,MAAM,YAAY,WAAW;AAC7B,OACE,EAAE,gBAAgB,SAClB,aAAa,SAAS,MACrB,cAAc,UAAa,UAAU,WAAW,IAEjD,QAAQ,KACN;IACE;IACA;IACA;IACA;GACD,EAAC,KAAK,IAAI,CACZ;AAEH,OAAI;AACF,QAAI,EAAE,gBAAgB,SAAS,cAAc,QAAW;KACtD,MAAM,CAACC,aAAW,iBAAiB,GACjCC,4CAAsB,aAAa;KACrC,WAAW,aAAaD,eAAa,CAAE;KACvC,WAAW,qBAAqB,oBAAoB,CAAE;IACvD,OAAM;KACL,WAAW,aAAa,WAAW,cAAc,CAAE;KACnD,WAAW,qBAAqB,WAAW,sBAAsB,CAAE;IACpE;GACF,SAAQ,GAAG;IAEV,WAAW,aAAa,CAAE;IAC1B,WAAW,qBAAqB,CAAE;GACnC;AACD,OAAI,WAAW,kBAAkB,QAAW;IAE1C,WAAW,cAAc,KACvB,GAAG,WAAW,WAAW,IAAI,CAAC,cAAc;KAC1C,MAAM;KACN,IAAI,SAAS;KACb,MAAM,SAAS;KACf,MAAM,SAAS;IAChB,GAAE,CACJ;IAED,MAAM,mBAAmB,WAAW,cACjC,OACC,CAAC,UAAU,MAAM,SAAS,YAC3B,CACA,OACC,CAAC,UACC,CAAC,WAAW,YAAY,KACtB,CAAC,aACC,SAAS,OAAO,MAAM,MAAM,SAAS,SAAS,MAAM,KACvD,CACJ;AACH,QAAI,iBAAiB,SAAS,GAC5B,WAAW,aAAa,iBAAiB,IAAI,CAAC,WAAW;KACvD,MAAM;KACN,IAAI,MAAM;KACV,MAAM,MAAM;KACZ,MAAM,MAAM;IACb,GAAE;GAEN;EACF;EAGD,MAAM,WAAW;AACjB,MAAI,OAAO,eAAe,UAAU;GAClC,KAAK,aAAa,WAAW,cAAc,KAAK;GAChD,KAAK,qBACH,WAAW,sBAAsB,KAAK;EACzC;EACD,KAAK,iBAAiB,WAAW;CAClC;CAED,OAAO,UAAU;AACf,SAAO;CACR;CAED,IAAI,gBAA8C;AAChD,MACE,oBAAoB,KAAK,qBACzB,KAAK,kBAAkB,mBAAmB,KAE1C,QAAO,KAAK;AAGd,MACE,oBAAoB,KAAK,qBACzB,OAAO,KAAK,kBAAkB,mBAAmB,UACjD;GACA,MAAM,aAAaE,4BAAc,KAAK,kBAAkB,eAAe;AACvE,OAAI,WACF,QAAO,WAAW,iBAAiB,KAAK;EAE3C;EAED,MAAM,SAAS,MAAM;AAErB,MAAI,KAAK,YAAY;GACnB,MAAM,mBAAmB,KAAK,WAAW,OACvC,CAAC,UACC,CAAC,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,MAAM,MAAM,EAAE,SAAS,MAAM,KAAK,CAClE;GACD,OAAO,KACL,GAAG,iBAAiB,IAAI,CAAC,WAAW;IAClC,GAAG;IACH,MAAM;IACN,IAAI,MAAM;IACV,MAAM,MAAM;IACZ,MAAM,MAAM;GACb,GAAE,CACJ;EACF;AAED,SAAO;CACR;CAED,IAAa,mBAA4C;AACvD,SAAO;GACL,GAAG,MAAM;GACT,YAAY,KAAK;GACjB,oBAAoB,KAAK;GACzB,gBAAgB,KAAK;EACtB;CACF;CAED,OAAO,WAAWC,KAAgC;AAChD,SAAO,MAAM,WAAW,IAAI,IAAI,IAAI,SAAS;CAC9C;AACF;;;;AAKD,SAAgB,YACdC,GAC4B;AAC5B,QAAO,EAAE,UAAU,KAAK;AACzB;;;;AAKD,SAAgB,iBACdC,GACiC;AACjC,QAAO,EAAE,UAAU,KAAK;AACzB;;;;;AAYD,IAAa,iBAAb,cAGUC,8BAEV;CACE,AAAS,OAAO;CAEhB,aAA0B,CAAE;CAE5B,qBAAyC,CAAE;CAE3C,mBAAqC,CAAE;CAEvC;CAEA,YACEC,QAGA;EACA,IAAIC;AACJ,MAAI,OAAO,WAAW,YAAY,MAAM,QAAQ,OAAO,EACrD,aAAa;GACX,SAAS;GACT,YAAY,CAAE;GACd,oBAAoB,CAAE;GACtB,kBAAkB,CAAE;EACrB;WACQ,OAAO,qBAAqB,QACrC,aAAa;GACX,GAAG;GACH,YAAY,OAAO,cAAc,CAAE;GACnC,oBAAoB,CAAE;GACtB,kBAAkB,CAAE;GACpB,gBACE,OAAO,mBAAmB,SACtB,OAAO,iBACP;EACP;OACI;GACL,MAAM,uBAAuB,OAAO,iBAAiB,OACnD,CAAC,KAAK,UAAU;IAGd,MAAM,UAAU,MAAM,MAAM,CAAC,SAAS,EAAE,MAAM,SAAS,GAAG;IAC1D,IAAI,WAAW,IAAI,YAAY,CAAE;IACjC,IAAI,SAAS,KAAK,MAAM;AACxB,WAAO;GACR,
GACD,CAAE,EACH;GAED,MAAMC,YAAwB,CAAE;GAChC,MAAMC,mBAAsC,CAAE;AAC9C,QAAK,MAAM,CAAC,IAAI,OAAO,IAAI,OAAO,QAAQ,qBAAqB,EAAE;IAC/D,IAAIC,aAA6C;IACjD,MAAM,OAAO,OAAO,IAAI,QAAQ;IAChC,MAAM,aAAa,OAAO,IAAI,CAAC,MAAM,EAAE,QAAQ,GAAG,CAAC,KAAK,GAAG;IAC3D,MAAM,UAAU,WAAW,SAAS,aAAa;IAEjD,MAAM,aAAa,OAAO,IAAI,MAAM;AACpC,QAAI;KACF,aAAaC,8BAAiB,QAAQ;AACtC,SACE,eAAe,QACf,OAAO,eAAe,YACtB,MAAM,QAAQ,WAAW,CAEzB,OAAM,IAAI,MAAM;KAElB,UAAU,KAAK;MACb;MACA,MAAM;MACN,IAAI;MACJ,MAAM;KACP,EAAC;IACH,SAAQ,GAAG;KACV,iBAAiB,KAAK;MACpB;MACA,MAAM;MACN,IAAI;MACJ,OAAO;MACP,MAAM;KACP,EAAC;IACH;GACF;GACD,aAAa;IACX,GAAG;IACH,YAAY;IACZ,oBAAoB;IACpB,gBACE,OAAO,mBAAmB,SACtB,OAAO,iBACP;GACP;EACF;EAGD,MAAM,WAAW;EACjB,KAAK,mBACH,WAAW,oBAAoB,KAAK;EACtC,KAAK,aAAa,WAAW,cAAc,KAAK;EAChD,KAAK,qBACH,WAAW,sBAAsB,KAAK;EACxC,KAAK,iBAAiB,WAAW;CAClC;CAED,IAAI,aAAqC;AAEvC,SAAO;GACL,GAAG,MAAM;GACT,YAAY;GACZ,oBAAoB;GACpB,kBAAkB;EACnB;CACF;CAED,OAAO,UAAU;AACf,SAAO;CACR;CAED,IAAI,gBAA8C;AAChD,MACE,oBAAoB,KAAK,qBACzB,KAAK,kBAAkB,mBAAmB,KAE1C,QAAO,KAAK;AAGd,MACE,oBAAoB,KAAK,qBACzB,OAAO,KAAK,kBAAkB,mBAAmB,UACjD;GACA,MAAM,aAAaV,4BAAc,KAAK,kBAAkB,eAAe;AACvE,OAAI,WACF,QAAO,WAAW,iBAAiB,KAAK;EAE3C;EAED,MAAM,SAAS,MAAM;AAErB,MAAI,KAAK,YACP;OAAI,OAAO,KAAK,YAAY,UAAU;IACpC,MAAM,mBAAmB,KAAK,QAC3B,OAAO,CAAC,UAAU,MAAM,SAAS,YAAY,CAC7C,IAAI,CAAC,UAAU,MAAM,GAAG;AAC3B,SAAK,MAAM,YAAY,KAAK,WAC1B,KAAI,SAAS,MAAM,CAAC,iBAAiB,SAAS,SAAS,GAAG,EACxD,OAAO,KAAK;KACV,GAAG;KACH,MAAM;KACN,IAAI,SAAS;KACb,MAAM,SAAS;KACf,MAAM,SAAS;IAChB,EAAC;GAGP;;AAGH,SAAO;CACR;CAED,IAAa,mBAA4C;AACvD,SAAO;GACL,GAAG,MAAM;GACT,YAAY,KAAK;GACjB,kBAAkB,KAAK;GACvB,oBAAoB,KAAK;GACzB,gBAAgB,KAAK;EACtB;CACF;CAED,OAAOW,OAAmC;EACxC,MAAMC,iBAAuC;GAC3C,SAASC,0BAAa,KAAK,SAAS,MAAM,QAAQ;GAClD,mBAAmBC,yBACjB,KAAK,mBACL,MAAM,kBACP;GACD,mBAAmBC,uCACjB,KAAK,mBACL,MAAM,kBACP;GACD,kBAAkB,CAAE;GACpB,IAAI,KAAK,MAAM,MAAM;EACtB;AACD,MACE,KAAK,qBAAqB,UAC1B,MAAM,qBAAqB,QAC3B;GACA,MAAM,eAAeC,yBACnB,KAAK,kBACL,MAAM,iBACP;AACD,OAAI,iBAAiB,UAAa,aAAa,SAAS,GACtD,eAAe,mBAAmB;EAErC;AACD,MACE,KAAK,mBAAmB,UACxB,MAAM,mBAAmB,QAEzB,eAAe,iBAAiBC,oCAC9B,KAAK,gBACL,MAAM,eACP;EAEH,MAAM,MAAM,KAAK;AACjB,SAAO,IAAI,IAAI;CAChB;CAED,OAAO,WAAWhB,KAAqC;AACrD,SAAO,MAAM,WAAW,IAAI,IAAI,IAAI,SAAS;CAC9C;AACF"}
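The alpha.3 source embedded above makes `AIMessage` / `AIMessageChunk` generic over a `MessageStructure`, marks the standalone `isAIMessage` / `isAIMessageChunk` helpers as `@deprecated`, and points callers at the new static `isInstance` guards. A minimal sketch of the guard change, assuming the standard `@langchain/core/messages` entry point; the message content is illustrative:

```ts
import { AIMessage, isAIMessage } from "@langchain/core/messages";

const msg = new AIMessage({ content: "Hi there" });

// Existing helper, now marked @deprecated in the alpha.3 source above.
const viaHelper = isAIMessage(msg); // true

// Replacement suggested by the deprecation notice: the static guard added
// in alpha.3, which checks the base class and the "ai" message type.
const viaStatic = AIMessage.isInstance(msg); // true
```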