@langchain/core 1.1.5 → 1.1.7
This diff compares the publicly available contents of two package versions as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
- package/CHANGELOG.md +22 -0
- package/dist/caches/index.d.cts.map +1 -1
- package/dist/callbacks/base.cjs.map +1 -1
- package/dist/callbacks/base.d.cts +2 -2
- package/dist/callbacks/base.d.cts.map +1 -1
- package/dist/callbacks/base.d.ts +2 -2
- package/dist/callbacks/base.d.ts.map +1 -1
- package/dist/callbacks/base.js.map +1 -1
- package/dist/callbacks/manager.cjs +3 -3
- package/dist/callbacks/manager.cjs.map +1 -1
- package/dist/callbacks/manager.d.cts +1 -1
- package/dist/callbacks/manager.d.cts.map +1 -1
- package/dist/callbacks/manager.d.ts +1 -1
- package/dist/callbacks/manager.d.ts.map +1 -1
- package/dist/callbacks/manager.js +3 -3
- package/dist/callbacks/manager.js.map +1 -1
- package/dist/load/import_map.cjs +2 -0
- package/dist/load/import_map.cjs.map +1 -1
- package/dist/load/import_map.js +2 -0
- package/dist/load/import_map.js.map +1 -1
- package/dist/load/index.d.cts.map +1 -1
- package/dist/load/index.d.ts.map +1 -1
- package/dist/messages/base.cjs +2 -2
- package/dist/messages/base.cjs.map +1 -1
- package/dist/messages/base.d.cts +1 -1
- package/dist/messages/base.d.cts.map +1 -1
- package/dist/messages/base.d.ts +1 -1
- package/dist/messages/base.d.ts.map +1 -1
- package/dist/messages/base.js +2 -2
- package/dist/messages/base.js.map +1 -1
- package/dist/messages/tool.d.ts.map +1 -1
- package/dist/runnables/base.cjs +1 -1
- package/dist/runnables/base.cjs.map +1 -1
- package/dist/runnables/base.js +1 -1
- package/dist/runnables/base.js.map +1 -1
- package/dist/tracers/base.cjs +5 -2
- package/dist/tracers/base.cjs.map +1 -1
- package/dist/tracers/base.d.cts +1 -1
- package/dist/tracers/base.d.cts.map +1 -1
- package/dist/tracers/base.d.ts +1 -1
- package/dist/tracers/base.d.ts.map +1 -1
- package/dist/tracers/base.js +5 -2
- package/dist/tracers/base.js.map +1 -1
- package/dist/tracers/event_stream.cjs +5 -1
- package/dist/tracers/event_stream.cjs.map +1 -1
- package/dist/tracers/event_stream.d.cts.map +1 -1
- package/dist/tracers/event_stream.d.ts.map +1 -1
- package/dist/tracers/event_stream.js +5 -1
- package/dist/tracers/event_stream.js.map +1 -1
- package/dist/tracers/tracer_langchain.cjs +31 -3
- package/dist/tracers/tracer_langchain.cjs.map +1 -1
- package/dist/tracers/tracer_langchain.d.cts +8 -7
- package/dist/tracers/tracer_langchain.d.cts.map +1 -1
- package/dist/tracers/tracer_langchain.d.ts +8 -7
- package/dist/tracers/tracer_langchain.d.ts.map +1 -1
- package/dist/tracers/tracer_langchain.js +31 -3
- package/dist/tracers/tracer_langchain.js.map +1 -1
- package/dist/utils/context.cjs +107 -0
- package/dist/utils/context.cjs.map +1 -0
- package/dist/utils/context.d.cts +44 -0
- package/dist/utils/context.d.cts.map +1 -0
- package/dist/utils/context.d.ts +44 -0
- package/dist/utils/context.d.ts.map +1 -0
- package/dist/utils/context.js +101 -0
- package/dist/utils/context.js.map +1 -0
- package/dist/utils/testing/chat_models.cjs +4 -1
- package/dist/utils/testing/chat_models.cjs.map +1 -1
- package/dist/utils/testing/chat_models.d.cts +1 -1
- package/dist/utils/testing/chat_models.d.cts.map +1 -1
- package/dist/utils/testing/chat_models.d.ts +1 -1
- package/dist/utils/testing/chat_models.d.ts.map +1 -1
- package/dist/utils/testing/chat_models.js +4 -1
- package/dist/utils/testing/chat_models.js.map +1 -1
- package/dist/vectorstores.d.cts.map +1 -1
- package/package.json +13 -2
- package/utils/context.cjs +1 -0
- package/utils/context.d.cts +1 -0
- package/utils/context.d.ts +1 -0
- package/utils/context.js +1 -0
@@ -103,7 +103,7 @@ var FakeStreamingChatModel = class FakeStreamingChatModel extends require_langua
 }] };
 return generation;
 }
-async *_streamResponseChunks(_messages, _options, runManager) {
+async *_streamResponseChunks(_messages, options, runManager) {
 if (this.thrownErrorString) throw new Error(this.thrownErrorString);
 if (this.chunks?.length) {
 for (const msgChunk of this.chunks) {
@@ -115,6 +115,7 @@ var FakeStreamingChatModel = class FakeStreamingChatModel extends require_langua
 }),
 text: msgChunk.content?.toString() ?? ""
 });
+if (options.signal?.aborted) break;
 yield cg;
 await runManager?.handleLLMNewToken(msgChunk.content, void 0, void 0, void 0, void 0, { chunk: cg });
 }
@@ -128,6 +129,7 @@ var FakeStreamingChatModel = class FakeStreamingChatModel extends require_langua
 message: new require_ai.AIMessageChunk({ content: ch }),
 text: ch
 });
+if (options.signal?.aborted) break;
 yield cg;
 await runManager?.handleLLMNewToken(ch, void 0, void 0, void 0, void 0, { chunk: cg });
 }
@@ -210,6 +212,7 @@ var FakeListChatModel = class FakeListChatModel extends require_language_models_
 await this._sleepIfRequested();
 if (options?.thrownErrorString) throw new Error(options.thrownErrorString);
 const chunk = this._createResponseChunk(text, isLastChunk ? this.generationInfo : void 0);
+if (options.signal?.aborted) break;
 yield chunk;
 runManager?.handleLLMNewToken(text);
 }
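The three hunks above add the same guard to `FakeStreamingChatModel` (both streaming paths) and `FakeListChatModel`: each chunk emission is now preceded by an abort check, so a fake stream can be cancelled mid-flight. A minimal test sketch of how that might be exercised, assuming the `@langchain/core/utils/testing` export and the standard `signal` call option; the abort timing here is purely illustrative:

```typescript
import { FakeListChatModel } from "@langchain/core/utils/testing";

// Sketch: cancel a fake stream partway through and count delivered chunks.
const model = new FakeListChatModel({
  responses: ["hello world"], // streamed one character per chunk
  sleep: 10, // ms between chunks, gives abort() time to land
});

const controller = new AbortController();
let seen = 0;

try {
  const stream = await model.stream("hi", { signal: controller.signal });
  for await (const chunk of stream) {
    seen += 1;
    if (seen === 3) controller.abort(); // later chunks hit the new aborted check
  }
} catch (err) {
  // Depending on the surrounding runnable machinery, aborting may also surface
  // as an AbortError rather than a silent stop; either way no further chunks arrive.
}

console.log(`chunks before cancellation: ${seen}`);
```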
@@ -1 +1 @@
-{"version":3,"file":"chat_models.cjs", …}
+{"version":3,"file":"chat_models.cjs", …}
[generated source map: names, embedded sourcesContent, and mappings updated for the `_options` → `options` rename and the added `if (options.signal?.aborted) break;` guards; full single-line map bodies omitted]
@@ -59,7 +59,7 @@ declare class FakeStreamingChatModel extends BaseChatModel<FakeStreamingChatMode
 _llmType(): string;
 bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<MessageStructure>, FakeStreamingChatModelCallOptions>;
 _generate(messages: BaseMessage[], _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
-_streamResponseChunks(_messages: BaseMessage[], _options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
+_streamResponseChunks(_messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
 }
 /**
  * Interface for the input parameters specific to the Fake List Chat model.
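The only declaration change is the `_streamResponseChunks` parameter rename from `_options` to `options`, reflecting that the options object is now actually read (for `signal`) instead of ignored. A hedged sketch of a custom test model following the same pattern; the class name and streamed content are invented for illustration:

```typescript
import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
import { BaseChatModel } from "@langchain/core/language_models/chat_models";
import { AIMessage, AIMessageChunk, BaseMessage } from "@langchain/core/messages";
import { ChatGenerationChunk, ChatResult } from "@langchain/core/outputs";

// Hypothetical fake model mirroring the abort-aware streaming pattern above.
class TickingChatModel extends BaseChatModel {
  _llmType(): string {
    return "ticking";
  }

  _combineLLMOutput() {
    return [];
  }

  async _generate(): Promise<ChatResult> {
    return { generations: [{ message: new AIMessage("tick"), text: "tick" }] };
  }

  async *_streamResponseChunks(
    _messages: BaseMessage[],
    options: this["ParsedCallOptions"], // renamed in 1.1.7: consulted, not ignored
    runManager?: CallbackManagerForLLMRun
  ): AsyncGenerator<ChatGenerationChunk> {
    for (const ch of "tick") {
      if (options.signal?.aborted) break; // same guard this release adds to the fakes
      const chunk = new ChatGenerationChunk({
        message: new AIMessageChunk({ content: ch }),
        text: ch,
      });
      yield chunk;
      await runManager?.handleLLMNewToken(ch);
    }
  }
}
```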
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"chat_models.d.cts","names":["CallbackManagerForLLMRun","BaseChatModel","BaseChatModelCallOptions","BaseChatModelParams","BaseLLMParams","BaseMessage","AIMessage","AIMessageChunk","ChatResult","ChatGenerationChunk","Runnable","StructuredTool","StructuredOutputMethodParams","BaseLanguageModelInput","StructuredOutputMethodOptions","InteropZodType","ToolSpec","Record","FakeStreamingChatModelCallOptions","FakeStreamingChatModelFields","FakeChatModel","Promise","FakeStreamingChatModel","sleep","responses","chunks","toolStyle","thrownErrorString","______messages_message_js0","MessageStructure","AsyncGenerator","FakeChatInput","FakeListChatModelCallOptions","FakeListChatModel","RunOutput"],"sources":["../../../src/utils/testing/chat_models.d.ts"],"sourcesContent":["import { CallbackManagerForLLMRun } from \"../../callbacks/manager.js\";\nimport { BaseChatModel, BaseChatModelCallOptions, BaseChatModelParams } from \"../../language_models/chat_models.js\";\nimport { BaseLLMParams } from \"../../language_models/llms.js\";\nimport { BaseMessage, AIMessage, AIMessageChunk } from \"../../messages/index.js\";\nimport { type ChatResult, ChatGenerationChunk } from \"../../outputs.js\";\nimport { Runnable } from \"../../runnables/base.js\";\nimport { StructuredTool } from \"../../tools/index.js\";\nimport { StructuredOutputMethodParams, BaseLanguageModelInput, StructuredOutputMethodOptions } from \"../../language_models/base.js\";\nimport { InteropZodType } from \"../types/zod.js\";\n/** Minimal shape actually needed by `bindTools` */\nexport interface ToolSpec {\n name: string;\n description?: string;\n schema: InteropZodType | Record<string, unknown>;\n}\n/**\n * Interface specific to the Fake Streaming Chat model.\n */\nexport interface FakeStreamingChatModelCallOptions extends BaseChatModelCallOptions {\n}\n/**\n * Interface for the Constructor-field specific to the Fake Streaming Chat model (all optional because we fill in defaults).\n */\nexport interface FakeStreamingChatModelFields extends BaseChatModelParams {\n /** Milliseconds to pause between fallback char-by-char chunks */\n sleep?: number;\n /** Full AI messages to fall back to when no `chunks` supplied */\n responses?: BaseMessage[];\n /** Exact chunks to emit (can include tool-call deltas) */\n chunks?: AIMessageChunk[];\n /** How tool specs are formatted in `bindTools` */\n toolStyle?: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n /** Throw this error instead of streaming (useful in tests) */\n thrownErrorString?: string;\n}\nexport declare class FakeChatModel extends BaseChatModel {\n _combineLLMOutput(): never[];\n _llmType(): string;\n _generate(messages: BaseMessage[], options?: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;\n}\nexport declare class FakeStreamingChatModel extends BaseChatModel<FakeStreamingChatModelCallOptions> {\n sleep: number;\n responses: BaseMessage[];\n chunks: AIMessageChunk[];\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n thrownErrorString?: string;\n private tools;\n constructor({ sleep, responses, chunks, toolStyle, thrownErrorString, ...rest }: FakeStreamingChatModelFields & BaseLLMParams);\n _llmType(): string;\n bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<import(\"../../messages/message.js\").MessageStructure>, FakeStreamingChatModelCallOptions>;\n _generate(messages: BaseMessage[], _options: this[\"ParsedCallOptions\"], _runManager?: 
CallbackManagerForLLMRun): Promise<ChatResult>;\n _streamResponseChunks(_messages: BaseMessage[], _options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;\n}\n/**\n * Interface for the input parameters specific to the Fake List Chat model.\n */\nexport interface FakeChatInput extends BaseChatModelParams {\n /** Responses to return */\n responses: string[];\n /** Time to sleep in milliseconds between responses */\n sleep?: number;\n emitCustomEvent?: boolean;\n /**\n * Generation info to include on the last chunk during streaming.\n * This gets merged into response_metadata by the base chat model.\n * Useful for testing response_metadata propagation (e.g., finish_reason).\n */\n generationInfo?: Record<string, unknown>;\n}\nexport interface FakeListChatModelCallOptions extends BaseChatModelCallOptions {\n thrownErrorString?: string;\n}\n/**\n * A fake Chat Model that returns a predefined list of responses. It can be used\n * for testing purposes.\n * @example\n * ```typescript\n * const chat = new FakeListChatModel({\n * responses: [\"I'll callback later.\", \"You 'console' them!\"]\n * });\n *\n * const firstMessage = new HumanMessage(\"You want to hear a JavaScript joke?\");\n * const secondMessage = new HumanMessage(\"How do you cheer up a JavaScript developer?\");\n *\n * // Call the chat model with a message and log the response\n * const firstResponse = await chat.call([firstMessage]);\n * console.log({ firstResponse });\n *\n * const secondResponse = await chat.call([secondMessage]);\n * console.log({ secondResponse });\n * ```\n */\nexport declare class FakeListChatModel extends BaseChatModel<FakeListChatModelCallOptions> {\n static lc_name(): string;\n lc_serializable: boolean;\n responses: string[];\n i: number;\n sleep?: number;\n emitCustomEvent: boolean;\n generationInfo?: Record<string, unknown>;\n private tools;\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n constructor(params: FakeChatInput);\n _combineLLMOutput(): never[];\n _llmType(): string;\n _generate(_messages: BaseMessage[], options?: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;\n _formatGeneration(text: string): {\n message: AIMessage<import(\"../../messages/message.js\").MessageStructure>;\n text: string;\n };\n _streamResponseChunks(_messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;\n _sleepIfRequested(): Promise<void>;\n _sleep(): Promise<void>;\n _createResponseChunk(text: string, generationInfo?: Record<string, any>): ChatGenerationChunk;\n _currentResponse(): string;\n _incrementResponse(): void;\n bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<import(\"../../messages/message.js\").MessageStructure>, FakeListChatModelCallOptions>;\n withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(_params: StructuredOutputMethodParams<RunOutput, false> | InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;\n withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(_params: StructuredOutputMethodParams<RunOutput, true> | InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {\n raw: BaseMessage;\n parsed: RunOutput;\n }>;\n}\n//# 
sourceMappingURL=chat_models.d.ts.map"],"mappings":";;;;;;;;;;;;;;UAUiBgB,QAAAA;;;EAAAA,MAAAA,EAGLD,cAHa,GAGIE,MAAjBF,CAAAA,MAAAA,EAAiBE,OAAM,CAAA;AAKnC;AAKA;;;AAAsDd,UALrCe,iCAAAA,SAA0ChB,wBAKLC,CAAAA,CAAmB;AAYzE;;;AAG4HK,UAf3GW,4BAAAA,SAAqChB,mBAesEK,CAAAA;EAARa;EAHzEpB,KAAAA,CAAAA,EAAAA,MAAAA;EAAa;EAKnCqB,SAAAA,CAAAA,EAbLjB,WAaKiB,EAAsB;EAAuBJ;EAEnDb,MAAAA,CAAAA,EAbFE,cAaEF,EAAAA;EACHE;EAIMgB,SAAAA,CAAAA,EAAAA,QAAAA,GAAAA,WAAAA,GAAAA,SAAAA,GAAAA,QAAAA;EAAOC;EAAWC,iBAAAA,CAAAA,EAAAA,MAAAA;;AAAmBE,cAZlCP,aAAAA,SAAsBnB,aAAAA,CAYY0B;EAA8BR,iBAAAA,CAAAA,CAAAA,EAAAA,KAAAA,EAAAA;EAA+Bf,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAE9FO,SAAAA,CAAAA,QAAAA,EAXEN,WAWFM,EAAAA,EAAAA,OAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAXmEX,wBAWnEW,CAAAA,EAX8FU,OAW9FV,CAXsGH,UAWtGG,CAAAA;;AAAwCE,cATzCS,sBAAAA,SAA+BrB,aASUY,CATIK,iCASJL,CAAAA,CAAAA;;EAAwBN,SAAAA,EAPvEF,WAOuEE,EAAAA;EAAsEW,MAAAA,EANhJX,cAMgJW,EAAAA;EAAvGR,SAAAA,EAAAA,QAAAA,GAAAA,WAAAA,GAAAA,SAAAA,GAAAA,QAAAA;EAC7BL,iBAAAA,CAAAA,EAAAA,MAAAA;EAAkEL,QAAAA,KAAAA;EAAmCQ,WAAAA,CAAAA;IAAAA,KAAAA;IAAAA,SAAAA;IAAAA,MAAAA;IAAAA,SAAAA;IAAAA,iBAAAA;IAAAA,GAAAA;EAAAA,CAAAA,EAHxCW,4BAGwCX,GAHTJ,aAGSI;EAARa,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAChFhB,SAAAA,CAAAA,KAAAA,EAAAA,CAFfM,cAEeN,GAFEW,QAEFX,CAAAA,EAAAA,CAAAA,EAFgBK,QAEhBL,CAFyBQ,sBAEzBR,EAFiDE,cAEjDF,kBAAAA,EAFuHa,iCAEvHb,CAAAA;EAAiEL,SAAAA,CAAAA,QAAAA,EAD9EK,WAC8EL,EAAAA,EAAAA,QAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,WAAAA,CAAAA,EADZA,wBACYA,CAAAA,EADeqB,OACfrB,CADuBQ,UACvBR,CAAAA;EAA0CS,qBAAAA,CAAAA,SAAAA,EAA3GJ,WAA2GI,EAAAA,EAAAA,QAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAA1CT,wBAA0CS,CAAAA,EAAfqB,cAAerB,CAAAA,mBAAAA,CAAAA;;;AAX/E;AAgBjE;AAaiBuB,UAbAD,aAAAA,SAAsB5B,mBAaeD,CAAAA;EAuBjC+B;EAAwCD,SAAAA,EAAAA,MAAAA,EAAAA;EAOxCf;EAGGc,KAAAA,CAAAA,EAAAA,MAAAA;EAGC1B,eAAAA,CAAAA,EAAAA,OAAAA;EAAiEL;;;;;EAKrDK,cAAAA,CAAAA,EA3ChBY,MA2CgBZ,CAAAA,MAAAA,EAAAA,OAAAA,CAAAA;;AAA0GI,UAzC9HuB,4BAAAA,SAAqC9B,wBAyCyFO,CAAAA;EAAfqB,iBAAAA,CAAAA,EAAAA,MAAAA;;;;;;;;;;;;;;;;;;;;;;AAOiHpB,cAzB5NuB,iBAAAA,SAA0BhC,aAyBkMS,CAzBpLsB,4BAyBoLtB,CAAAA,CAAAA;EACtMO,OAAAA,OAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAAsBA,eAAAA,EAAAA,OAAAA;EAA2DiB,SAAAA,EAAAA,MAAAA,EAAAA;EAA7BtB,CAAAA,EAAAA,MAAAA;EAA+DsB,KAAAA,CAAAA,EAAAA,MAAAA;EAAfnB,eAAAA,EAAAA,OAAAA;EAA4BE,cAAAA,CAAAA,EAnBtJA,MAmBsJA,CAAAA,MAAAA,EAAAA,OAAAA,CAAAA;EAA8BH,QAAAA,KAAAA;EAA+CD,SAAAA,EAAAA,QAAAA,GAAAA,WAAAA,GAAAA,SAAAA,GAAAA,QAAAA;EAC3OR,WAAAA,CAAAA,MAAAA,EAjBW0B,aAiBX1B;EACG6B,iBAAAA,CAAAA,CAAAA,EAAAA,KAAAA,EAAAA;EAF+NxB,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EA1BhMT,SAAAA,CAAAA,SAAAA,EAatBI,WAbsBJ,EAAAA,EAAAA,OAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAa2CD,wBAb3CC,CAAAA,EAasEoB,OAbtEpB,CAa8EO,UAb9EP,CAAAA;EAAa,iBAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EAAA;aAe3CK;;;mCAGoBD,gEAAgEL,2BAA2B8B,eAAerB;uBACtHY;YACXA;sDAC0CJ,sBAAsBR;;;oBAGxDE,iBAAiBK,cAAcN,SAASG,wBAAwBN,kCAAsEyB;yCACjHf,sBAAsBA,8BAA8BL,6BAA6BsB,oBAAoBnB,eAAemB,aAAajB,8BAA8BH,uCAAuCJ,SAASG,wBAAwBqB;yCACvOjB,sBAAsBA,8BAA8BL,6BAA6BsB,mBAAmBnB,eAAemB,aAAajB,8BAA8BH,sCAAsCJ,SAASG;SAC3OR;YACG6B"}
+[chat_models.d.cts.map regenerated: recomputed "mappings"; the embedded sourcesContent is identical apart from the rename of `_options` to `options` in `FakeStreamingChatModel._streamResponseChunks`]
package/dist/utils/testing/chat_models.d.ts
@@ -60,7 +60,7 @@ declare class FakeStreamingChatModel extends BaseChatModel<FakeStreamingChatModelCallOptions> {
     _llmType(): string;
     bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<MessageStructure>, FakeStreamingChatModelCallOptions>;
     _generate(messages: BaseMessage[], _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
-    _streamResponseChunks(_messages: BaseMessage[], _options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
+    _streamResponseChunks(_messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
 }
 /**
  * Interface for the input parameters specific to the Fake List Chat model.
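The only change in the declaration is the rename of `_options` to `options`: the parameter is no longer marked unused because the streaming implementation now reads the parsed call options, notably `signal`. A minimal sketch of what this enables, assuming the standard `signal` call option and the `@langchain/core/utils/testing` entrypoint (outer runnable layers may also surface the abort as an `AbortError`, so it is caught defensively):

```typescript
import { FakeStreamingChatModel } from "@langchain/core/utils/testing";
import { AIMessage } from "@langchain/core/messages";

// Stream the fallback response char-by-char, then abort mid-stream.
const model = new FakeStreamingChatModel({
  sleep: 10,
  responses: [new AIMessage("a fairly long response to stream")],
});

const controller = new AbortController();
let seen = 0;
try {
  const stream = await model.stream("hi", { signal: controller.signal });
  for await (const _chunk of stream) {
    seen += 1;
    if (seen === 5) controller.abort(); // remaining chunks are skipped via `options.signal?.aborted`
  }
} catch {
  // Depending on the caller stack, aborting can reject the iterator
  // with an AbortError instead of ending it quietly.
}
console.log(`received ${seen} chunks before the abort took effect`);
```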
package/dist/utils/testing/chat_models.d.ts.map
@@ -1 +1 @@
-[chat_models.d.ts.map: previous generated source map JSON]
+[chat_models.d.ts.map regenerated: recomputed "mappings"; the embedded sourcesContent differs only by the rename of `_options` to `options` shown above]
package/dist/utils/testing/chat_models.js
@@ -103,7 +103,7 @@ var FakeStreamingChatModel = class FakeStreamingChatModel extends BaseChatModel
 }] };
 return generation;
 }
-async *_streamResponseChunks(_messages, _options, runManager) {
+async *_streamResponseChunks(_messages, options, runManager) {
 if (this.thrownErrorString) throw new Error(this.thrownErrorString);
 if (this.chunks?.length) {
 for (const msgChunk of this.chunks) {
@@ -115,6 +115,7 @@ var FakeStreamingChatModel = class FakeStreamingChatModel extends BaseChatModel
 }),
 text: msgChunk.content?.toString() ?? ""
 });
+if (options.signal?.aborted) break;
 yield cg;
 await runManager?.handleLLMNewToken(msgChunk.content, void 0, void 0, void 0, void 0, { chunk: cg });
 }
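The loop patched here is the `chunks` fast path: each supplied `AIMessageChunk` is wrapped in a `ChatGenerationChunk` and emitted verbatim (tool-call deltas included), and the new guard stops emission as soon as the signal is aborted. A sketch of exercising this path; the inputs are illustrative:

```typescript
import { FakeStreamingChatModel } from "@langchain/core/utils/testing";
import { AIMessageChunk } from "@langchain/core/messages";

// With `chunks` set, each chunk is yielded as-is, one per iteration.
const model = new FakeStreamingChatModel({
  chunks: [
    new AIMessageChunk({ content: "Hel" }),
    new AIMessageChunk({ content: "lo" }),
  ],
});

for await (const chunk of await model.stream("input is ignored on this path")) {
  console.log(chunk.content); // "Hel", then "lo"
}
```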
@@ -128,6 +129,7 @@ var FakeStreamingChatModel = class FakeStreamingChatModel extends BaseChatModel
 message: new AIMessageChunk({ content: ch }),
 text: ch
 });
+if (options.signal?.aborted) break;
 yield cg;
 await runManager?.handleLLMNewToken(ch, void 0, void 0, void 0, void 0, { chunk: cg });
 }
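This second guard covers the fallback path: when no `chunks` are configured, the first entry of `responses` (or the incoming message text) is streamed one character at a time, pausing `sleep` milliseconds between chunks. A sketch, assuming no abort fires:

```typescript
import { FakeStreamingChatModel } from "@langchain/core/utils/testing";
import { AIMessage } from "@langchain/core/messages";

const model = new FakeStreamingChatModel({
  sleep: 5, // ms between characters
  responses: [new AIMessage("abc")],
});

const pieces: string[] = [];
for await (const chunk of await model.stream("ignored when responses are set")) {
  pieces.push(String(chunk.content));
}
console.log(pieces); // ["a", "b", "c"]
```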
@@ -210,6 +212,7 @@ var FakeListChatModel = class FakeListChatModel extends BaseChatModel {
 await this._sleepIfRequested();
 if (options?.thrownErrorString) throw new Error(options.thrownErrorString);
 const chunk = this._createResponseChunk(text, isLastChunk ? this.generationInfo : void 0);
+if (options.signal?.aborted) break;
 yield chunk;
 runManager?.handleLLMNewToken(text);
 }
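`FakeListChatModel` gets the same early-exit check inside its char-by-char loop. The surrounding context also shows `generationInfo` being attached only to the last chunk, which (per the `FakeChatInput` docs above) the base chat model merges into `response_metadata`. A sketch of observing that; the `finish_reason` value is just an example:

```typescript
import { FakeListChatModel } from "@langchain/core/utils/testing";
import type { AIMessageChunk } from "@langchain/core/messages";

const model = new FakeListChatModel({
  responses: ["Hi!"],
  generationInfo: { finish_reason: "stop" }, // attached to the final chunk only
});

let final: AIMessageChunk | undefined;
for await (const chunk of await model.stream("hello")) {
  final = final === undefined ? chunk : final.concat(chunk);
}
console.log(final?.response_metadata); // expect finish_reason: "stop" merged in
```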
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"chat_models.js","names":["messages: BaseMessage[]","options?: this[\"ParsedCallOptions\"]","runManager?: CallbackManagerForLLMRun","tools: (StructuredTool | ToolSpec)[]","_options: this[\"ParsedCallOptions\"]","_runManager?: CallbackManagerForLLMRun","generation: ChatResult","_messages: BaseMessage[]","params: FakeChatInput","text: string","options: this[\"ParsedCallOptions\"]","generationInfo?: Record<string, any>","_params:\n | StructuredOutputMethodParams<RunOutput, boolean>\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>","_config?: StructuredOutputMethodOptions<boolean>"],"sources":["../../../src/utils/testing/chat_models.ts"],"sourcesContent":["import { CallbackManagerForLLMRun } from \"../../callbacks/manager.js\";\nimport {\n BaseChatModel,\n BaseChatModelCallOptions,\n BaseChatModelParams,\n} from \"../../language_models/chat_models.js\";\nimport { BaseLLMParams } from \"../../language_models/llms.js\";\nimport {\n BaseMessage,\n AIMessage,\n AIMessageChunk,\n} from \"../../messages/index.js\";\nimport { type ChatResult, ChatGenerationChunk } from \"../../outputs.js\";\nimport { Runnable, RunnableLambda } from \"../../runnables/base.js\";\nimport { StructuredTool } from \"../../tools/index.js\";\nimport {\n StructuredOutputMethodParams,\n BaseLanguageModelInput,\n StructuredOutputMethodOptions,\n} from \"../../language_models/base.js\";\n\nimport { toJsonSchema } from \"../json_schema.js\";\nimport { InteropZodType } from \"../types/zod.js\";\n\n/** Minimal shape actually needed by `bindTools` */\nexport interface ToolSpec {\n name: string;\n description?: string;\n schema: InteropZodType | Record<string, unknown>; // Either a Zod schema *or* a plain JSON-Schema object\n}\n\n/**\n * Interface specific to the Fake Streaming Chat model.\n */\nexport interface FakeStreamingChatModelCallOptions\n extends BaseChatModelCallOptions {}\n/**\n * Interface for the Constructor-field specific to the Fake Streaming Chat model (all optional because we fill in defaults).\n */\nexport interface FakeStreamingChatModelFields extends BaseChatModelParams {\n /** Milliseconds to pause between fallback char-by-char chunks */\n sleep?: number;\n\n /** Full AI messages to fall back to when no `chunks` supplied */\n responses?: BaseMessage[];\n\n /** Exact chunks to emit (can include tool-call deltas) */\n chunks?: AIMessageChunk[];\n\n /** How tool specs are formatted in `bindTools` */\n toolStyle?: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n\n /** Throw this error instead of streaming (useful in tests) */\n thrownErrorString?: string;\n}\n\nexport class FakeChatModel extends BaseChatModel {\n _combineLLMOutput() {\n return [];\n }\n\n _llmType(): string {\n return \"fake\";\n }\n\n async _generate(\n messages: BaseMessage[],\n options?: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n if (options?.stop?.length) {\n return {\n generations: [\n {\n message: new AIMessage(options.stop[0]),\n text: options.stop[0],\n },\n ],\n };\n }\n const text = messages\n .map((m) => {\n if (typeof m.content === \"string\") {\n return m.content;\n }\n return JSON.stringify(m.content, null, 2);\n })\n .join(\"\\n\");\n await runManager?.handleLLMNewToken(text);\n return {\n generations: [\n {\n message: new AIMessage(text),\n text,\n },\n ],\n llmOutput: {},\n };\n }\n}\n\nexport class FakeStreamingChatModel extends BaseChatModel<FakeStreamingChatModelCallOptions> {\n 
sleep = 50;\n\n responses: BaseMessage[] = [];\n\n chunks: AIMessageChunk[] = [];\n\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\" = \"openai\";\n\n thrownErrorString?: string;\n\n private tools: (StructuredTool | ToolSpec)[] = [];\n\n constructor({\n sleep = 50,\n responses = [],\n chunks = [],\n toolStyle = \"openai\",\n thrownErrorString,\n ...rest\n }: FakeStreamingChatModelFields & BaseLLMParams) {\n super(rest);\n this.sleep = sleep;\n this.responses = responses;\n this.chunks = chunks;\n this.toolStyle = toolStyle;\n this.thrownErrorString = thrownErrorString;\n }\n\n _llmType() {\n return \"fake\";\n }\n\n bindTools(tools: (StructuredTool | ToolSpec)[]) {\n const merged = [...this.tools, ...tools];\n\n const toolDicts = merged.map((t) => {\n switch (this.toolStyle) {\n case \"openai\":\n return {\n type: \"function\",\n function: {\n name: t.name,\n description: t.description,\n parameters: toJsonSchema(t.schema),\n },\n };\n case \"anthropic\":\n return {\n name: t.name,\n description: t.description,\n input_schema: toJsonSchema(t.schema),\n };\n case \"bedrock\":\n return {\n toolSpec: {\n name: t.name,\n description: t.description,\n inputSchema: toJsonSchema(t.schema),\n },\n };\n case \"google\":\n return {\n name: t.name,\n description: t.description,\n parameters: toJsonSchema(t.schema),\n };\n default:\n throw new Error(`Unsupported tool style: ${this.toolStyle}`);\n }\n });\n\n const wrapped =\n this.toolStyle === \"google\"\n ? [{ functionDeclarations: toolDicts }]\n : toolDicts;\n\n /* creating a *new* instance – mirrors LangChain .bind semantics for type-safety and avoiding noise */\n const next = new FakeStreamingChatModel({\n sleep: this.sleep,\n responses: this.responses,\n chunks: this.chunks,\n toolStyle: this.toolStyle,\n thrownErrorString: this.thrownErrorString,\n });\n next.tools = merged;\n\n return next.withConfig({ tools: wrapped } as BaseChatModelCallOptions);\n }\n\n async _generate(\n messages: BaseMessage[],\n _options: this[\"ParsedCallOptions\"],\n _runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n if (this.thrownErrorString) {\n throw new Error(this.thrownErrorString);\n }\n\n const content = this.responses?.[0]?.content ?? messages[0].content ?? \"\";\n\n const generation: ChatResult = {\n generations: [\n {\n text: \"\",\n message: new AIMessage({\n content,\n tool_calls: this.chunks?.[0]?.tool_calls,\n }),\n },\n ],\n };\n\n return generation;\n }\n\n async *_streamResponseChunks(\n _messages: BaseMessage[],\n _options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n if (this.thrownErrorString) {\n throw new Error(this.thrownErrorString);\n }\n if (this.chunks?.length) {\n for (const msgChunk of this.chunks) {\n const cg = new ChatGenerationChunk({\n message: new AIMessageChunk({\n content: msgChunk.content,\n tool_calls: msgChunk.tool_calls,\n additional_kwargs: msgChunk.additional_kwargs ?? {},\n }),\n text: msgChunk.content?.toString() ?? \"\",\n });\n\n yield cg;\n await runManager?.handleLLMNewToken(\n msgChunk.content as string,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: cg }\n );\n }\n return;\n }\n\n const fallback =\n this.responses?.[0] ??\n new AIMessage(\n typeof _messages[0].content === \"string\" ? _messages[0].content : \"\"\n );\n const text = typeof fallback.content === \"string\" ? 
fallback.content : \"\";\n\n for (const ch of text) {\n await new Promise((r) => setTimeout(r, this.sleep));\n const cg = new ChatGenerationChunk({\n message: new AIMessageChunk({ content: ch }),\n text: ch,\n });\n yield cg;\n await runManager?.handleLLMNewToken(\n ch,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: cg }\n );\n }\n }\n}\n\n/**\n * Interface for the input parameters specific to the Fake List Chat model.\n */\nexport interface FakeChatInput extends BaseChatModelParams {\n /** Responses to return */\n responses: string[];\n\n /** Time to sleep in milliseconds between responses */\n sleep?: number;\n\n emitCustomEvent?: boolean;\n\n /**\n * Generation info to include on the last chunk during streaming.\n * This gets merged into response_metadata by the base chat model.\n * Useful for testing response_metadata propagation (e.g., finish_reason).\n */\n generationInfo?: Record<string, unknown>;\n}\n\nexport interface FakeListChatModelCallOptions extends BaseChatModelCallOptions {\n thrownErrorString?: string;\n}\n\n/**\n * A fake Chat Model that returns a predefined list of responses. It can be used\n * for testing purposes.\n * @example\n * ```typescript\n * const chat = new FakeListChatModel({\n * responses: [\"I'll callback later.\", \"You 'console' them!\"]\n * });\n *\n * const firstMessage = new HumanMessage(\"You want to hear a JavaScript joke?\");\n * const secondMessage = new HumanMessage(\"How do you cheer up a JavaScript developer?\");\n *\n * // Call the chat model with a message and log the response\n * const firstResponse = await chat.call([firstMessage]);\n * console.log({ firstResponse });\n *\n * const secondResponse = await chat.call([secondMessage]);\n * console.log({ secondResponse });\n * ```\n */\nexport class FakeListChatModel extends BaseChatModel<FakeListChatModelCallOptions> {\n static lc_name() {\n return \"FakeListChatModel\";\n }\n\n lc_serializable = true;\n\n responses: string[];\n\n i = 0;\n\n sleep?: number;\n\n emitCustomEvent = false;\n\n generationInfo?: Record<string, unknown>;\n\n private tools: (StructuredTool | ToolSpec)[] = [];\n\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\" = \"openai\";\n\n constructor(params: FakeChatInput) {\n super(params);\n const { responses, sleep, emitCustomEvent, generationInfo } = params;\n this.responses = responses;\n this.sleep = sleep;\n this.emitCustomEvent = emitCustomEvent ?? 
this.emitCustomEvent;\n this.generationInfo = generationInfo;\n }\n\n _combineLLMOutput() {\n return [];\n }\n\n _llmType(): string {\n return \"fake-list\";\n }\n\n async _generate(\n _messages: BaseMessage[],\n options?: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n await this._sleepIfRequested();\n if (options?.thrownErrorString) {\n throw new Error(options.thrownErrorString);\n }\n if (this.emitCustomEvent) {\n await runManager?.handleCustomEvent(\"some_test_event\", {\n someval: true,\n });\n }\n\n if (options?.stop?.length) {\n return {\n generations: [this._formatGeneration(options.stop[0])],\n };\n } else {\n const response = this._currentResponse();\n this._incrementResponse();\n\n return {\n generations: [this._formatGeneration(response)],\n llmOutput: {},\n };\n }\n }\n\n _formatGeneration(text: string) {\n return {\n message: new AIMessage(text),\n text,\n };\n }\n\n async *_streamResponseChunks(\n _messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n const response = this._currentResponse();\n this._incrementResponse();\n if (this.emitCustomEvent) {\n await runManager?.handleCustomEvent(\"some_test_event\", {\n someval: true,\n });\n }\n\n const responseChars = [...response];\n for (let i = 0; i < responseChars.length; i++) {\n const text = responseChars[i];\n const isLastChunk = i === responseChars.length - 1;\n await this._sleepIfRequested();\n if (options?.thrownErrorString) {\n throw new Error(options.thrownErrorString);\n }\n // Include generationInfo on the last chunk (like real providers do)\n // This gets merged into response_metadata by the base chat model\n const chunk = this._createResponseChunk(\n text,\n isLastChunk ? this.generationInfo : undefined\n );\n yield chunk;\n // eslint-disable-next-line no-void\n void runManager?.handleLLMNewToken(text);\n }\n }\n\n async _sleepIfRequested() {\n if (this.sleep !== undefined) {\n await this._sleep();\n }\n }\n\n async _sleep() {\n return new Promise<void>((resolve) => {\n setTimeout(() => resolve(), this.sleep);\n });\n }\n\n _createResponseChunk(\n text: string,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n generationInfo?: Record<string, any>\n ): ChatGenerationChunk {\n return new ChatGenerationChunk({\n message: new AIMessageChunk({ content: text }),\n text,\n generationInfo,\n });\n }\n\n _currentResponse() {\n return this.responses[this.i];\n }\n\n _incrementResponse() {\n if (this.i < this.responses.length - 1) {\n this.i += 1;\n } else {\n this.i = 0;\n }\n }\n\n bindTools(tools: (StructuredTool | ToolSpec)[]) {\n const merged = [...this.tools, ...tools];\n\n const toolDicts = merged.map((t) => {\n switch (this.toolStyle) {\n case \"openai\":\n return {\n type: \"function\",\n function: {\n name: t.name,\n description: t.description,\n parameters: toJsonSchema(t.schema),\n },\n };\n case \"anthropic\":\n return {\n name: t.name,\n description: t.description,\n input_schema: toJsonSchema(t.schema),\n };\n case \"bedrock\":\n return {\n toolSpec: {\n name: t.name,\n description: t.description,\n inputSchema: toJsonSchema(t.schema),\n },\n };\n case \"google\":\n return {\n name: t.name,\n description: t.description,\n parameters: toJsonSchema(t.schema),\n };\n default:\n throw new Error(`Unsupported tool style: ${this.toolStyle}`);\n }\n });\n\n const wrapped =\n this.toolStyle === \"google\"\n ? 
[{ functionDeclarations: toolDicts }]\n : toolDicts;\n\n const next = new FakeListChatModel({\n responses: this.responses,\n sleep: this.sleep,\n emitCustomEvent: this.emitCustomEvent,\n generationInfo: this.generationInfo,\n });\n next.tools = merged;\n next.toolStyle = this.toolStyle;\n next.i = this.i;\n\n return next.withConfig({ tools: wrapped } as BaseChatModelCallOptions);\n }\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n _params:\n | StructuredOutputMethodParams<RunOutput, false>\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<false>\n ): Runnable<BaseLanguageModelInput, RunOutput>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n _params:\n | StructuredOutputMethodParams<RunOutput, true>\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<true>\n ): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n _params:\n | StructuredOutputMethodParams<RunOutput, boolean>\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n _config?: StructuredOutputMethodOptions<boolean>\n ):\n | Runnable<BaseLanguageModelInput, RunOutput>\n | Runnable<\n BaseLanguageModelInput,\n { raw: BaseMessage; parsed: RunOutput }\n > {\n return RunnableLambda.from(async (input) => {\n const message = await this.invoke(input);\n if (message.tool_calls?.[0]?.args) {\n return message.tool_calls[0].args as RunOutput;\n }\n if (typeof message.content === \"string\") {\n return JSON.parse(message.content);\n }\n throw new Error(\"No structured output found\");\n }) as Runnable;\n 
}\n}\n"],"mappings":";;;;;;;;AAwDA,IAAa,gBAAb,cAAmC,cAAc;CAC/C,oBAAoB;AAClB,SAAO,CAAE;CACV;CAED,WAAmB;AACjB,SAAO;CACR;CAED,MAAM,UACJA,UACAC,SACAC,YACqB;AACrB,MAAI,SAAS,MAAM,OACjB,QAAO,EACL,aAAa,CACX;GACE,SAAS,IAAI,UAAU,QAAQ,KAAK;GACpC,MAAM,QAAQ,KAAK;EACpB,CACF,EACF;EAEH,MAAM,OAAO,SACV,IAAI,CAAC,MAAM;AACV,OAAI,OAAO,EAAE,YAAY,SACvB,QAAO,EAAE;AAEX,UAAO,KAAK,UAAU,EAAE,SAAS,MAAM,EAAE;EAC1C,EAAC,CACD,KAAK,KAAK;EACb,MAAM,YAAY,kBAAkB,KAAK;AACzC,SAAO;GACL,aAAa,CACX;IACE,SAAS,IAAI,UAAU;IACvB;GACD,CACF;GACD,WAAW,CAAE;EACd;CACF;AACF;AAED,IAAa,yBAAb,MAAa,+BAA+B,cAAiD;CAC3F,QAAQ;CAER,YAA2B,CAAE;CAE7B,SAA2B,CAAE;CAE7B,YAA2D;CAE3D;CAEA,AAAQ,QAAuC,CAAE;CAEjD,YAAY,EACV,QAAQ,IACR,YAAY,CAAE,GACd,SAAS,CAAE,GACX,YAAY,UACZ,kBACA,GAAG,MAC0C,EAAE;EAC/C,MAAM,KAAK;EACX,KAAK,QAAQ;EACb,KAAK,YAAY;EACjB,KAAK,SAAS;EACd,KAAK,YAAY;EACjB,KAAK,oBAAoB;CAC1B;CAED,WAAW;AACT,SAAO;CACR;CAED,UAAUC,OAAsC;EAC9C,MAAM,SAAS,CAAC,GAAG,KAAK,OAAO,GAAG,KAAM;EAExC,MAAM,YAAY,OAAO,IAAI,CAAC,MAAM;AAClC,WAAQ,KAAK,WAAb;IACE,KAAK,SACH,QAAO;KACL,MAAM;KACN,UAAU;MACR,MAAM,EAAE;MACR,aAAa,EAAE;MACf,YAAY,aAAa,EAAE,OAAO;KACnC;IACF;IACH,KAAK,YACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,cAAc,aAAa,EAAE,OAAO;IACrC;IACH,KAAK,UACH,QAAO,EACL,UAAU;KACR,MAAM,EAAE;KACR,aAAa,EAAE;KACf,aAAa,aAAa,EAAE,OAAO;IACpC,EACF;IACH,KAAK,SACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,YAAY,aAAa,EAAE,OAAO;IACnC;IACH,QACE,OAAM,IAAI,MAAM,CAAC,wBAAwB,EAAE,KAAK,WAAW;GAC9D;EACF,EAAC;EAEF,MAAM,UACJ,KAAK,cAAc,WACf,CAAC,EAAE,sBAAsB,UAAW,CAAC,IACrC;EAGN,MAAM,OAAO,IAAI,uBAAuB;GACtC,OAAO,KAAK;GACZ,WAAW,KAAK;GAChB,QAAQ,KAAK;GACb,WAAW,KAAK;GAChB,mBAAmB,KAAK;EACzB;EACD,KAAK,QAAQ;AAEb,SAAO,KAAK,WAAW,EAAE,OAAO,QAAS,EAA6B;CACvE;CAED,MAAM,UACJH,UACAI,UACAC,aACqB;AACrB,MAAI,KAAK,kBACP,OAAM,IAAI,MAAM,KAAK;EAGvB,MAAM,UAAU,KAAK,YAAY,IAAI,WAAW,SAAS,GAAG,WAAW;EAEvE,MAAMC,aAAyB,EAC7B,aAAa,CACX;GACE,MAAM;GACN,SAAS,IAAI,UAAU;IACrB;IACA,YAAY,KAAK,SAAS,IAAI;GAC/B;EACF,CACF,EACF;AAED,SAAO;CACR;CAED,OAAO,sBACLC,WACAH,UACAF,YACqC;AACrC,MAAI,KAAK,kBACP,OAAM,IAAI,MAAM,KAAK;AAEvB,MAAI,KAAK,QAAQ,QAAQ;AACvB,QAAK,MAAM,YAAY,KAAK,QAAQ;IAClC,MAAM,KAAK,IAAI,oBAAoB;KACjC,SAAS,IAAI,eAAe;MAC1B,SAAS,SAAS;MAClB,YAAY,SAAS;MACrB,mBAAmB,SAAS,qBAAqB,CAAE;KACpD;KACD,MAAM,SAAS,SAAS,UAAU,IAAI;IACvC;IAED,MAAM;IACN,MAAM,YAAY,kBAChB,SAAS,SACT,QACA,QACA,QACA,QACA,EAAE,OAAO,GAAI,EACd;GACF;AACD;EACD;EAED,MAAM,WACJ,KAAK,YAAY,MACjB,IAAI,UACF,OAAO,UAAU,GAAG,YAAY,WAAW,UAAU,GAAG,UAAU;EAEtE,MAAM,OAAO,OAAO,SAAS,YAAY,WAAW,SAAS,UAAU;AAEvE,OAAK,MAAM,MAAM,MAAM;GACrB,MAAM,IAAI,QAAQ,CAAC,MAAM,WAAW,GAAG,KAAK,MAAM;GAClD,MAAM,KAAK,IAAI,oBAAoB;IACjC,SAAS,IAAI,eAAe,EAAE,SAAS,GAAI;IAC3C,MAAM;GACP;GACD,MAAM;GACN,MAAM,YAAY,kBAChB,IACA,QACA,QACA,QACA,QACA,EAAE,OAAO,GAAI,EACd;EACF;CACF;AACF;;;;;;;;;;;;;;;;;;;;;AA8CD,IAAa,oBAAb,MAAa,0BAA0B,cAA4C;CACjF,OAAO,UAAU;AACf,SAAO;CACR;CAED,kBAAkB;CAElB;CAEA,IAAI;CAEJ;CAEA,kBAAkB;CAElB;CAEA,AAAQ,QAAuC,CAAE;CAEjD,YAA2D;CAE3D,YAAYM,QAAuB;EACjC,MAAM,OAAO;EACb,MAAM,EAAE,WAAW,OAAO,iBAAiB,gBAAgB,GAAG;EAC9D,KAAK,YAAY;EACjB,KAAK,QAAQ;EACb,KAAK,kBAAkB,mBAAmB,KAAK;EAC/C,KAAK,iBAAiB;CACvB;CAED,oBAAoB;AAClB,SAAO,CAAE;CACV;CAED,WAAmB;AACjB,SAAO;CACR;CAED,MAAM,UACJD,WACAN,SACAC,YACqB;EACrB,MAAM,KAAK,mBAAmB;AAC9B,MAAI,SAAS,kBACX,OAAM,IAAI,MAAM,QAAQ;AAE1B,MAAI,KAAK,iBACP,MAAM,YAAY,kBAAkB,mBAAmB,EACrD,SAAS,KACV,EAAC;AAGJ,MAAI,SAAS,MAAM,OACjB,QAAO,EACL,aAAa,CAAC,KAAK,kBAAkB,QAAQ,KAAK,GAAG,AAAC,EACvD;OACI;GACL,MAAM,WAAW,KAAK,kBAAkB;GACxC,KAAK,oBAAoB;AAEzB,UAAO;IACL,aAAa,CAAC,KAAK,kBAAkB,SAAS,AAAC;IAC/C,WAAW,CAAE;GACd;EACF;CACF;CAED,kBAAkBO,MAAc;AAC9B,SAAO;GACL,SAAS,IAAI,UAAU;GACvB;EACD;CACF;CAED,OAAO,sBACLF,WACAG,SACAR,YACqC;EACrC,MAAM,WAAW,K
AAK,kBAAkB;EACxC,KAAK,oBAAoB;AACzB,MAAI,KAAK,iBACP,MAAM,YAAY,kBAAkB,mBAAmB,EACrD,SAAS,KACV,EAAC;EAGJ,MAAM,gBAAgB,CAAC,GAAG,QAAS;AACnC,OAAK,IAAI,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;GAC7C,MAAM,OAAO,cAAc;GAC3B,MAAM,cAAc,MAAM,cAAc,SAAS;GACjD,MAAM,KAAK,mBAAmB;AAC9B,OAAI,SAAS,kBACX,OAAM,IAAI,MAAM,QAAQ;GAI1B,MAAM,QAAQ,KAAK,qBACjB,MACA,cAAc,KAAK,iBAAiB,OACrC;GACD,MAAM;GAED,YAAY,kBAAkB,KAAK;EACzC;CACF;CAED,MAAM,oBAAoB;AACxB,MAAI,KAAK,UAAU,QACjB,MAAM,KAAK,QAAQ;CAEtB;CAED,MAAM,SAAS;AACb,SAAO,IAAI,QAAc,CAAC,YAAY;GACpC,WAAW,MAAM,SAAS,EAAE,KAAK,MAAM;EACxC;CACF;CAED,qBACEO,MAEAE,gBACqB;AACrB,SAAO,IAAI,oBAAoB;GAC7B,SAAS,IAAI,eAAe,EAAE,SAAS,KAAM;GAC7C;GACA;EACD;CACF;CAED,mBAAmB;AACjB,SAAO,KAAK,UAAU,KAAK;CAC5B;CAED,qBAAqB;AACnB,MAAI,KAAK,IAAI,KAAK,UAAU,SAAS,GACnC,KAAK,KAAK;OAEV,KAAK,IAAI;CAEZ;CAED,UAAUR,OAAsC;EAC9C,MAAM,SAAS,CAAC,GAAG,KAAK,OAAO,GAAG,KAAM;EAExC,MAAM,YAAY,OAAO,IAAI,CAAC,MAAM;AAClC,WAAQ,KAAK,WAAb;IACE,KAAK,SACH,QAAO;KACL,MAAM;KACN,UAAU;MACR,MAAM,EAAE;MACR,aAAa,EAAE;MACf,YAAY,aAAa,EAAE,OAAO;KACnC;IACF;IACH,KAAK,YACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,cAAc,aAAa,EAAE,OAAO;IACrC;IACH,KAAK,UACH,QAAO,EACL,UAAU;KACR,MAAM,EAAE;KACR,aAAa,EAAE;KACf,aAAa,aAAa,EAAE,OAAO;IACpC,EACF;IACH,KAAK,SACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,YAAY,aAAa,EAAE,OAAO;IACnC;IACH,QACE,OAAM,IAAI,MAAM,CAAC,wBAAwB,EAAE,KAAK,WAAW;GAC9D;EACF,EAAC;EAEF,MAAM,UACJ,KAAK,cAAc,WACf,CAAC,EAAE,sBAAsB,UAAW,CAAC,IACrC;EAEN,MAAM,OAAO,IAAI,kBAAkB;GACjC,WAAW,KAAK;GAChB,OAAO,KAAK;GACZ,iBAAiB,KAAK;GACtB,gBAAgB,KAAK;EACtB;EACD,KAAK,QAAQ;EACb,KAAK,YAAY,KAAK;EACtB,KAAK,IAAI,KAAK;AAEd,SAAO,KAAK,WAAW,EAAE,OAAO,QAAS,EAA6B;CACvE;CA0BD,qBAIES,SAKAC,SAMI;AACJ,SAAO,eAAe,KAAK,OAAO,UAAU;GAC1C,MAAM,UAAU,MAAM,KAAK,OAAO,MAAM;AACxC,OAAI,QAAQ,aAAa,IAAI,KAC3B,QAAO,QAAQ,WAAW,GAAG;AAE/B,OAAI,OAAO,QAAQ,YAAY,SAC7B,QAAO,KAAK,MAAM,QAAQ,QAAQ;AAEpC,SAAM,IAAI,MAAM;EACjB,EAAC;CACH;AACF"}
|
|
1
|
+
{"version":3,"file":"chat_models.js","names":["messages: BaseMessage[]","options?: this[\"ParsedCallOptions\"]","runManager?: CallbackManagerForLLMRun","tools: (StructuredTool | ToolSpec)[]","_options: this[\"ParsedCallOptions\"]","_runManager?: CallbackManagerForLLMRun","generation: ChatResult","_messages: BaseMessage[]","options: this[\"ParsedCallOptions\"]","params: FakeChatInput","text: string","generationInfo?: Record<string, any>","_params:\n | StructuredOutputMethodParams<RunOutput, boolean>\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>","_config?: StructuredOutputMethodOptions<boolean>"],"sources":["../../../src/utils/testing/chat_models.ts"],"sourcesContent":["import { CallbackManagerForLLMRun } from \"../../callbacks/manager.js\";\nimport {\n BaseChatModel,\n BaseChatModelCallOptions,\n BaseChatModelParams,\n} from \"../../language_models/chat_models.js\";\nimport { BaseLLMParams } from \"../../language_models/llms.js\";\nimport {\n BaseMessage,\n AIMessage,\n AIMessageChunk,\n} from \"../../messages/index.js\";\nimport { type ChatResult, ChatGenerationChunk } from \"../../outputs.js\";\nimport { Runnable, RunnableLambda } from \"../../runnables/base.js\";\nimport { StructuredTool } from \"../../tools/index.js\";\nimport {\n StructuredOutputMethodParams,\n BaseLanguageModelInput,\n StructuredOutputMethodOptions,\n} from \"../../language_models/base.js\";\n\nimport { toJsonSchema } from \"../json_schema.js\";\nimport { InteropZodType } from \"../types/zod.js\";\n\n/** Minimal shape actually needed by `bindTools` */\nexport interface ToolSpec {\n name: string;\n description?: string;\n schema: InteropZodType | Record<string, unknown>; // Either a Zod schema *or* a plain JSON-Schema object\n}\n\n/**\n * Interface specific to the Fake Streaming Chat model.\n */\nexport interface FakeStreamingChatModelCallOptions\n extends BaseChatModelCallOptions {}\n/**\n * Interface for the Constructor-field specific to the Fake Streaming Chat model (all optional because we fill in defaults).\n */\nexport interface FakeStreamingChatModelFields extends BaseChatModelParams {\n /** Milliseconds to pause between fallback char-by-char chunks */\n sleep?: number;\n\n /** Full AI messages to fall back to when no `chunks` supplied */\n responses?: BaseMessage[];\n\n /** Exact chunks to emit (can include tool-call deltas) */\n chunks?: AIMessageChunk[];\n\n /** How tool specs are formatted in `bindTools` */\n toolStyle?: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n\n /** Throw this error instead of streaming (useful in tests) */\n thrownErrorString?: string;\n}\n\nexport class FakeChatModel extends BaseChatModel {\n _combineLLMOutput() {\n return [];\n }\n\n _llmType(): string {\n return \"fake\";\n }\n\n async _generate(\n messages: BaseMessage[],\n options?: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n if (options?.stop?.length) {\n return {\n generations: [\n {\n message: new AIMessage(options.stop[0]),\n text: options.stop[0],\n },\n ],\n };\n }\n const text = messages\n .map((m) => {\n if (typeof m.content === \"string\") {\n return m.content;\n }\n return JSON.stringify(m.content, null, 2);\n })\n .join(\"\\n\");\n await runManager?.handleLLMNewToken(text);\n return {\n generations: [\n {\n message: new AIMessage(text),\n text,\n },\n ],\n llmOutput: {},\n };\n }\n}\n\nexport class FakeStreamingChatModel extends BaseChatModel<FakeStreamingChatModelCallOptions> {\n 
export class FakeStreamingChatModel extends BaseChatModel<FakeStreamingChatModelCallOptions> {
  sleep = 50;

  responses: BaseMessage[] = [];

  chunks: AIMessageChunk[] = [];

  toolStyle: "openai" | "anthropic" | "bedrock" | "google" = "openai";

  thrownErrorString?: string;

  private tools: (StructuredTool | ToolSpec)[] = [];

  constructor({
    sleep = 50,
    responses = [],
    chunks = [],
    toolStyle = "openai",
    thrownErrorString,
    ...rest
  }: FakeStreamingChatModelFields & BaseLLMParams) {
    super(rest);
    this.sleep = sleep;
    this.responses = responses;
    this.chunks = chunks;
    this.toolStyle = toolStyle;
    this.thrownErrorString = thrownErrorString;
  }

  _llmType() {
    return "fake";
  }

  bindTools(tools: (StructuredTool | ToolSpec)[]) {
    const merged = [...this.tools, ...tools];

    const toolDicts = merged.map((t) => {
      switch (this.toolStyle) {
        case "openai":
          return {
            type: "function",
            function: {
              name: t.name,
              description: t.description,
              parameters: toJsonSchema(t.schema),
            },
          };
        case "anthropic":
          return {
            name: t.name,
            description: t.description,
            input_schema: toJsonSchema(t.schema),
          };
        case "bedrock":
          return {
            toolSpec: {
              name: t.name,
              description: t.description,
              inputSchema: toJsonSchema(t.schema),
            },
          };
        case "google":
          return {
            name: t.name,
            description: t.description,
            parameters: toJsonSchema(t.schema),
          };
        default:
          throw new Error(`Unsupported tool style: ${this.toolStyle}`);
      }
    });

    const wrapped =
      this.toolStyle === "google"
        ? [{ functionDeclarations: toolDicts }]
        : toolDicts;

    /* creating a *new* instance – mirrors LangChain .bind semantics for type-safety and avoiding noise */
    const next = new FakeStreamingChatModel({
      sleep: this.sleep,
      responses: this.responses,
      chunks: this.chunks,
      toolStyle: this.toolStyle,
      thrownErrorString: this.thrownErrorString,
    });
    next.tools = merged;

    return next.withConfig({ tools: wrapped } as BaseChatModelCallOptions);
  }

  async _generate(
    messages: BaseMessage[],
    _options: this["ParsedCallOptions"],
    _runManager?: CallbackManagerForLLMRun
  ): Promise<ChatResult> {
    if (this.thrownErrorString) {
      throw new Error(this.thrownErrorString);
    }

    const content = this.responses?.[0]?.content ?? messages[0].content ?? "";

    const generation: ChatResult = {
      generations: [
        {
          text: "",
          message: new AIMessage({
            content,
            tool_calls: this.chunks?.[0]?.tool_calls,
          }),
        },
      ],
    };

    return generation;
  }

  async *_streamResponseChunks(
    _messages: BaseMessage[],
    options: this["ParsedCallOptions"],
    runManager?: CallbackManagerForLLMRun
  ): AsyncGenerator<ChatGenerationChunk> {
    if (this.thrownErrorString) {
      throw new Error(this.thrownErrorString);
    }
    if (this.chunks?.length) {
      for (const msgChunk of this.chunks) {
        const cg = new ChatGenerationChunk({
          message: new AIMessageChunk({
            content: msgChunk.content,
            tool_calls: msgChunk.tool_calls,
            additional_kwargs: msgChunk.additional_kwargs ?? {},
          }),
          text: msgChunk.content?.toString() ?? "",
        });

        if (options.signal?.aborted) break;
        yield cg;
        await runManager?.handleLLMNewToken(
          msgChunk.content as string,
          undefined,
          undefined,
          undefined,
          undefined,
          { chunk: cg }
        );
      }
      return;
    }

    const fallback =
      this.responses?.[0] ??
      new AIMessage(
        typeof _messages[0].content === "string" ? _messages[0].content : ""
      );
    const text = typeof fallback.content === "string" ? fallback.content : "";

    for (const ch of text) {
      await new Promise((r) => setTimeout(r, this.sleep));
      const cg = new ChatGenerationChunk({
        message: new AIMessageChunk({ content: ch }),
        text: ch,
      });
      if (options.signal?.aborted) break;
      yield cg;
      await runManager?.handleLLMNewToken(
        ch,
        undefined,
        undefined,
        undefined,
        undefined,
        { chunk: cg }
      );
    }
  }
}
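/*
 * Streaming sketch for `FakeStreamingChatModel` (editorial illustration): when
 * `chunks` are supplied they are replayed verbatim; otherwise the first entry
 * of `responses` (or the first input message) is re-emitted one character at a
 * time with `sleep` milliseconds between chunks. `bindTools` accepts the
 * minimal `ToolSpec` shape above, so a plain JSON-Schema object works without
 * a Zod schema. The "add" tool is an arbitrary example, not anything the
 * package prescribes.
 *
 * ```typescript
 * const model = new FakeStreamingChatModel({
 *   sleep: 0,
 *   chunks: [
 *     new AIMessageChunk({ content: "Hel" }),
 *     new AIMessageChunk({ content: "lo" }),
 *   ],
 * });
 *
 * for await (const chunk of await model.stream("hi")) {
 *   // yields "Hel", then "lo"
 * }
 *
 * const withAdder = model.bindTools([
 *   {
 *     name: "add",
 *     description: "Add two numbers",
 *     schema: {
 *       type: "object",
 *       properties: { a: { type: "number" }, b: { type: "number" } },
 *     },
 *   },
 * ]);
 * ```
 */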
/**
 * Interface for the input parameters specific to the Fake List Chat model.
 */
export interface FakeChatInput extends BaseChatModelParams {
  /** Responses to return */
  responses: string[];

  /** Time to sleep in milliseconds between responses */
  sleep?: number;

  emitCustomEvent?: boolean;

  /**
   * Generation info to include on the last chunk during streaming.
   * This gets merged into response_metadata by the base chat model.
   * Useful for testing response_metadata propagation (e.g., finish_reason).
   */
  generationInfo?: Record<string, unknown>;
}

export interface FakeListChatModelCallOptions extends BaseChatModelCallOptions {
  thrownErrorString?: string;
}

/**
 * A fake Chat Model that returns a predefined list of responses. It can be used
 * for testing purposes.
 * @example
 * ```typescript
 * const chat = new FakeListChatModel({
 *   responses: ["I'll callback later.", "You 'console' them!"]
 * });
 *
 * const firstMessage = new HumanMessage("You want to hear a JavaScript joke?");
 * const secondMessage = new HumanMessage("How do you cheer up a JavaScript developer?");
 *
 * // Call the chat model with a message and log the response
 * const firstResponse = await chat.call([firstMessage]);
 * console.log({ firstResponse });
 *
 * const secondResponse = await chat.call([secondMessage]);
 * console.log({ secondResponse });
 * ```
 */
export class FakeListChatModel extends BaseChatModel<FakeListChatModelCallOptions> {
  static lc_name() {
    return "FakeListChatModel";
  }

  lc_serializable = true;

  responses: string[];

  i = 0;

  sleep?: number;

  emitCustomEvent = false;

  generationInfo?: Record<string, unknown>;

  private tools: (StructuredTool | ToolSpec)[] = [];

  toolStyle: "openai" | "anthropic" | "bedrock" | "google" = "openai";

  constructor(params: FakeChatInput) {
    super(params);
    const { responses, sleep, emitCustomEvent, generationInfo } = params;
    this.responses = responses;
    this.sleep = sleep;
    this.emitCustomEvent = emitCustomEvent ?? this.emitCustomEvent;
    this.generationInfo = generationInfo;
  }
  _combineLLMOutput() {
    return [];
  }

  _llmType(): string {
    return "fake-list";
  }

  async _generate(
    _messages: BaseMessage[],
    options?: this["ParsedCallOptions"],
    runManager?: CallbackManagerForLLMRun
  ): Promise<ChatResult> {
    await this._sleepIfRequested();
    if (options?.thrownErrorString) {
      throw new Error(options.thrownErrorString);
    }
    if (this.emitCustomEvent) {
      await runManager?.handleCustomEvent("some_test_event", {
        someval: true,
      });
    }

    if (options?.stop?.length) {
      return {
        generations: [this._formatGeneration(options.stop[0])],
      };
    } else {
      const response = this._currentResponse();
      this._incrementResponse();

      return {
        generations: [this._formatGeneration(response)],
        llmOutput: {},
      };
    }
  }

  _formatGeneration(text: string) {
    return {
      message: new AIMessage(text),
      text,
    };
  }

  async *_streamResponseChunks(
    _messages: BaseMessage[],
    options: this["ParsedCallOptions"],
    runManager?: CallbackManagerForLLMRun
  ): AsyncGenerator<ChatGenerationChunk> {
    const response = this._currentResponse();
    this._incrementResponse();
    if (this.emitCustomEvent) {
      await runManager?.handleCustomEvent("some_test_event", {
        someval: true,
      });
    }

    const responseChars = [...response];
    for (let i = 0; i < responseChars.length; i++) {
      const text = responseChars[i];
      const isLastChunk = i === responseChars.length - 1;
      await this._sleepIfRequested();
      if (options?.thrownErrorString) {
        throw new Error(options.thrownErrorString);
      }
      // Include generationInfo on the last chunk (like real providers do)
      // This gets merged into response_metadata by the base chat model
      const chunk = this._createResponseChunk(
        text,
        isLastChunk ? this.generationInfo : undefined
      );
      if (options.signal?.aborted) break;
      yield chunk;
      // eslint-disable-next-line no-void
      void runManager?.handleLLMNewToken(text);
    }
  }

  async _sleepIfRequested() {
    if (this.sleep !== undefined) {
      await this._sleep();
    }
  }

  async _sleep() {
    return new Promise<void>((resolve) => {
      setTimeout(() => resolve(), this.sleep);
    });
  }

  _createResponseChunk(
    text: string,
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    generationInfo?: Record<string, any>
  ): ChatGenerationChunk {
    return new ChatGenerationChunk({
      message: new AIMessageChunk({ content: text }),
      text,
      generationInfo,
    });
  }

  _currentResponse() {
    return this.responses[this.i];
  }

  _incrementResponse() {
    if (this.i < this.responses.length - 1) {
      this.i += 1;
    } else {
      this.i = 0;
    }
  }

  bindTools(tools: (StructuredTool | ToolSpec)[]) {
    const merged = [...this.tools, ...tools];

    const toolDicts = merged.map((t) => {
      switch (this.toolStyle) {
        case "openai":
          return {
            type: "function",
            function: {
              name: t.name,
              description: t.description,
              parameters: toJsonSchema(t.schema),
            },
          };
        case "anthropic":
          return {
            name: t.name,
            description: t.description,
            input_schema: toJsonSchema(t.schema),
          };
        case "bedrock":
          return {
            toolSpec: {
              name: t.name,
              description: t.description,
              inputSchema: toJsonSchema(t.schema),
            },
          };
        case "google":
          return {
            name: t.name,
            description: t.description,
            parameters: toJsonSchema(t.schema),
          };
        default:
          throw new Error(`Unsupported tool style: ${this.toolStyle}`);
      }
    });

    const wrapped =
      this.toolStyle === "google"
        ? [{ functionDeclarations: toolDicts }]
        : toolDicts;

    const next = new FakeListChatModel({
      responses: this.responses,
      sleep: this.sleep,
      emitCustomEvent: this.emitCustomEvent,
      generationInfo: this.generationInfo,
    });
    next.tools = merged;
    next.toolStyle = this.toolStyle;
    next.i = this.i;

    return next.withConfig({ tools: wrapped } as BaseChatModelCallOptions);
  }
  withStructuredOutput<
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    RunOutput extends Record<string, any> = Record<string, any>
  >(
    _params:
      | StructuredOutputMethodParams<RunOutput, false>
      | InteropZodType<RunOutput>
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      | Record<string, any>,
    config?: StructuredOutputMethodOptions<false>
  ): Runnable<BaseLanguageModelInput, RunOutput>;

  withStructuredOutput<
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    RunOutput extends Record<string, any> = Record<string, any>
  >(
    _params:
      | StructuredOutputMethodParams<RunOutput, true>
      | InteropZodType<RunOutput>
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      | Record<string, any>,
    config?: StructuredOutputMethodOptions<true>
  ): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;

  withStructuredOutput<
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    RunOutput extends Record<string, any> = Record<string, any>
  >(
    _params:
      | StructuredOutputMethodParams<RunOutput, boolean>
      | InteropZodType<RunOutput>
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      | Record<string, any>,
    _config?: StructuredOutputMethodOptions<boolean>
  ):
    | Runnable<BaseLanguageModelInput, RunOutput>
    | Runnable<
        BaseLanguageModelInput,
        { raw: BaseMessage; parsed: RunOutput }
      > {
    return RunnableLambda.from(async (input) => {
      const message = await this.invoke(input);
      if (message.tool_calls?.[0]?.args) {
        return message.tool_calls[0].args as RunOutput;
      }
      if (typeof message.content === "string") {
        return JSON.parse(message.content);
      }
      throw new Error("No structured output found");
    }) as Runnable;
  }
}
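/*
 * Usage sketch for `FakeListChatModel` (editorial illustration): responses are
 * handed out in order and wrap around once exhausted, and a configured
 * `generationInfo` rides on the final streamed chunk, from where the base chat
 * model merges it into `response_metadata`.
 *
 * ```typescript
 * const chat = new FakeListChatModel({
 *   responses: ["first", "second"],
 *   generationInfo: { finish_reason: "stop" },
 * });
 *
 * await chat.invoke("a"); // content === "first"
 * await chat.invoke("b"); // content === "second"
 * await chat.invoke("c"); // wraps around: content === "first"
 *
 * let finalMessage;
 * for await (const chunk of await chat.stream("d")) {
 *   finalMessage = finalMessage ? finalMessage.concat(chunk) : chunk;
 * }
 * // finalMessage?.response_metadata.finish_reason === "stop"
 * ```
 */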
}\n}\n"],"mappings":";;;;;;;;AAwDA,IAAa,gBAAb,cAAmC,cAAc;CAC/C,oBAAoB;AAClB,SAAO,CAAE;CACV;CAED,WAAmB;AACjB,SAAO;CACR;CAED,MAAM,UACJA,UACAC,SACAC,YACqB;AACrB,MAAI,SAAS,MAAM,OACjB,QAAO,EACL,aAAa,CACX;GACE,SAAS,IAAI,UAAU,QAAQ,KAAK;GACpC,MAAM,QAAQ,KAAK;EACpB,CACF,EACF;EAEH,MAAM,OAAO,SACV,IAAI,CAAC,MAAM;AACV,OAAI,OAAO,EAAE,YAAY,SACvB,QAAO,EAAE;AAEX,UAAO,KAAK,UAAU,EAAE,SAAS,MAAM,EAAE;EAC1C,EAAC,CACD,KAAK,KAAK;EACb,MAAM,YAAY,kBAAkB,KAAK;AACzC,SAAO;GACL,aAAa,CACX;IACE,SAAS,IAAI,UAAU;IACvB;GACD,CACF;GACD,WAAW,CAAE;EACd;CACF;AACF;AAED,IAAa,yBAAb,MAAa,+BAA+B,cAAiD;CAC3F,QAAQ;CAER,YAA2B,CAAE;CAE7B,SAA2B,CAAE;CAE7B,YAA2D;CAE3D;CAEA,AAAQ,QAAuC,CAAE;CAEjD,YAAY,EACV,QAAQ,IACR,YAAY,CAAE,GACd,SAAS,CAAE,GACX,YAAY,UACZ,kBACA,GAAG,MAC0C,EAAE;EAC/C,MAAM,KAAK;EACX,KAAK,QAAQ;EACb,KAAK,YAAY;EACjB,KAAK,SAAS;EACd,KAAK,YAAY;EACjB,KAAK,oBAAoB;CAC1B;CAED,WAAW;AACT,SAAO;CACR;CAED,UAAUC,OAAsC;EAC9C,MAAM,SAAS,CAAC,GAAG,KAAK,OAAO,GAAG,KAAM;EAExC,MAAM,YAAY,OAAO,IAAI,CAAC,MAAM;AAClC,WAAQ,KAAK,WAAb;IACE,KAAK,SACH,QAAO;KACL,MAAM;KACN,UAAU;MACR,MAAM,EAAE;MACR,aAAa,EAAE;MACf,YAAY,aAAa,EAAE,OAAO;KACnC;IACF;IACH,KAAK,YACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,cAAc,aAAa,EAAE,OAAO;IACrC;IACH,KAAK,UACH,QAAO,EACL,UAAU;KACR,MAAM,EAAE;KACR,aAAa,EAAE;KACf,aAAa,aAAa,EAAE,OAAO;IACpC,EACF;IACH,KAAK,SACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,YAAY,aAAa,EAAE,OAAO;IACnC;IACH,QACE,OAAM,IAAI,MAAM,CAAC,wBAAwB,EAAE,KAAK,WAAW;GAC9D;EACF,EAAC;EAEF,MAAM,UACJ,KAAK,cAAc,WACf,CAAC,EAAE,sBAAsB,UAAW,CAAC,IACrC;EAGN,MAAM,OAAO,IAAI,uBAAuB;GACtC,OAAO,KAAK;GACZ,WAAW,KAAK;GAChB,QAAQ,KAAK;GACb,WAAW,KAAK;GAChB,mBAAmB,KAAK;EACzB;EACD,KAAK,QAAQ;AAEb,SAAO,KAAK,WAAW,EAAE,OAAO,QAAS,EAA6B;CACvE;CAED,MAAM,UACJH,UACAI,UACAC,aACqB;AACrB,MAAI,KAAK,kBACP,OAAM,IAAI,MAAM,KAAK;EAGvB,MAAM,UAAU,KAAK,YAAY,IAAI,WAAW,SAAS,GAAG,WAAW;EAEvE,MAAMC,aAAyB,EAC7B,aAAa,CACX;GACE,MAAM;GACN,SAAS,IAAI,UAAU;IACrB;IACA,YAAY,KAAK,SAAS,IAAI;GAC/B;EACF,CACF,EACF;AAED,SAAO;CACR;CAED,OAAO,sBACLC,WACAC,SACAN,YACqC;AACrC,MAAI,KAAK,kBACP,OAAM,IAAI,MAAM,KAAK;AAEvB,MAAI,KAAK,QAAQ,QAAQ;AACvB,QAAK,MAAM,YAAY,KAAK,QAAQ;IAClC,MAAM,KAAK,IAAI,oBAAoB;KACjC,SAAS,IAAI,eAAe;MAC1B,SAAS,SAAS;MAClB,YAAY,SAAS;MACrB,mBAAmB,SAAS,qBAAqB,CAAE;KACpD;KACD,MAAM,SAAS,SAAS,UAAU,IAAI;IACvC;AAED,QAAI,QAAQ,QAAQ,QAAS;IAC7B,MAAM;IACN,MAAM,YAAY,kBAChB,SAAS,SACT,QACA,QACA,QACA,QACA,EAAE,OAAO,GAAI,EACd;GACF;AACD;EACD;EAED,MAAM,WACJ,KAAK,YAAY,MACjB,IAAI,UACF,OAAO,UAAU,GAAG,YAAY,WAAW,UAAU,GAAG,UAAU;EAEtE,MAAM,OAAO,OAAO,SAAS,YAAY,WAAW,SAAS,UAAU;AAEvE,OAAK,MAAM,MAAM,MAAM;GACrB,MAAM,IAAI,QAAQ,CAAC,MAAM,WAAW,GAAG,KAAK,MAAM;GAClD,MAAM,KAAK,IAAI,oBAAoB;IACjC,SAAS,IAAI,eAAe,EAAE,SAAS,GAAI;IAC3C,MAAM;GACP;AACD,OAAI,QAAQ,QAAQ,QAAS;GAC7B,MAAM;GACN,MAAM,YAAY,kBAChB,IACA,QACA,QACA,QACA,QACA,EAAE,OAAO,GAAI,EACd;EACF;CACF;AACF;;;;;;;;;;;;;;;;;;;;;AA8CD,IAAa,oBAAb,MAAa,0BAA0B,cAA4C;CACjF,OAAO,UAAU;AACf,SAAO;CACR;CAED,kBAAkB;CAElB;CAEA,IAAI;CAEJ;CAEA,kBAAkB;CAElB;CAEA,AAAQ,QAAuC,CAAE;CAEjD,YAA2D;CAE3D,YAAYO,QAAuB;EACjC,MAAM,OAAO;EACb,MAAM,EAAE,WAAW,OAAO,iBAAiB,gBAAgB,GAAG;EAC9D,KAAK,YAAY;EACjB,KAAK,QAAQ;EACb,KAAK,kBAAkB,mBAAmB,KAAK;EAC/C,KAAK,iBAAiB;CACvB;CAED,oBAAoB;AAClB,SAAO,CAAE;CACV;CAED,WAAmB;AACjB,SAAO;CACR;CAED,MAAM,UACJF,WACAN,SACAC,YACqB;EACrB,MAAM,KAAK,mBAAmB;AAC9B,MAAI,SAAS,kBACX,OAAM,IAAI,MAAM,QAAQ;AAE1B,MAAI,KAAK,iBACP,MAAM,YAAY,kBAAkB,mBAAmB,EACrD,SAAS,KACV,EAAC;AAGJ,MAAI,SAAS,MAAM,OACjB,QAAO,EACL,aAAa,CAAC,KAAK,kBAAkB,QAAQ,KAAK,GAAG,AAAC,EACvD;OACI;GACL,MAAM,WAAW,KAAK,kBAAkB;GACxC,KAAK,oBAAoB;AAEzB,UAAO;IACL,aAAa,CAAC,KAAK,kBAAkB,SAAS,AAAC;IAC/C,WAAW,CAAE;GACd;EACF;CACF;CAED,kBAAkBQ,MAAc;AAC9B,SAAO;GACL,SAAS,IAAI,UAAU;GACvB;EACD;CACF;
CAED,OAAO,sBACLH,WACAC,SACAN,YACqC;EACrC,MAAM,WAAW,KAAK,kBAAkB;EACxC,KAAK,oBAAoB;AACzB,MAAI,KAAK,iBACP,MAAM,YAAY,kBAAkB,mBAAmB,EACrD,SAAS,KACV,EAAC;EAGJ,MAAM,gBAAgB,CAAC,GAAG,QAAS;AACnC,OAAK,IAAI,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;GAC7C,MAAM,OAAO,cAAc;GAC3B,MAAM,cAAc,MAAM,cAAc,SAAS;GACjD,MAAM,KAAK,mBAAmB;AAC9B,OAAI,SAAS,kBACX,OAAM,IAAI,MAAM,QAAQ;GAI1B,MAAM,QAAQ,KAAK,qBACjB,MACA,cAAc,KAAK,iBAAiB,OACrC;AACD,OAAI,QAAQ,QAAQ,QAAS;GAC7B,MAAM;GAED,YAAY,kBAAkB,KAAK;EACzC;CACF;CAED,MAAM,oBAAoB;AACxB,MAAI,KAAK,UAAU,QACjB,MAAM,KAAK,QAAQ;CAEtB;CAED,MAAM,SAAS;AACb,SAAO,IAAI,QAAc,CAAC,YAAY;GACpC,WAAW,MAAM,SAAS,EAAE,KAAK,MAAM;EACxC;CACF;CAED,qBACEQ,MAEAC,gBACqB;AACrB,SAAO,IAAI,oBAAoB;GAC7B,SAAS,IAAI,eAAe,EAAE,SAAS,KAAM;GAC7C;GACA;EACD;CACF;CAED,mBAAmB;AACjB,SAAO,KAAK,UAAU,KAAK;CAC5B;CAED,qBAAqB;AACnB,MAAI,KAAK,IAAI,KAAK,UAAU,SAAS,GACnC,KAAK,KAAK;OAEV,KAAK,IAAI;CAEZ;CAED,UAAUR,OAAsC;EAC9C,MAAM,SAAS,CAAC,GAAG,KAAK,OAAO,GAAG,KAAM;EAExC,MAAM,YAAY,OAAO,IAAI,CAAC,MAAM;AAClC,WAAQ,KAAK,WAAb;IACE,KAAK,SACH,QAAO;KACL,MAAM;KACN,UAAU;MACR,MAAM,EAAE;MACR,aAAa,EAAE;MACf,YAAY,aAAa,EAAE,OAAO;KACnC;IACF;IACH,KAAK,YACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,cAAc,aAAa,EAAE,OAAO;IACrC;IACH,KAAK,UACH,QAAO,EACL,UAAU;KACR,MAAM,EAAE;KACR,aAAa,EAAE;KACf,aAAa,aAAa,EAAE,OAAO;IACpC,EACF;IACH,KAAK,SACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,YAAY,aAAa,EAAE,OAAO;IACnC;IACH,QACE,OAAM,IAAI,MAAM,CAAC,wBAAwB,EAAE,KAAK,WAAW;GAC9D;EACF,EAAC;EAEF,MAAM,UACJ,KAAK,cAAc,WACf,CAAC,EAAE,sBAAsB,UAAW,CAAC,IACrC;EAEN,MAAM,OAAO,IAAI,kBAAkB;GACjC,WAAW,KAAK;GAChB,OAAO,KAAK;GACZ,iBAAiB,KAAK;GACtB,gBAAgB,KAAK;EACtB;EACD,KAAK,QAAQ;EACb,KAAK,YAAY,KAAK;EACtB,KAAK,IAAI,KAAK;AAEd,SAAO,KAAK,WAAW,EAAE,OAAO,QAAS,EAA6B;CACvE;CA0BD,qBAIES,SAKAC,SAMI;AACJ,SAAO,eAAe,KAAK,OAAO,UAAU;GAC1C,MAAM,UAAU,MAAM,KAAK,OAAO,MAAM;AACxC,OAAI,QAAQ,aAAa,IAAI,KAC3B,QAAO,QAAQ,WAAW,GAAG;AAE/B,OAAI,OAAO,QAAQ,YAAY,SAC7B,QAAO,KAAK,MAAM,QAAQ,QAAQ;AAEpC,SAAM,IAAI,MAAM;EACjB,EAAC;CACH;AACF"}
|
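/*
 * Structured-output sketch (editorial illustration): `withStructuredOutput`
 * here ignores its schema argument; it returns the first tool call's `args`
 * when present and otherwise `JSON.parse`s string content, so a JSON response
 * string round-trips into an object. The `{ answer: number }` shape is an
 * arbitrary example, not anything the package prescribes.
 *
 * ```typescript
 * const chat = new FakeListChatModel({ responses: ['{"answer": 42}'] });
 * const structured = chat.withStructuredOutput<{ answer: number }>({});
 *
 * const out = await structured.invoke("any input");
 * // out.answer === 42
 * ```
 */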