@langchain/core 1.1.5 → 1.2.0-dev-1765937705265
This diff compares publicly available package versions as published to their respective public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +10 -0
- package/dist/caches/index.d.cts +2 -2
- package/dist/caches/index.d.cts.map +1 -1
- package/dist/caches/index.d.ts +2 -2
- package/dist/caches/index.d.ts.map +1 -1
- package/dist/callbacks/base.d.cts +2 -2
- package/dist/callbacks/base.d.cts.map +1 -1
- package/dist/callbacks/base.d.ts +2 -2
- package/dist/callbacks/base.d.ts.map +1 -1
- package/dist/load/index.d.cts.map +1 -1
- package/dist/messages/ai.cjs +13 -9
- package/dist/messages/ai.cjs.map +1 -1
- package/dist/messages/ai.d.cts +25 -5
- package/dist/messages/ai.d.cts.map +1 -1
- package/dist/messages/ai.d.ts +25 -5
- package/dist/messages/ai.d.ts.map +1 -1
- package/dist/messages/ai.js +13 -9
- package/dist/messages/ai.js.map +1 -1
- package/dist/messages/base.cjs +2 -2
- package/dist/messages/base.cjs.map +1 -1
- package/dist/messages/base.d.cts +1 -1
- package/dist/messages/base.d.cts.map +1 -1
- package/dist/messages/base.d.ts +1 -1
- package/dist/messages/base.d.ts.map +1 -1
- package/dist/messages/base.js +2 -2
- package/dist/messages/base.js.map +1 -1
- package/dist/messages/index.d.cts +2 -2
- package/dist/messages/index.d.ts +2 -2
- package/dist/messages/message.cjs.map +1 -1
- package/dist/messages/message.d.cts +62 -6
- package/dist/messages/message.d.cts.map +1 -1
- package/dist/messages/message.d.ts +62 -6
- package/dist/messages/message.d.ts.map +1 -1
- package/dist/messages/message.js.map +1 -1
- package/dist/messages/tool.cjs.map +1 -1
- package/dist/messages/tool.d.cts +10 -0
- package/dist/messages/tool.d.cts.map +1 -1
- package/dist/messages/tool.d.ts +10 -0
- package/dist/messages/tool.d.ts.map +1 -1
- package/dist/messages/tool.js.map +1 -1
- package/dist/messages/utils.d.cts +3 -3
- package/dist/messages/utils.d.cts.map +1 -1
- package/dist/messages/utils.d.ts +3 -3
- package/dist/messages/utils.d.ts.map +1 -1
- package/dist/prompt_values.d.cts +4 -4
- package/dist/prompt_values.d.cts.map +1 -1
- package/dist/prompt_values.d.ts +4 -4
- package/dist/prompt_values.d.ts.map +1 -1
- package/dist/prompts/chat.d.cts +2 -2
- package/dist/prompts/chat.d.cts.map +1 -1
- package/dist/prompts/chat.d.ts +2 -2
- package/dist/prompts/chat.d.ts.map +1 -1
- package/dist/stores.d.cts.map +1 -1
- package/dist/stores.d.ts.map +1 -1
- package/dist/tools/index.cjs +2 -1
- package/dist/tools/index.cjs.map +1 -1
- package/dist/tools/index.d.cts +17 -12
- package/dist/tools/index.d.cts.map +1 -1
- package/dist/tools/index.d.ts +17 -12
- package/dist/tools/index.d.ts.map +1 -1
- package/dist/tools/index.js +2 -1
- package/dist/tools/index.js.map +1 -1
- package/dist/tracers/event_stream.cjs +5 -1
- package/dist/tracers/event_stream.cjs.map +1 -1
- package/dist/tracers/event_stream.d.cts.map +1 -1
- package/dist/tracers/event_stream.d.ts.map +1 -1
- package/dist/tracers/event_stream.js +5 -1
- package/dist/tracers/event_stream.js.map +1 -1
- package/dist/utils/event_source_parse.d.cts.map +1 -1
- package/dist/utils/stream.d.cts.map +1 -1
- package/dist/utils/testing/chat_models.cjs +4 -1
- package/dist/utils/testing/chat_models.cjs.map +1 -1
- package/dist/utils/testing/chat_models.d.cts +5 -5
- package/dist/utils/testing/chat_models.d.cts.map +1 -1
- package/dist/utils/testing/chat_models.d.ts +5 -5
- package/dist/utils/testing/chat_models.d.ts.map +1 -1
- package/dist/utils/testing/chat_models.js +4 -1
- package/dist/utils/testing/chat_models.js.map +1 -1
- package/dist/vectorstores.d.cts.map +1 -1
- package/package.json +2 -2
- package/agents.cjs +0 -1
- package/agents.d.cts +0 -1
- package/agents.d.ts +0 -1
- package/agents.js +0 -1
- package/caches.cjs +0 -1
- package/caches.d.cts +0 -1
- package/caches.d.ts +0 -1
- package/caches.js +0 -1
- package/callbacks/base.cjs +0 -1
- package/callbacks/base.d.cts +0 -1
- package/callbacks/base.d.ts +0 -1
- package/callbacks/base.js +0 -1
- package/callbacks/dispatch/web.cjs +0 -1
- package/callbacks/dispatch/web.d.cts +0 -1
- package/callbacks/dispatch/web.d.ts +0 -1
- package/callbacks/dispatch/web.js +0 -1
- package/callbacks/dispatch.cjs +0 -1
- package/callbacks/dispatch.d.cts +0 -1
- package/callbacks/dispatch.d.ts +0 -1
- package/callbacks/dispatch.js +0 -1
- package/callbacks/manager.cjs +0 -1
- package/callbacks/manager.d.cts +0 -1
- package/callbacks/manager.d.ts +0 -1
- package/callbacks/manager.js +0 -1
- package/callbacks/promises.cjs +0 -1
- package/callbacks/promises.d.cts +0 -1
- package/callbacks/promises.d.ts +0 -1
- package/callbacks/promises.js +0 -1
- package/chat_history.cjs +0 -1
- package/chat_history.d.cts +0 -1
- package/chat_history.d.ts +0 -1
- package/chat_history.js +0 -1
- package/context.cjs +0 -1
- package/context.d.cts +0 -1
- package/context.d.ts +0 -1
- package/context.js +0 -1
- package/document_loaders/base.cjs +0 -1
- package/document_loaders/base.d.cts +0 -1
- package/document_loaders/base.d.ts +0 -1
- package/document_loaders/base.js +0 -1
- package/document_loaders/langsmith.cjs +0 -1
- package/document_loaders/langsmith.d.cts +0 -1
- package/document_loaders/langsmith.d.ts +0 -1
- package/document_loaders/langsmith.js +0 -1
- package/documents.cjs +0 -1
- package/documents.d.cts +0 -1
- package/documents.d.ts +0 -1
- package/documents.js +0 -1
- package/embeddings.cjs +0 -1
- package/embeddings.d.cts +0 -1
- package/embeddings.d.ts +0 -1
- package/embeddings.js +0 -1
- package/example_selectors.cjs +0 -1
- package/example_selectors.d.cts +0 -1
- package/example_selectors.d.ts +0 -1
- package/example_selectors.js +0 -1
- package/indexing.cjs +0 -1
- package/indexing.d.cts +0 -1
- package/indexing.d.ts +0 -1
- package/indexing.js +0 -1
- package/language_models/base.cjs +0 -1
- package/language_models/base.d.cts +0 -1
- package/language_models/base.d.ts +0 -1
- package/language_models/base.js +0 -1
- package/language_models/chat_models.cjs +0 -1
- package/language_models/chat_models.d.cts +0 -1
- package/language_models/chat_models.d.ts +0 -1
- package/language_models/chat_models.js +0 -1
- package/language_models/llms.cjs +0 -1
- package/language_models/llms.d.cts +0 -1
- package/language_models/llms.d.ts +0 -1
- package/language_models/llms.js +0 -1
- package/language_models/profile.cjs +0 -1
- package/language_models/profile.d.cts +0 -1
- package/language_models/profile.d.ts +0 -1
- package/language_models/profile.js +0 -1
- package/load/serializable.cjs +0 -1
- package/load/serializable.d.cts +0 -1
- package/load/serializable.d.ts +0 -1
- package/load/serializable.js +0 -1
- package/load.cjs +0 -1
- package/load.d.cts +0 -1
- package/load.d.ts +0 -1
- package/load.js +0 -1
- package/memory.cjs +0 -1
- package/memory.d.cts +0 -1
- package/memory.d.ts +0 -1
- package/memory.js +0 -1
- package/messages/tool.cjs +0 -1
- package/messages/tool.d.cts +0 -1
- package/messages/tool.d.ts +0 -1
- package/messages/tool.js +0 -1
- package/messages.cjs +0 -1
- package/messages.d.cts +0 -1
- package/messages.d.ts +0 -1
- package/messages.js +0 -1
- package/output_parsers/openai_functions.cjs +0 -1
- package/output_parsers/openai_functions.d.cts +0 -1
- package/output_parsers/openai_functions.d.ts +0 -1
- package/output_parsers/openai_functions.js +0 -1
- package/output_parsers/openai_tools.cjs +0 -1
- package/output_parsers/openai_tools.d.cts +0 -1
- package/output_parsers/openai_tools.d.ts +0 -1
- package/output_parsers/openai_tools.js +0 -1
- package/output_parsers.cjs +0 -1
- package/output_parsers.d.cts +0 -1
- package/output_parsers.d.ts +0 -1
- package/output_parsers.js +0 -1
- package/outputs.cjs +0 -1
- package/outputs.d.cts +0 -1
- package/outputs.d.ts +0 -1
- package/outputs.js +0 -1
- package/prompt_values.cjs +0 -1
- package/prompt_values.d.cts +0 -1
- package/prompt_values.d.ts +0 -1
- package/prompt_values.js +0 -1
- package/prompts.cjs +0 -1
- package/prompts.d.cts +0 -1
- package/prompts.d.ts +0 -1
- package/prompts.js +0 -1
- package/retrievers/document_compressors.cjs +0 -1
- package/retrievers/document_compressors.d.cts +0 -1
- package/retrievers/document_compressors.d.ts +0 -1
- package/retrievers/document_compressors.js +0 -1
- package/retrievers.cjs +0 -1
- package/retrievers.d.cts +0 -1
- package/retrievers.d.ts +0 -1
- package/retrievers.js +0 -1
- package/runnables/graph.cjs +0 -1
- package/runnables/graph.d.cts +0 -1
- package/runnables/graph.d.ts +0 -1
- package/runnables/graph.js +0 -1
- package/runnables.cjs +0 -1
- package/runnables.d.cts +0 -1
- package/runnables.d.ts +0 -1
- package/runnables.js +0 -1
- package/singletons.cjs +0 -1
- package/singletons.d.cts +0 -1
- package/singletons.d.ts +0 -1
- package/singletons.js +0 -1
- package/stores.cjs +0 -1
- package/stores.d.cts +0 -1
- package/stores.d.ts +0 -1
- package/stores.js +0 -1
- package/structured_query.cjs +0 -1
- package/structured_query.d.cts +0 -1
- package/structured_query.d.ts +0 -1
- package/structured_query.js +0 -1
- package/tools.cjs +0 -1
- package/tools.d.cts +0 -1
- package/tools.d.ts +0 -1
- package/tools.js +0 -1
- package/tracers/base.cjs +0 -1
- package/tracers/base.d.cts +0 -1
- package/tracers/base.d.ts +0 -1
- package/tracers/base.js +0 -1
- package/tracers/console.cjs +0 -1
- package/tracers/console.d.cts +0 -1
- package/tracers/console.d.ts +0 -1
- package/tracers/console.js +0 -1
- package/tracers/log_stream.cjs +0 -1
- package/tracers/log_stream.d.cts +0 -1
- package/tracers/log_stream.d.ts +0 -1
- package/tracers/log_stream.js +0 -1
- package/tracers/run_collector.cjs +0 -1
- package/tracers/run_collector.d.cts +0 -1
- package/tracers/run_collector.d.ts +0 -1
- package/tracers/run_collector.js +0 -1
- package/tracers/tracer_langchain.cjs +0 -1
- package/tracers/tracer_langchain.d.cts +0 -1
- package/tracers/tracer_langchain.d.ts +0 -1
- package/tracers/tracer_langchain.js +0 -1
- package/types/stream.cjs +0 -1
- package/types/stream.d.cts +0 -1
- package/types/stream.d.ts +0 -1
- package/types/stream.js +0 -1
- package/utils/async_caller.cjs +0 -1
- package/utils/async_caller.d.cts +0 -1
- package/utils/async_caller.d.ts +0 -1
- package/utils/async_caller.js +0 -1
- package/utils/chunk_array.cjs +0 -1
- package/utils/chunk_array.d.cts +0 -1
- package/utils/chunk_array.d.ts +0 -1
- package/utils/chunk_array.js +0 -1
- package/utils/env.cjs +0 -1
- package/utils/env.d.cts +0 -1
- package/utils/env.d.ts +0 -1
- package/utils/env.js +0 -1
- package/utils/event_source_parse.cjs +0 -1
- package/utils/event_source_parse.d.cts +0 -1
- package/utils/event_source_parse.d.ts +0 -1
- package/utils/event_source_parse.js +0 -1
- package/utils/format.cjs +0 -1
- package/utils/format.d.cts +0 -1
- package/utils/format.d.ts +0 -1
- package/utils/format.js +0 -1
- package/utils/function_calling.cjs +0 -1
- package/utils/function_calling.d.cts +0 -1
- package/utils/function_calling.d.ts +0 -1
- package/utils/function_calling.js +0 -1
- package/utils/hash.cjs +0 -1
- package/utils/hash.d.cts +0 -1
- package/utils/hash.d.ts +0 -1
- package/utils/hash.js +0 -1
- package/utils/json_patch.cjs +0 -1
- package/utils/json_patch.d.cts +0 -1
- package/utils/json_patch.d.ts +0 -1
- package/utils/json_patch.js +0 -1
- package/utils/json_schema.cjs +0 -1
- package/utils/json_schema.d.cts +0 -1
- package/utils/json_schema.d.ts +0 -1
- package/utils/json_schema.js +0 -1
- package/utils/math.cjs +0 -1
- package/utils/math.d.cts +0 -1
- package/utils/math.d.ts +0 -1
- package/utils/math.js +0 -1
- package/utils/stream.cjs +0 -1
- package/utils/stream.d.cts +0 -1
- package/utils/stream.d.ts +0 -1
- package/utils/stream.js +0 -1
- package/utils/testing.cjs +0 -1
- package/utils/testing.d.cts +0 -1
- package/utils/testing.d.ts +0 -1
- package/utils/testing.js +0 -1
- package/utils/tiktoken.cjs +0 -1
- package/utils/tiktoken.d.cts +0 -1
- package/utils/tiktoken.d.ts +0 -1
- package/utils/tiktoken.js +0 -1
- package/utils/types.cjs +0 -1
- package/utils/types.d.cts +0 -1
- package/utils/types.d.ts +0 -1
- package/utils/types.js +0 -1
- package/vectorstores.cjs +0 -1
- package/vectorstores.d.cts +0 -1
- package/vectorstores.d.ts +0 -1
- package/vectorstores.js +0 -1
@@ -1 +1 @@ package/dist/utils/event_source_parse.d.cts.map
-[old single-line sourcemap JSON for event_source_parse.d.cts; truncated in the extracted view, omitted here]
+[regenerated single-line sourcemap JSON; same embedded TypeScript declarations (EventStreamContentType, EventSourceMessage, getBytes, getLines, getMessages, convertEventStreamToIterableReadableDataStream), updated "mappings" payload]
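The map's embedded source carries the public declarations for `@langchain/core/utils/event_source_parse`: `getLines` turns byte chunks into line buffers, and `getMessages` turns line buffers into `EventSourceMessage`s. A minimal sketch of composing them, assuming only those declared signatures and the package's `utils/event_source_parse` subpath export:

```typescript
import { getLines, getMessages } from "@langchain/core/utils/event_source_parse";

// Compose the declared parsers: raw bytes -> line buffers -> EventSourceMessages.
const onChunk = getLines(
  getMessages(
    (msg) => console.log("event:", msg.event, "data:", msg.data),
    (id) => console.log("last event id:", id),
    (retry) => console.log("retry interval (ms):", retry)
  )
);

// In SSE, a blank line terminates an event, so this chunk emits one message.
onChunk(new TextEncoder().encode("event: token\ndata: hello\n\n"));
```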
@@ -1 +1 @@ package/dist/utils/stream.d.cts.map
-[old single-line sourcemap JSON for stream.d.cts; truncated in the extracted view, omitted here]
+[regenerated single-line sourcemap JSON; same embedded TypeScript declarations (IterableReadableStream, atee, concat, AsyncGeneratorWithSetup, pipeGeneratorWithSetup), updated "mappings" payload]
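The stream map's embedded source likewise documents `@langchain/core/utils/stream`. A small sketch of `IterableReadableStream.fromAsyncGenerator`, assuming only the declared static signature: it adapts an async generator so the result can be consumed both as a `ReadableStream` and with `for await`:

```typescript
import { IterableReadableStream } from "@langchain/core/utils/stream";

async function* letters() {
  yield "a";
  yield "b";
  yield "c";
}

// Wrap the generator; the result is a ReadableStream that is also async-iterable.
const stream = IterableReadableStream.fromAsyncGenerator(letters());

for await (const chunk of stream) {
  console.log(chunk); // "a", "b", "c"
}
```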
@@ -103,7 +103,7 @@ var FakeStreamingChatModel = class FakeStreamingChatModel extends require_language_models_chat_models.BaseChatModel {
       }] };
     return generation;
   }
-  async *_streamResponseChunks(_messages, _options, runManager) {
+  async *_streamResponseChunks(_messages, options, runManager) {
     if (this.thrownErrorString) throw new Error(this.thrownErrorString);
     if (this.chunks?.length) {
       for (const msgChunk of this.chunks) {
@@ -115,6 +115,7 @@ var FakeStreamingChatModel = class FakeStreamingChatModel extends require_language_models_chat_models.BaseChatModel {
           }),
           text: msgChunk.content?.toString() ?? ""
         });
+        if (options.signal?.aborted) break;
         yield cg;
         await runManager?.handleLLMNewToken(msgChunk.content, void 0, void 0, void 0, void 0, { chunk: cg });
       }
@@ -128,6 +129,7 @@ var FakeStreamingChatModel = class FakeStreamingChatModel extends require_language_models_chat_models.BaseChatModel {
         message: new require_ai.AIMessageChunk({ content: ch }),
         text: ch
       });
+      if (options.signal?.aborted) break;
      yield cg;
      await runManager?.handleLLMNewToken(ch, void 0, void 0, void 0, void 0, { chunk: cg });
    }
@@ -210,6 +212,7 @@ var FakeListChatModel = class FakeListChatModel extends require_language_models_chat_models.BaseChatModel {
       await this._sleepIfRequested();
       if (options?.thrownErrorString) throw new Error(options.thrownErrorString);
       const chunk = this._createResponseChunk(text, isLastChunk ? this.generationInfo : void 0);
+      if (options.signal?.aborted) break;
       yield chunk;
       runManager?.handleLLMNewToken(text);
     }
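Taken together, the three hunks above add cooperative cancellation to the fake chat models: the previously unused `_options` parameter becomes `options`, and each streaming loop now checks `options.signal?.aborted` before yielding, so a fired `AbortSignal` stops the stream at the next chunk boundary. A sketch of exercising this through the standard `signal` call option on `.stream()`, assuming the `FakeListChatModel` export from `@langchain/core/utils/testing`; depending on how the base model races the signal, the consumer may instead see an `AbortError`, so the sketch catches it:

```typescript
import { FakeListChatModel } from "@langchain/core/utils/testing";

const model = new FakeListChatModel({
  responses: ["this response will be cut short"],
  sleep: 10, // ms pause between char-by-char chunks
});

const controller = new AbortController();
let seen = "";

try {
  const stream = await model.stream("hi", { signal: controller.signal });
  for await (const chunk of stream) {
    seen += chunk.content;
    if (seen.length >= 4) controller.abort(); // cancel mid-stream
  }
} catch (e) {
  // The surrounding machinery may surface the abort as an error
  // rather than a quiet early end; either way only a prefix streams.
}

console.log(seen); // a prefix of the configured response
```

Checking the signal before `yield` (rather than after) means an aborted stream never hands the consumer a chunk produced after cancellation was requested.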
@@ -1 +1 @@ package/dist/utils/testing/chat_models.cjs.map
-[old single-line sourcemap JSON for chat_models.cjs; its embedded src/utils/testing/chat_models.ts source shows `_streamResponseChunks(_messages, _options, runManager)` with no abort handling; omitted here]
+[regenerated single-line sourcemap JSON, truncated in the extracted view; the embedded source now names the parameter `options` and inserts `if (options.signal?.aborted) break;` before each `yield` in FakeStreamingChatModel._streamResponseChunks and FakeListChatModel._streamResponseChunks]
}\n}\n"],"mappings":";;;;;;;;AAwDA,IAAa,gBAAb,cAAmCA,kDAAc;CAC/C,oBAAoB;AAClB,SAAO,CAAE;CACV;CAED,WAAmB;AACjB,SAAO;CACR;CAED,MAAM,UACJC,UACAC,SACAC,YACqB;AACrB,MAAI,SAAS,MAAM,OACjB,QAAO,EACL,aAAa,CACX;GACE,SAAS,IAAIC,qBAAU,QAAQ,KAAK;GACpC,MAAM,QAAQ,KAAK;EACpB,CACF,EACF;EAEH,MAAM,OAAO,SACV,IAAI,CAAC,MAAM;AACV,OAAI,OAAO,EAAE,YAAY,SACvB,QAAO,EAAE;AAEX,UAAO,KAAK,UAAU,EAAE,SAAS,MAAM,EAAE;EAC1C,EAAC,CACD,KAAK,KAAK;EACb,MAAM,YAAY,kBAAkB,KAAK;AACzC,SAAO;GACL,aAAa,CACX;IACE,SAAS,IAAIA,qBAAU;IACvB;GACD,CACF;GACD,WAAW,CAAE;EACd;CACF;AACF;AAED,IAAa,yBAAb,MAAa,+BAA+BJ,kDAAiD;CAC3F,QAAQ;CAER,YAA2B,CAAE;CAE7B,SAA2B,CAAE;CAE7B,YAA2D;CAE3D;CAEA,AAAQ,QAAuC,CAAE;CAEjD,YAAY,EACV,QAAQ,IACR,YAAY,CAAE,GACd,SAAS,CAAE,GACX,YAAY,UACZ,kBACA,GAAG,MAC0C,EAAE;EAC/C,MAAM,KAAK;EACX,KAAK,QAAQ;EACb,KAAK,YAAY;EACjB,KAAK,SAAS;EACd,KAAK,YAAY;EACjB,KAAK,oBAAoB;CAC1B;CAED,WAAW;AACT,SAAO;CACR;CAED,UAAUK,OAAsC;EAC9C,MAAM,SAAS,CAAC,GAAG,KAAK,OAAO,GAAG,KAAM;EAExC,MAAM,YAAY,OAAO,IAAI,CAAC,MAAM;AAClC,WAAQ,KAAK,WAAb;IACE,KAAK,SACH,QAAO;KACL,MAAM;KACN,UAAU;MACR,MAAM,EAAE;MACR,aAAa,EAAE;MACf,YAAYC,uCAAa,EAAE,OAAO;KACnC;IACF;IACH,KAAK,YACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,cAAcA,uCAAa,EAAE,OAAO;IACrC;IACH,KAAK,UACH,QAAO,EACL,UAAU;KACR,MAAM,EAAE;KACR,aAAa,EAAE;KACf,aAAaA,uCAAa,EAAE,OAAO;IACpC,EACF;IACH,KAAK,SACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,YAAYA,uCAAa,EAAE,OAAO;IACnC;IACH,QACE,OAAM,IAAI,MAAM,CAAC,wBAAwB,EAAE,KAAK,WAAW;GAC9D;EACF,EAAC;EAEF,MAAM,UACJ,KAAK,cAAc,WACf,CAAC,EAAE,sBAAsB,UAAW,CAAC,IACrC;EAGN,MAAM,OAAO,IAAI,uBAAuB;GACtC,OAAO,KAAK;GACZ,WAAW,KAAK;GAChB,QAAQ,KAAK;GACb,WAAW,KAAK;GAChB,mBAAmB,KAAK;EACzB;EACD,KAAK,QAAQ;AAEb,SAAO,KAAK,WAAW,EAAE,OAAO,QAAS,EAA6B;CACvE;CAED,MAAM,UACJL,UACAM,UACAC,aACqB;AACrB,MAAI,KAAK,kBACP,OAAM,IAAI,MAAM,KAAK;EAGvB,MAAM,UAAU,KAAK,YAAY,IAAI,WAAW,SAAS,GAAG,WAAW;EAEvE,MAAMC,aAAyB,EAC7B,aAAa,CACX;GACE,MAAM;GACN,SAAS,IAAIL,qBAAU;IACrB;IACA,YAAY,KAAK,SAAS,IAAI;GAC/B;EACF,CACF,EACF;AAED,SAAO;CACR;CAED,OAAO,sBACLM,WACAC,SACAR,YACqC;AACrC,MAAI,KAAK,kBACP,OAAM,IAAI,MAAM,KAAK;AAEvB,MAAI,KAAK,QAAQ,QAAQ;AACvB,QAAK,MAAM,YAAY,KAAK,QAAQ;IAClC,MAAM,KAAK,IAAIS,oCAAoB;KACjC,SAAS,IAAIC,0BAAe;MAC1B,SAAS,SAAS;MAClB,YAAY,SAAS;MACrB,mBAAmB,SAAS,qBAAqB,CAAE;KACpD;KACD,MAAM,SAAS,SAAS,UAAU,IAAI;IACvC;AAED,QAAI,QAAQ,QAAQ,QAAS;IAC7B,MAAM;IACN,MAAM,YAAY,kBAChB,SAAS,SACT,QACA,QACA,QACA,QACA,EAAE,OAAO,GAAI,EACd;GACF;AACD;EACD;EAED,MAAM,WACJ,KAAK,YAAY,MACjB,IAAIT,qBACF,OAAO,UAAU,GAAG,YAAY,WAAW,UAAU,GAAG,UAAU;EAEtE,MAAM,OAAO,OAAO,SAAS,YAAY,WAAW,SAAS,UAAU;AAEvE,OAAK,MAAM,MAAM,MAAM;GACrB,MAAM,IAAI,QAAQ,CAAC,MAAM,WAAW,GAAG,KAAK,MAAM;GAClD,MAAM,KAAK,IAAIQ,oCAAoB;IACjC,SAAS,IAAIC,0BAAe,EAAE,SAAS,GAAI;IAC3C,MAAM;GACP;AACD,OAAI,QAAQ,QAAQ,QAAS;GAC7B,MAAM;GACN,MAAM,YAAY,kBAChB,IACA,QACA,QACA,QACA,QACA,EAAE,OAAO,GAAI,EACd;EACF;CACF;AACF;;;;;;;;;;;;;;;;;;;;;AA8CD,IAAa,oBAAb,MAAa,0BAA0Bb,kDAA4C;CACjF,OAAO,UAAU;AACf,SAAO;CACR;CAED,kBAAkB;CAElB;CAEA,IAAI;CAEJ;CAEA,kBAAkB;CAElB;CAEA,AAAQ,QAAuC,CAAE;CAEjD,YAA2D;CAE3D,YAAYc,QAAuB;EACjC,MAAM,OAAO;EACb,MAAM,EAAE,WAAW,OAAO,iBAAiB,gBAAgB,GAAG;EAC9D,KAAK,YAAY;EACjB,KAAK,QAAQ;EACb,KAAK,kBAAkB,mBAAmB,KAAK;EAC/C,KAAK,iBAAiB;CACvB;CAED,oBAAoB;AAClB,SAAO,CAAE;CACV;CAED,WAAmB;AACjB,SAAO;CACR;CAED,MAAM,UACJJ,WACAR,SACAC,YACqB;EACrB,MAAM,KAAK,mBAAmB;AAC9B,MAAI,SAAS,kBACX,OAAM,IAAI,MAAM,QAAQ;AAE1B,MAAI,KAAK,iBACP,MAAM,YAAY,kBAAkB,mBAAmB,EACrD,SAAS,KACV,EAAC;AAGJ,MAAI,SAAS,MAAM,OACjB,QAAO,EACL,aAAa,CAAC,KAAK,kBAAkB,QAAQ,KAAK,GAAG,AAAC,EACvD;OACI;GACL,MAAM,WAAW,KAAK,kBAAkB;GACxC,KAAK,oBAAoB;AAEzB,UAAO;IACL,aAAa,CAAC,KAAK,kBAAkB,SAAS,AAAC;IAC/C,WAAW,CAAE;GACd;EACF;CACF;CAED,kBAAkBY,MAAc;AAC9B,SAAO;GACL,SAA
S,IAAIX,qBAAU;GACvB;EACD;CACF;CAED,OAAO,sBACLM,WACAC,SACAR,YACqC;EACrC,MAAM,WAAW,KAAK,kBAAkB;EACxC,KAAK,oBAAoB;AACzB,MAAI,KAAK,iBACP,MAAM,YAAY,kBAAkB,mBAAmB,EACrD,SAAS,KACV,EAAC;EAGJ,MAAM,gBAAgB,CAAC,GAAG,QAAS;AACnC,OAAK,IAAI,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;GAC7C,MAAM,OAAO,cAAc;GAC3B,MAAM,cAAc,MAAM,cAAc,SAAS;GACjD,MAAM,KAAK,mBAAmB;AAC9B,OAAI,SAAS,kBACX,OAAM,IAAI,MAAM,QAAQ;GAI1B,MAAM,QAAQ,KAAK,qBACjB,MACA,cAAc,KAAK,iBAAiB,OACrC;AACD,OAAI,QAAQ,QAAQ,QAAS;GAC7B,MAAM;GAED,YAAY,kBAAkB,KAAK;EACzC;CACF;CAED,MAAM,oBAAoB;AACxB,MAAI,KAAK,UAAU,QACjB,MAAM,KAAK,QAAQ;CAEtB;CAED,MAAM,SAAS;AACb,SAAO,IAAI,QAAc,CAAC,YAAY;GACpC,WAAW,MAAM,SAAS,EAAE,KAAK,MAAM;EACxC;CACF;CAED,qBACEY,MAEAC,gBACqB;AACrB,SAAO,IAAIJ,oCAAoB;GAC7B,SAAS,IAAIC,0BAAe,EAAE,SAAS,KAAM;GAC7C;GACA;EACD;CACF;CAED,mBAAmB;AACjB,SAAO,KAAK,UAAU,KAAK;CAC5B;CAED,qBAAqB;AACnB,MAAI,KAAK,IAAI,KAAK,UAAU,SAAS,GACnC,KAAK,KAAK;OAEV,KAAK,IAAI;CAEZ;CAED,UAAUR,OAAsC;EAC9C,MAAM,SAAS,CAAC,GAAG,KAAK,OAAO,GAAG,KAAM;EAExC,MAAM,YAAY,OAAO,IAAI,CAAC,MAAM;AAClC,WAAQ,KAAK,WAAb;IACE,KAAK,SACH,QAAO;KACL,MAAM;KACN,UAAU;MACR,MAAM,EAAE;MACR,aAAa,EAAE;MACf,YAAYC,uCAAa,EAAE,OAAO;KACnC;IACF;IACH,KAAK,YACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,cAAcA,uCAAa,EAAE,OAAO;IACrC;IACH,KAAK,UACH,QAAO,EACL,UAAU;KACR,MAAM,EAAE;KACR,aAAa,EAAE;KACf,aAAaA,uCAAa,EAAE,OAAO;IACpC,EACF;IACH,KAAK,SACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,YAAYA,uCAAa,EAAE,OAAO;IACnC;IACH,QACE,OAAM,IAAI,MAAM,CAAC,wBAAwB,EAAE,KAAK,WAAW;GAC9D;EACF,EAAC;EAEF,MAAM,UACJ,KAAK,cAAc,WACf,CAAC,EAAE,sBAAsB,UAAW,CAAC,IACrC;EAEN,MAAM,OAAO,IAAI,kBAAkB;GACjC,WAAW,KAAK;GAChB,OAAO,KAAK;GACZ,iBAAiB,KAAK;GACtB,gBAAgB,KAAK;EACtB;EACD,KAAK,QAAQ;EACb,KAAK,YAAY,KAAK;EACtB,KAAK,IAAI,KAAK;AAEd,SAAO,KAAK,WAAW,EAAE,OAAO,QAAS,EAA6B;CACvE;CA0BD,qBAIEW,SAKAC,SAMI;AACJ,SAAOC,4BAAe,KAAK,OAAO,UAAU;GAC1C,MAAM,UAAU,MAAM,KAAK,OAAO,MAAM;AACxC,OAAI,QAAQ,aAAa,IAAI,KAC3B,QAAO,QAAQ,WAAW,GAAG;AAE/B,OAAI,OAAO,QAAQ,YAAY,SAC7B,QAAO,KAAK,MAAM,QAAQ,QAAQ;AAEpC,SAAM,IAAI,MAAM;EACjB,EAAC;CACH;AACF"}
package/dist/utils/testing/chat_models.d.cts

@@ -1,6 +1,6 @@
 import { BaseMessage } from "../../messages/base.cjs";
 import { AIMessage, AIMessageChunk } from "../../messages/ai.cjs";
-import { MessageStructure } from "../../messages/message.cjs";
+import { MessageStructure, MessageToolSet } from "../../messages/message.cjs";
 import { ChatGenerationChunk, ChatResult } from "../../outputs.cjs";
 import { InteropZodType } from "../types/zod.cjs";
 import { CallbackManagerForLLMRun } from "../../callbacks/manager.cjs";
@@ -57,9 +57,9 @@ declare class FakeStreamingChatModel extends BaseChatModel<FakeStreamingChatMode
 ...rest
 }: FakeStreamingChatModelFields & BaseLLMParams);
 _llmType(): string;
-bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<MessageStructure
+bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<MessageStructure<MessageToolSet>>, FakeStreamingChatModelCallOptions>;
 _generate(messages: BaseMessage[], _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
-_streamResponseChunks(_messages: BaseMessage[],
+_streamResponseChunks(_messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
 }
 /**
  * Interface for the input parameters specific to the Fake List Chat model.
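The widened `_streamResponseChunks` signature above matches the behavior visible in the embedded source: when `chunks` are supplied, `FakeStreamingChatModel` replays them verbatim (including tool-call deltas); otherwise it streams the first response character by character. A minimal usage sketch, assuming the package's public `utils/testing` entry point:

```typescript
import { FakeStreamingChatModel } from "@langchain/core/utils/testing";
import { AIMessageChunk, HumanMessage } from "@langchain/core/messages";

// Supplied chunks are replayed as-is, in order.
const model = new FakeStreamingChatModel({
  chunks: [
    new AIMessageChunk({ content: "partial " }),
    new AIMessageChunk({ content: "answer" }),
  ],
});

for await (const chunk of await model.stream([new HumanMessage("hi")])) {
  console.log(chunk.content); // "partial ", then "answer"
}
```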
@@ -115,7 +115,7 @@ declare class FakeListChatModel extends BaseChatModel<FakeListChatModelCallOptio
 _llmType(): string;
 _generate(_messages: BaseMessage[], options?: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
 _formatGeneration(text: string): {
-message: AIMessage<MessageStructure
+message: AIMessage<MessageStructure<MessageToolSet>>;
 text: string;
 };
 _streamResponseChunks(_messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
@@ -124,7 +124,7 @@ declare class FakeListChatModel extends BaseChatModel<FakeListChatModelCallOptio
 _createResponseChunk(text: string, generationInfo?: Record<string, any>): ChatGenerationChunk;
 _currentResponse(): string;
 _incrementResponse(): void;
-bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<MessageStructure
+bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<MessageStructure<MessageToolSet>>, FakeListChatModelCallOptions>;
 withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(_params: StructuredOutputMethodParams<RunOutput, false> | InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;
 withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(_params: StructuredOutputMethodParams<RunOutput, true> | InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {
 raw: BaseMessage;
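Per the implementation embedded in the source maps, these `withStructuredOutput` overloads resolve to a `RunnableLambda` that invokes the model and returns either the first tool call's `args` or the `JSON.parse` of string content. A rough sketch of the JSON path:

```typescript
import { FakeListChatModel } from "@langchain/core/utils/testing";

// A canned JSON string response round-trips through withStructuredOutput.
const model = new FakeListChatModel({ responses: ['{"answer":42}'] });
const structured = model.withStructuredOutput<{ answer: number }>({});

const result = await structured.invoke("anything");
// result => { answer: 42 }
```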
package/dist/utils/testing/chat_models.d.cts.map

@@ -1 +1 @@
-
{"version":3,"file":"chat_models.d.cts","names":["CallbackManagerForLLMRun","BaseChatModel","BaseChatModelCallOptions","BaseChatModelParams","BaseLLMParams","BaseMessage","AIMessage","AIMessageChunk","ChatResult","ChatGenerationChunk","Runnable","StructuredTool","StructuredOutputMethodParams","BaseLanguageModelInput","StructuredOutputMethodOptions","InteropZodType","ToolSpec","Record","FakeStreamingChatModelCallOptions","FakeStreamingChatModelFields","FakeChatModel","Promise","FakeStreamingChatModel","sleep","responses","chunks","toolStyle","thrownErrorString","______messages_message_js0","MessageStructure","AsyncGenerator","FakeChatInput","FakeListChatModelCallOptions","FakeListChatModel","RunOutput"],"sources":["../../../src/utils/testing/chat_models.d.ts"],"sourcesContent":["import { CallbackManagerForLLMRun } from \"../../callbacks/manager.js\";\nimport { BaseChatModel, BaseChatModelCallOptions, BaseChatModelParams } from \"../../language_models/chat_models.js\";\nimport { BaseLLMParams } from \"../../language_models/llms.js\";\nimport { BaseMessage, AIMessage, AIMessageChunk } from \"../../messages/index.js\";\nimport { type ChatResult, ChatGenerationChunk } from \"../../outputs.js\";\nimport { Runnable } from \"../../runnables/base.js\";\nimport { StructuredTool } from \"../../tools/index.js\";\nimport { StructuredOutputMethodParams, BaseLanguageModelInput, StructuredOutputMethodOptions } from \"../../language_models/base.js\";\nimport { InteropZodType } from \"../types/zod.js\";\n/** Minimal shape actually needed by `bindTools` */\nexport interface ToolSpec {\n name: string;\n description?: string;\n schema: InteropZodType | Record<string, unknown>;\n}\n/**\n * Interface specific to the Fake Streaming Chat model.\n */\nexport interface FakeStreamingChatModelCallOptions extends BaseChatModelCallOptions {\n}\n/**\n * Interface for the Constructor-field specific to the Fake Streaming Chat model (all optional because we fill in defaults).\n */\nexport interface FakeStreamingChatModelFields extends BaseChatModelParams {\n /** Milliseconds to pause between fallback char-by-char chunks */\n sleep?: number;\n /** Full AI messages to fall back to when no `chunks` supplied */\n responses?: BaseMessage[];\n /** Exact chunks to emit (can include tool-call deltas) */\n chunks?: AIMessageChunk[];\n /** How tool specs are formatted in `bindTools` */\n toolStyle?: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n /** Throw this error instead of streaming (useful in tests) */\n thrownErrorString?: string;\n}\nexport declare class FakeChatModel extends BaseChatModel {\n _combineLLMOutput(): never[];\n _llmType(): string;\n _generate(messages: BaseMessage[], options?: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;\n}\nexport declare class FakeStreamingChatModel extends BaseChatModel<FakeStreamingChatModelCallOptions> {\n sleep: number;\n responses: BaseMessage[];\n chunks: AIMessageChunk[];\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n thrownErrorString?: string;\n private tools;\n constructor({ sleep, responses, chunks, toolStyle, thrownErrorString, ...rest }: FakeStreamingChatModelFields & BaseLLMParams);\n _llmType(): string;\n bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<import(\"../../messages/message.js\").MessageStructure>, FakeStreamingChatModelCallOptions>;\n _generate(messages: BaseMessage[], _options: this[\"ParsedCallOptions\"], _runManager?: 
CallbackManagerForLLMRun): Promise<ChatResult>;\n _streamResponseChunks(_messages: BaseMessage[], _options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;\n}\n/**\n * Interface for the input parameters specific to the Fake List Chat model.\n */\nexport interface FakeChatInput extends BaseChatModelParams {\n /** Responses to return */\n responses: string[];\n /** Time to sleep in milliseconds between responses */\n sleep?: number;\n emitCustomEvent?: boolean;\n /**\n * Generation info to include on the last chunk during streaming.\n * This gets merged into response_metadata by the base chat model.\n * Useful for testing response_metadata propagation (e.g., finish_reason).\n */\n generationInfo?: Record<string, unknown>;\n}\nexport interface FakeListChatModelCallOptions extends BaseChatModelCallOptions {\n thrownErrorString?: string;\n}\n/**\n * A fake Chat Model that returns a predefined list of responses. It can be used\n * for testing purposes.\n * @example\n * ```typescript\n * const chat = new FakeListChatModel({\n * responses: [\"I'll callback later.\", \"You 'console' them!\"]\n * });\n *\n * const firstMessage = new HumanMessage(\"You want to hear a JavaScript joke?\");\n * const secondMessage = new HumanMessage(\"How do you cheer up a JavaScript developer?\");\n *\n * // Call the chat model with a message and log the response\n * const firstResponse = await chat.call([firstMessage]);\n * console.log({ firstResponse });\n *\n * const secondResponse = await chat.call([secondMessage]);\n * console.log({ secondResponse });\n * ```\n */\nexport declare class FakeListChatModel extends BaseChatModel<FakeListChatModelCallOptions> {\n static lc_name(): string;\n lc_serializable: boolean;\n responses: string[];\n i: number;\n sleep?: number;\n emitCustomEvent: boolean;\n generationInfo?: Record<string, unknown>;\n private tools;\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n constructor(params: FakeChatInput);\n _combineLLMOutput(): never[];\n _llmType(): string;\n _generate(_messages: BaseMessage[], options?: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;\n _formatGeneration(text: string): {\n message: AIMessage<import(\"../../messages/message.js\").MessageStructure>;\n text: string;\n };\n _streamResponseChunks(_messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;\n _sleepIfRequested(): Promise<void>;\n _sleep(): Promise<void>;\n _createResponseChunk(text: string, generationInfo?: Record<string, any>): ChatGenerationChunk;\n _currentResponse(): string;\n _incrementResponse(): void;\n bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<import(\"../../messages/message.js\").MessageStructure>, FakeListChatModelCallOptions>;\n withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(_params: StructuredOutputMethodParams<RunOutput, false> | InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;\n withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(_params: StructuredOutputMethodParams<RunOutput, true> | InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {\n raw: BaseMessage;\n parsed: RunOutput;\n }>;\n}\n//# 
sourceMappingURL=chat_models.d.ts.map"],"mappings":";;;;;;;;;;;;;;UAUiBgB,QAAAA;;;EAAAA,MAAAA,EAGLD,cAHa,GAGIE,MAAjBF,CAAAA,MAAAA,EAAiBE,OAAM,CAAA;AAKnC;AAKA;;;AAAsDd,UALrCe,iCAAAA,SAA0ChB,wBAKLC,CAAAA,CAAmB;AAYzE;;;AAG4HK,UAf3GW,4BAAAA,SAAqChB,mBAesEK,CAAAA;EAARa;EAHzEpB,KAAAA,CAAAA,EAAAA,MAAAA;EAAa;EAKnCqB,SAAAA,CAAAA,EAbLjB,WAaKiB,EAAsB;EAAuBJ;EAEnDb,MAAAA,CAAAA,EAbFE,cAaEF,EAAAA;EACHE;EAIMgB,SAAAA,CAAAA,EAAAA,QAAAA,GAAAA,WAAAA,GAAAA,SAAAA,GAAAA,QAAAA;EAAOC;EAAWC,iBAAAA,CAAAA,EAAAA,MAAAA;;AAAmBE,cAZlCP,aAAAA,SAAsBnB,aAAAA,CAYY0B;EAA8BR,iBAAAA,CAAAA,CAAAA,EAAAA,KAAAA,EAAAA;EAA+Bf,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAE9FO,SAAAA,CAAAA,QAAAA,EAXEN,WAWFM,EAAAA,EAAAA,OAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAXmEX,wBAWnEW,CAAAA,EAX8FU,OAW9FV,CAXsGH,UAWtGG,CAAAA;;AAAwCE,cATzCS,sBAAAA,SAA+BrB,aASUY,CATIK,iCASJL,CAAAA,CAAAA;;EAAwBN,SAAAA,EAPvEF,WAOuEE,EAAAA;EAAsEW,MAAAA,EANhJX,cAMgJW,EAAAA;EAAvGR,SAAAA,EAAAA,QAAAA,GAAAA,WAAAA,GAAAA,SAAAA,GAAAA,QAAAA;EAC7BL,iBAAAA,CAAAA,EAAAA,MAAAA;EAAkEL,QAAAA,KAAAA;EAAmCQ,WAAAA,CAAAA;IAAAA,KAAAA;IAAAA,SAAAA;IAAAA,MAAAA;IAAAA,SAAAA;IAAAA,iBAAAA;IAAAA,GAAAA;EAAAA,CAAAA,EAHxCW,4BAGwCX,GAHTJ,aAGSI;EAARa,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAChFhB,SAAAA,CAAAA,KAAAA,EAAAA,CAFfM,cAEeN,GAFEW,QAEFX,CAAAA,EAAAA,CAAAA,EAFgBK,QAEhBL,CAFyBQ,sBAEzBR,EAFiDE,cAEjDF,kBAAAA,EAFuHa,iCAEvHb,CAAAA;EAAiEL,SAAAA,CAAAA,QAAAA,EAD9EK,WAC8EL,EAAAA,EAAAA,QAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,WAAAA,CAAAA,EADZA,wBACYA,CAAAA,EADeqB,OACfrB,CADuBQ,UACvBR,CAAAA;EAA0CS,qBAAAA,CAAAA,SAAAA,EAA3GJ,WAA2GI,EAAAA,EAAAA,QAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAA1CT,wBAA0CS,CAAAA,EAAfqB,cAAerB,CAAAA,mBAAAA,CAAAA;;;AAX/E;AAgBjE;AAaiBuB,UAbAD,aAAAA,SAAsB5B,mBAaeD,CAAAA;EAuBjC+B;EAAwCD,SAAAA,EAAAA,MAAAA,EAAAA;EAOxCf;EAGGc,KAAAA,CAAAA,EAAAA,MAAAA;EAGC1B,eAAAA,CAAAA,EAAAA,OAAAA;EAAiEL;;;;;EAKrDK,cAAAA,CAAAA,EA3ChBY,MA2CgBZ,CAAAA,MAAAA,EAAAA,OAAAA,CAAAA;;AAA0GI,UAzC9HuB,4BAAAA,SAAqC9B,wBAyCyFO,CAAAA;EAAfqB,iBAAAA,CAAAA,EAAAA,MAAAA;;;;;;;;;;;;;;;;;;;;;;AAOiHpB,cAzB5NuB,iBAAAA,SAA0BhC,aAyBkMS,CAzBpLsB,4BAyBoLtB,CAAAA,CAAAA;EACtMO,OAAAA,OAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAAsBA,eAAAA,EAAAA,OAAAA;EAA2DiB,SAAAA,EAAAA,MAAAA,EAAAA;EAA7BtB,CAAAA,EAAAA,MAAAA;EAA+DsB,KAAAA,CAAAA,EAAAA,MAAAA;EAAfnB,eAAAA,EAAAA,OAAAA;EAA4BE,cAAAA,CAAAA,EAnBtJA,MAmBsJA,CAAAA,MAAAA,EAAAA,OAAAA,CAAAA;EAA8BH,QAAAA,KAAAA;EAA+CD,SAAAA,EAAAA,QAAAA,GAAAA,WAAAA,GAAAA,SAAAA,GAAAA,QAAAA;EAC3OR,WAAAA,CAAAA,MAAAA,EAjBW0B,aAiBX1B;EACG6B,iBAAAA,CAAAA,CAAAA,EAAAA,KAAAA,EAAAA;EAF+NxB,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EA1BhMT,SAAAA,CAAAA,SAAAA,EAatBI,WAbsBJ,EAAAA,EAAAA,OAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAa2CD,wBAb3CC,CAAAA,EAasEoB,OAbtEpB,CAa8EO,UAb9EP,CAAAA;EAAa,iBAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EAAA;aAe3CK;;;mCAGoBD,gEAAgEL,2BAA2B8B,eAAerB;uBACtHY;YACXA;sDAC0CJ,sBAAsBR;;;oBAGxDE,iBAAiBK,cAAcN,SAASG,wBAAwBN,kCAAsEyB;yCACjHf,sBAAsBA,8BAA8BL,6BAA6BsB,oBAAoBnB,eAAemB,aAAajB,8BAA8BH,uCAAuCJ,SAASG,wBAAwBqB;yCACvOjB,sBAAsBA,8BAA8BL,6BAA6BsB,mBAAmBnB,eAAemB,aAAajB,8BAA8BH,sCAAsCJ,SAASG;SAC3OR;YACG6B"}
+
{"version":3,"file":"chat_models.d.cts","names":["CallbackManagerForLLMRun","BaseChatModel","BaseChatModelCallOptions","BaseChatModelParams","BaseLLMParams","BaseMessage","AIMessage","AIMessageChunk","ChatResult","ChatGenerationChunk","Runnable","StructuredTool","StructuredOutputMethodParams","BaseLanguageModelInput","StructuredOutputMethodOptions","InteropZodType","ToolSpec","Record","FakeStreamingChatModelCallOptions","FakeStreamingChatModelFields","FakeChatModel","Promise","FakeStreamingChatModel","sleep","responses","chunks","toolStyle","thrownErrorString","______messages_message_js0","MessageToolSet","MessageStructure","AsyncGenerator","FakeChatInput","FakeListChatModelCallOptions","FakeListChatModel","RunOutput"],"sources":["../../../src/utils/testing/chat_models.d.ts"],"sourcesContent":["import { CallbackManagerForLLMRun } from \"../../callbacks/manager.js\";\nimport { BaseChatModel, BaseChatModelCallOptions, BaseChatModelParams } from \"../../language_models/chat_models.js\";\nimport { BaseLLMParams } from \"../../language_models/llms.js\";\nimport { BaseMessage, AIMessage, AIMessageChunk } from \"../../messages/index.js\";\nimport { type ChatResult, ChatGenerationChunk } from \"../../outputs.js\";\nimport { Runnable } from \"../../runnables/base.js\";\nimport { StructuredTool } from \"../../tools/index.js\";\nimport { StructuredOutputMethodParams, BaseLanguageModelInput, StructuredOutputMethodOptions } from \"../../language_models/base.js\";\nimport { InteropZodType } from \"../types/zod.js\";\n/** Minimal shape actually needed by `bindTools` */\nexport interface ToolSpec {\n name: string;\n description?: string;\n schema: InteropZodType | Record<string, unknown>;\n}\n/**\n * Interface specific to the Fake Streaming Chat model.\n */\nexport interface FakeStreamingChatModelCallOptions extends BaseChatModelCallOptions {\n}\n/**\n * Interface for the Constructor-field specific to the Fake Streaming Chat model (all optional because we fill in defaults).\n */\nexport interface FakeStreamingChatModelFields extends BaseChatModelParams {\n /** Milliseconds to pause between fallback char-by-char chunks */\n sleep?: number;\n /** Full AI messages to fall back to when no `chunks` supplied */\n responses?: BaseMessage[];\n /** Exact chunks to emit (can include tool-call deltas) */\n chunks?: AIMessageChunk[];\n /** How tool specs are formatted in `bindTools` */\n toolStyle?: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n /** Throw this error instead of streaming (useful in tests) */\n thrownErrorString?: string;\n}\nexport declare class FakeChatModel extends BaseChatModel {\n _combineLLMOutput(): never[];\n _llmType(): string;\n _generate(messages: BaseMessage[], options?: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;\n}\nexport declare class FakeStreamingChatModel extends BaseChatModel<FakeStreamingChatModelCallOptions> {\n sleep: number;\n responses: BaseMessage[];\n chunks: AIMessageChunk[];\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n thrownErrorString?: string;\n private tools;\n constructor({ sleep, responses, chunks, toolStyle, thrownErrorString, ...rest }: FakeStreamingChatModelFields & BaseLLMParams);\n _llmType(): string;\n bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<import(\"../../messages/message.js\").MessageStructure<import(\"../../messages/message.js\").MessageToolSet>>, FakeStreamingChatModelCallOptions>;\n _generate(messages: BaseMessage[], 
_options: this[\"ParsedCallOptions\"], _runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;\n _streamResponseChunks(_messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;\n}\n/**\n * Interface for the input parameters specific to the Fake List Chat model.\n */\nexport interface FakeChatInput extends BaseChatModelParams {\n /** Responses to return */\n responses: string[];\n /** Time to sleep in milliseconds between responses */\n sleep?: number;\n emitCustomEvent?: boolean;\n /**\n * Generation info to include on the last chunk during streaming.\n * This gets merged into response_metadata by the base chat model.\n * Useful for testing response_metadata propagation (e.g., finish_reason).\n */\n generationInfo?: Record<string, unknown>;\n}\nexport interface FakeListChatModelCallOptions extends BaseChatModelCallOptions {\n thrownErrorString?: string;\n}\n/**\n * A fake Chat Model that returns a predefined list of responses. It can be used\n * for testing purposes.\n * @example\n * ```typescript\n * const chat = new FakeListChatModel({\n * responses: [\"I'll callback later.\", \"You 'console' them!\"]\n * });\n *\n * const firstMessage = new HumanMessage(\"You want to hear a JavaScript joke?\");\n * const secondMessage = new HumanMessage(\"How do you cheer up a JavaScript developer?\");\n *\n * // Call the chat model with a message and log the response\n * const firstResponse = await chat.call([firstMessage]);\n * console.log({ firstResponse });\n *\n * const secondResponse = await chat.call([secondMessage]);\n * console.log({ secondResponse });\n * ```\n */\nexport declare class FakeListChatModel extends BaseChatModel<FakeListChatModelCallOptions> {\n static lc_name(): string;\n lc_serializable: boolean;\n responses: string[];\n i: number;\n sleep?: number;\n emitCustomEvent: boolean;\n generationInfo?: Record<string, unknown>;\n private tools;\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n constructor(params: FakeChatInput);\n _combineLLMOutput(): never[];\n _llmType(): string;\n _generate(_messages: BaseMessage[], options?: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;\n _formatGeneration(text: string): {\n message: AIMessage<import(\"../../messages/message.js\").MessageStructure<import(\"../../messages/message.js\").MessageToolSet>>;\n text: string;\n };\n _streamResponseChunks(_messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;\n _sleepIfRequested(): Promise<void>;\n _sleep(): Promise<void>;\n _createResponseChunk(text: string, generationInfo?: Record<string, any>): ChatGenerationChunk;\n _currentResponse(): string;\n _incrementResponse(): void;\n bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<import(\"../../messages/message.js\").MessageStructure<import(\"../../messages/message.js\").MessageToolSet>>, FakeListChatModelCallOptions>;\n withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(_params: StructuredOutputMethodParams<RunOutput, false> | InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;\n withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(_params: StructuredOutputMethodParams<RunOutput, true> | InteropZodType<RunOutput> | Record<string, 
any>, config?: StructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {\n raw: BaseMessage;\n parsed: RunOutput;\n }>;\n}\n//# sourceMappingURL=chat_models.d.ts.map"],"mappings":";;;;;;;;;;;;;;UAUiBgB,QAAAA;;;EAAAA,MAAAA,EAGLD,cAHa,GAGIE,MAAjBF,CAAAA,MAAAA,EAAiBE,OAAM,CAAA;AAKnC;AAKA;;;AAAsDd,UALrCe,iCAAAA,SAA0ChB,wBAKLC,CAAAA,CAAmB;AAYzE;;;AAG4HK,UAf3GW,4BAAAA,SAAqChB,mBAesEK,CAAAA;EAARa;EAHzEpB,KAAAA,CAAAA,EAAAA,MAAAA;EAAa;EAKnCqB,SAAAA,CAAAA,EAbLjB,WAaKiB,EAAsB;EAAuBJ;EAEnDb,MAAAA,CAAAA,EAbFE,cAaEF,EAAAA;EACHE;EAIMgB,SAAAA,CAAAA,EAAAA,QAAAA,GAAAA,WAAAA,GAAAA,SAAAA,GAAAA,QAAAA;EAAOC;EAAWC,iBAAAA,CAAAA,EAAAA,MAAAA;;AAAmBE,cAZlCP,aAAAA,SAAsBnB,aAAAA,CAYY0B;EAA8BR,iBAAAA,CAAAA,CAAAA,EAAAA,KAAAA,EAAAA;EAA+Bf,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAE9FO,SAAAA,CAAAA,QAAAA,EAXEN,WAWFM,EAAAA,EAAAA,OAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAXmEX,wBAWnEW,CAAAA,EAX8FU,OAW9FV,CAXsGH,UAWtGG,CAAAA;;AAAwCE,cATzCS,sBAAAA,SAA+BrB,aASUY,CATIK,iCASJL,CAAAA,CAAAA;;aAP/CR;EAOuEE,MAAAA,EAN1EA,cAM0EA,EAAAA;EAA0HW,SAAAA,EAAAA,QAAAA,GAAAA,WAAAA,GAAAA,SAAAA,GAAAA,QAAAA;EAA3JR,iBAAAA,CAAAA,EAAAA,MAAAA;EAC7BL,QAAAA,KAAAA;EAAkEL,WAAAA,CAAAA;IAAAA,KAAAA;IAAAA,SAAAA;IAAAA,MAAAA;IAAAA,SAAAA;IAAAA,iBAAAA;IAAAA,GAAAA;EAAAA,CAAAA,EAHLmB,4BAGKnB,GAH0BI,aAG1BJ;EAAmCQ,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAARa,SAAAA,CAAAA,KAAAA,EAAAA,CAD/FV,cAC+FU,GAD9EL,QAC8EK,CAAAA,EAAAA,CAAAA,EADhEX,QACgEW,CADvDR,sBACuDQ,EAD/Bd,cAC+Bc,iBAAAA,gBAAAA,CAAAA,EAD2FH,iCAC3FG,CAAAA;EAChFhB,SAAAA,CAAAA,QAAAA,EADbA,WACaA,EAAAA,EAAAA,QAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,WAAAA,CAAAA,EADqDL,wBACrDK,CAAAA,EADgFgB,OAChFhB,CADwFG,UACxFH,CAAAA;EAAgEL,qBAAAA,CAAAA,SAAAA,EAAhEK,WAAgEL,EAAAA,EAAAA,OAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAAAA,wBAAAA,CAAAA,EAA2B+B,cAA3B/B,CAA0CS,mBAA1CT,CAAAA;;;;AAXpC;AAgBhDgC,UAAAA,aAAAA,SAAsB7B,mBAAAA,CAAAA;EAatB8B;EAuBIC,SAAAA,EAAAA,MAAAA,EAAAA;EAAwCD;EAOxChB,KAAAA,CAAAA,EAAAA,MAAAA;EAGGe,eAAAA,CAAAA,EAAAA,OAAAA;EAGC3B;;;;;mBAtCJY;;AA2CgBZ,UAzCpB4B,4BAAAA,SAAqC/B,wBAyCjBG,CAAAA;EAAgEL,iBAAAA,CAAAA,EAAAA,MAAAA;;;;;;;;;;;;;;;;;;;;;;AAOqGc,cAzBrLoB,iBAAAA,SAA0BjC,aAyB2Ja,CAzB7ImB,4BAyB6InB,CAAAA,CAAAA;EAAgDD,OAAAA,OAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAAwBsB,eAAAA,EAAAA,OAAAA;EAAjCzB,SAAAA,EAAAA,MAAAA,EAAAA;EACtMO,CAAAA,EAAAA,MAAAA;EAAsBA,KAAAA,CAAAA,EAAAA,MAAAA;EAA2DkB,eAAAA,EAAAA,OAAAA;EAA7BvB,cAAAA,CAAAA,EAnB1EK,MAmB0EL,CAAAA,MAAAA,EAAAA,OAAAA,CAAAA;EAA+DuB,QAAAA,KAAAA;EAAfpB,SAAAA,EAAAA,QAAAA,GAAAA,WAAAA,GAAAA,SAAAA,GAAAA,QAAAA;EAA4BE,WAAAA,CAAAA,MAAAA,EAhBnJe,aAgBmJf;EAA8BH,iBAAAA,CAAAA,CAAAA,EAAAA,KAAAA,EAAAA;EAA+CD,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAC3OR,SAAAA,CAAAA,SAAAA,EAdYA,WAcZA,EAAAA,EAAAA,OAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAd6EL,wBAc7EK,CAAAA,EAdwGgB,OAcxGhB,CAdgHG,UAchHH,CAAAA;EACG8B,iBAAAA,CAAAA,IAAAA,EAAAA,MAAAA,CAAAA,EAAAA;IAF+NzB,OAAAA,EAX9NJ,SAW8NI,iBAAAA,gBAAAA,CAAAA;IA1BhMT,IAAAA,EAAAA,MAAAA;EAAa,CAAA;mCAkBvBI,gEAAgEL,2BAA2B+B,eAAetB;uBACtHY;YACXA;sDAC0CJ,sBAAsBR;;;oBAGxDE,iBAAiBK,cAAcN,SAASG,wBAAwBN,kDAA0H0B;yCACrKhB,sBAAsBA,8BAA8BL,6BAA6BuB,oBAAoBpB,eAAeoB,aAAalB,8BAA8BH,uCAAuCJ,SAASG,wBAAwBsB;yCACvOlB,sBAAsBA,8BAA8BL,6BAA6BuB,mBAAmBpB,eAAeoB,aAAalB,8BAA8BH,sCAAsCJ,SAASG;SAC3OR;YACG8B"}
package/dist/utils/testing/chat_models.d.ts

@@ -1,6 +1,6 @@
 import { BaseMessage } from "../../messages/base.js";
 import { AIMessage, AIMessageChunk } from "../../messages/ai.js";
-import { MessageStructure } from "../../messages/message.js";
+import { MessageStructure, MessageToolSet } from "../../messages/message.js";
 import { ChatGenerationChunk, ChatResult } from "../../outputs.js";
 import { InteropZodType } from "../types/zod.js";
 import { CallbackManagerForLLMRun } from "../../callbacks/manager.js";
@@ -58,9 +58,9 @@ declare class FakeStreamingChatModel extends BaseChatModel<FakeStreamingChatMode
 ...rest
 }: FakeStreamingChatModelFields & BaseLLMParams);
 _llmType(): string;
-bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<MessageStructure
+bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<MessageStructure<MessageToolSet>>, FakeStreamingChatModelCallOptions>;
 _generate(messages: BaseMessage[], _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
-_streamResponseChunks(_messages: BaseMessage[],
+_streamResponseChunks(_messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
 }
 /**
  * Interface for the input parameters specific to the Fake List Chat model.
@@ -116,7 +116,7 @@ declare class FakeListChatModel extends BaseChatModel<FakeListChatModelCallOptio
 _llmType(): string;
 _generate(_messages: BaseMessage[], options?: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
 _formatGeneration(text: string): {
-message: AIMessage<MessageStructure
+message: AIMessage<MessageStructure<MessageToolSet>>;
 text: string;
 };
 _streamResponseChunks(_messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
@@ -125,7 +125,7 @@ declare class FakeListChatModel extends BaseChatModel<FakeListChatModelCallOptio
 _createResponseChunk(text: string, generationInfo?: Record<string, any>): ChatGenerationChunk;
 _currentResponse(): string;
 _incrementResponse(): void;
-bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<MessageStructure
+bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<MessageStructure<MessageToolSet>>, FakeListChatModelCallOptions>;
 withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(_params: StructuredOutputMethodParams<RunOutput, false> | InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;
 withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(_params: StructuredOutputMethodParams<RunOutput, true> | InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {
 raw: BaseMessage;
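The `bindTools` declaration above pairs with the `toolStyle` field: per the embedded source, the same tool spec is formatted per provider convention (OpenAI `function` wrapper, Anthropic `input_schema`, Bedrock `toolSpec`, Google `functionDeclarations`). An illustrative sketch using the minimal `ToolSpec` shape; the tool name and schema here are made up:

```typescript
import { FakeListChatModel } from "@langchain/core/utils/testing";

const model = new FakeListChatModel({ responses: ["ok"] });
model.toolStyle = "bedrock";

// With the "bedrock" style, each tool is bound into config as
// { toolSpec: { name, description, inputSchema } }.
const bound = model.bindTools([
  {
    name: "add",
    description: "Add two numbers",
    schema: {
      type: "object",
      properties: { a: { type: "number" }, b: { type: "number" } },
    },
  },
]);
```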
package/dist/utils/testing/chat_models.d.ts.map

@@ -1 +1 @@
-
{"version":3,"file":"chat_models.d.ts","names":["CallbackManagerForLLMRun","BaseChatModel","BaseChatModelCallOptions","BaseChatModelParams","BaseLLMParams","BaseMessage","AIMessage","AIMessageChunk","ChatResult","ChatGenerationChunk","Runnable","StructuredTool","StructuredOutputMethodParams","BaseLanguageModelInput","StructuredOutputMethodOptions","InteropZodType","ToolSpec","Record","FakeStreamingChatModelCallOptions","FakeStreamingChatModelFields","FakeChatModel","Promise","FakeStreamingChatModel","sleep","responses","chunks","toolStyle","thrownErrorString","______messages_message_js0","MessageStructure","AsyncGenerator","FakeChatInput","FakeListChatModelCallOptions","FakeListChatModel","RunOutput"],"sources":["../../../src/utils/testing/chat_models.d.ts"],"sourcesContent":["import { CallbackManagerForLLMRun } from \"../../callbacks/manager.js\";\nimport { BaseChatModel, BaseChatModelCallOptions, BaseChatModelParams } from \"../../language_models/chat_models.js\";\nimport { BaseLLMParams } from \"../../language_models/llms.js\";\nimport { BaseMessage, AIMessage, AIMessageChunk } from \"../../messages/index.js\";\nimport { type ChatResult, ChatGenerationChunk } from \"../../outputs.js\";\nimport { Runnable } from \"../../runnables/base.js\";\nimport { StructuredTool } from \"../../tools/index.js\";\nimport { StructuredOutputMethodParams, BaseLanguageModelInput, StructuredOutputMethodOptions } from \"../../language_models/base.js\";\nimport { InteropZodType } from \"../types/zod.js\";\n/** Minimal shape actually needed by `bindTools` */\nexport interface ToolSpec {\n name: string;\n description?: string;\n schema: InteropZodType | Record<string, unknown>;\n}\n/**\n * Interface specific to the Fake Streaming Chat model.\n */\nexport interface FakeStreamingChatModelCallOptions extends BaseChatModelCallOptions {\n}\n/**\n * Interface for the Constructor-field specific to the Fake Streaming Chat model (all optional because we fill in defaults).\n */\nexport interface FakeStreamingChatModelFields extends BaseChatModelParams {\n /** Milliseconds to pause between fallback char-by-char chunks */\n sleep?: number;\n /** Full AI messages to fall back to when no `chunks` supplied */\n responses?: BaseMessage[];\n /** Exact chunks to emit (can include tool-call deltas) */\n chunks?: AIMessageChunk[];\n /** How tool specs are formatted in `bindTools` */\n toolStyle?: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n /** Throw this error instead of streaming (useful in tests) */\n thrownErrorString?: string;\n}\nexport declare class FakeChatModel extends BaseChatModel {\n _combineLLMOutput(): never[];\n _llmType(): string;\n _generate(messages: BaseMessage[], options?: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;\n}\nexport declare class FakeStreamingChatModel extends BaseChatModel<FakeStreamingChatModelCallOptions> {\n sleep: number;\n responses: BaseMessage[];\n chunks: AIMessageChunk[];\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n thrownErrorString?: string;\n private tools;\n constructor({ sleep, responses, chunks, toolStyle, thrownErrorString, ...rest }: FakeStreamingChatModelFields & BaseLLMParams);\n _llmType(): string;\n bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<import(\"../../messages/message.js\").MessageStructure>, FakeStreamingChatModelCallOptions>;\n _generate(messages: BaseMessage[], _options: this[\"ParsedCallOptions\"], _runManager?: 
CallbackManagerForLLMRun): Promise<ChatResult>;\n _streamResponseChunks(_messages: BaseMessage[], _options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;\n}\n/**\n * Interface for the input parameters specific to the Fake List Chat model.\n */\nexport interface FakeChatInput extends BaseChatModelParams {\n /** Responses to return */\n responses: string[];\n /** Time to sleep in milliseconds between responses */\n sleep?: number;\n emitCustomEvent?: boolean;\n /**\n * Generation info to include on the last chunk during streaming.\n * This gets merged into response_metadata by the base chat model.\n * Useful for testing response_metadata propagation (e.g., finish_reason).\n */\n generationInfo?: Record<string, unknown>;\n}\nexport interface FakeListChatModelCallOptions extends BaseChatModelCallOptions {\n thrownErrorString?: string;\n}\n/**\n * A fake Chat Model that returns a predefined list of responses. It can be used\n * for testing purposes.\n * @example\n * ```typescript\n * const chat = new FakeListChatModel({\n * responses: [\"I'll callback later.\", \"You 'console' them!\"]\n * });\n *\n * const firstMessage = new HumanMessage(\"You want to hear a JavaScript joke?\");\n * const secondMessage = new HumanMessage(\"How do you cheer up a JavaScript developer?\");\n *\n * // Call the chat model with a message and log the response\n * const firstResponse = await chat.call([firstMessage]);\n * console.log({ firstResponse });\n *\n * const secondResponse = await chat.call([secondMessage]);\n * console.log({ secondResponse });\n * ```\n */\nexport declare class FakeListChatModel extends BaseChatModel<FakeListChatModelCallOptions> {\n static lc_name(): string;\n lc_serializable: boolean;\n responses: string[];\n i: number;\n sleep?: number;\n emitCustomEvent: boolean;\n generationInfo?: Record<string, unknown>;\n private tools;\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n constructor(params: FakeChatInput);\n _combineLLMOutput(): never[];\n _llmType(): string;\n _generate(_messages: BaseMessage[], options?: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;\n _formatGeneration(text: string): {\n message: AIMessage<import(\"../../messages/message.js\").MessageStructure>;\n text: string;\n };\n _streamResponseChunks(_messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;\n _sleepIfRequested(): Promise<void>;\n _sleep(): Promise<void>;\n _createResponseChunk(text: string, generationInfo?: Record<string, any>): ChatGenerationChunk;\n _currentResponse(): string;\n _incrementResponse(): void;\n bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<import(\"../../messages/message.js\").MessageStructure>, FakeListChatModelCallOptions>;\n withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(_params: StructuredOutputMethodParams<RunOutput, false> | InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;\n withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(_params: StructuredOutputMethodParams<RunOutput, true> | InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {\n raw: BaseMessage;\n parsed: RunOutput;\n }>;\n}\n//# 
sourceMappingURL=chat_models.d.ts.map"],"mappings":";;;;;;;;;;;;;;;UAUiBgB,QAAAA;;;UAGLD,iBAAiBE;AAH7B;AAQA;AAKA;;AAMaV,UAXIW,iCAAAA,SAA0ChB,wBAW9CK,CAAAA;AAN4D;AAYzE;;AAGyFP,UAfxEmB,4BAAAA,SAAqChB,mBAemCH,CAAAA;EAAmCQ;EAARa,KAAAA,CAAAA,EAAAA,MAAAA;EAHzEpB;EAAa,SAAA,CAAA,EARxCI,WAQwC,EAAA;EAKnCiB;EAA6CJ,MAAAA,CAAAA,EAXrDX,cAWqDW,EAAAA;EAEnDb;EACHE,SAAAA,CAAAA,EAAAA,QAAAA,GAAAA,WAAAA,GAAAA,SAAAA,GAAAA,QAAAA;EAIMgB;EAAOC,iBAAAA,CAAAA,EAAAA,MAAAA;;AAAmBE,cAZvBN,aAAAA,SAAsBnB,aAAAA,CAYCyB;EAAWC,iBAAAA,CAAAA,CAAAA,EAAAA,KAAAA,EAAAA;EAA8BR,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAA+Bf,SAAAA,CAAAA,QAAAA,EAT5FC,WAS4FD,EAAAA,EAAAA,OAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAT3BJ,wBAS2BI,CAAAA,EATAiB,OASAjB,CATQI,UASRJ,CAAAA;;AAE7EY,cATlBM,sBAAAA,SAA+BrB,aASbe,CAT2BE,iCAS3BF,CAAAA,CAAAA;EAAuBH,KAAAA,EAAAA,MAAAA;aAP/CR;EAOuEE,MAAAA,EAN1EA,cAM0EA,EAAAA;EAAsEW,SAAAA,EAAAA,QAAAA,GAAAA,WAAAA,GAAAA,SAAAA,GAAAA,QAAAA;EAAvGR,iBAAAA,CAAAA,EAAAA,MAAAA;EAC7BL,QAAAA,KAAAA;EAAkEL,WAAAA,CAAAA;IAAAA,KAAAA;IAAAA,SAAAA;IAAAA,MAAAA;IAAAA,SAAAA;IAAAA,iBAAAA;IAAAA,GAAAA;EAAAA,CAAAA,EAHLmB,4BAGKnB,GAH0BI,aAG1BJ;EAAmCQ,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAARa,SAAAA,CAAAA,KAAAA,EAAAA,CAD/FV,cAC+FU,GAD9EL,QAC8EK,CAAAA,EAAAA,CAAAA,EADhEX,QACgEW,CADvDR,sBACuDQ,EAD/Bd,cAC+Bc,kBAAAA,EADuCH,iCACvCG,CAAAA;EAChFhB,SAAAA,CAAAA,QAAAA,EADbA,WACaA,EAAAA,EAAAA,QAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,WAAAA,CAAAA,EADqDL,wBACrDK,CAAAA,EADgFgB,OAChFhB,CADwFG,UACxFH,CAAAA;EAAiEL,qBAAAA,CAAAA,SAAAA,EAAjEK,WAAiEL,EAAAA,EAAAA,QAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAAAA,wBAAAA,CAAAA,EAA2B8B,cAA3B9B,CAA0CS,mBAA1CT,CAAAA;;;;AAXrC;AAgBhD+B,UAAAA,aAAAA,SAAsB5B,mBAAAA,CAAAA;EAatB6B;EAuBIC,SAAAA,EAAAA,MAAAA,EAAAA;EAAwCD;EAOxCf,KAAAA,CAAAA,EAAAA,MAAAA;EAGGc,eAAAA,CAAAA,EAAAA,OAAAA;EAGC1B;;;;;EAERC,cAAAA,CAAAA,EAxCIW,MAwCJX,CAAAA,MAAAA,EAAAA,OAAAA,CAAAA;;AAGoFN,UAzCpFgC,4BAAAA,SAAqC9B,wBAyC+CF,CAAAA;EAA0CS,iBAAAA,CAAAA,EAAAA,MAAAA;;;;;;;;;;;;;;;;;;;;;;AAOmIyB,cAzB7PD,iBAAAA,SAA0BhC,aAyBmOiC,CAzBrNF,4BAyBqNE,CAAAA,CAAAA;EAAjCxB,OAAAA,OAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EACtMO,eAAAA,EAAAA,OAAAA;EAAsBA,SAAAA,EAAAA,MAAAA,EAAAA;EAA2DiB,CAAAA,EAAAA,MAAAA;EAA7BtB,KAAAA,CAAAA,EAAAA,MAAAA;EAA+DsB,eAAAA,EAAAA,OAAAA;EAAfnB,cAAAA,CAAAA,EAnB1HE,MAmB0HF,CAAAA,MAAAA,EAAAA,OAAAA,CAAAA;EAA4BE,QAAAA,KAAAA;EAA8BH,SAAAA,EAAAA,QAAAA,GAAAA,WAAAA,GAAAA,SAAAA,GAAAA,QAAAA;EAA+CD,WAAAA,CAAAA,MAAAA,EAhBhOkB,aAgBgOlB;EAC3OR,iBAAAA,CAAAA,CAAAA,EAAAA,KAAAA,EAAAA;EACG6B,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAF+NxB,SAAAA,CAAAA,SAAAA,EAbtNL,WAasNK,EAAAA,EAAAA,OAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAbrJV,wBAaqJU,CAAAA,EAb1HW,OAa0HX,CAblHF,UAakHE,CAAAA;EA1BhMT,iBAAAA,CAAAA,IAAAA,EAAAA,MAAAA,CAAAA,EAAAA;IAAa,OAAA,EAe3CK,SAf2C,kBAAA;;;mCAkBvBD,gEAAgEL,2BAA2B8B,eAAerB;uBACtHY;YACXA;sDAC0CJ,sBAAsBR;;;oBAGxDE,iBAAiBK,cAAcN,SAASG,wBAAwBN,kCAAsEyB;yCACjHf,sBAAsBA,8BAA8BL,6BAA6BsB,oBAAoBnB,eAAemB,aAAajB,8BAA8BH,uCAAuCJ,SAASG,wBAAwBqB;yCACvOjB,sBAAsBA,8BAA8BL,6BAA6BsB,mBAAmBnB,eAAemB,aAAajB,8BAA8BH,sCAAsCJ,SAASG;SAC3OR;YACG6B"}
+
{"version":3,"file":"chat_models.d.ts","names":["CallbackManagerForLLMRun","BaseChatModel","BaseChatModelCallOptions","BaseChatModelParams","BaseLLMParams","BaseMessage","AIMessage","AIMessageChunk","ChatResult","ChatGenerationChunk","Runnable","StructuredTool","StructuredOutputMethodParams","BaseLanguageModelInput","StructuredOutputMethodOptions","InteropZodType","ToolSpec","Record","FakeStreamingChatModelCallOptions","FakeStreamingChatModelFields","FakeChatModel","Promise","FakeStreamingChatModel","sleep","responses","chunks","toolStyle","thrownErrorString","______messages_message_js0","MessageToolSet","MessageStructure","AsyncGenerator","FakeChatInput","FakeListChatModelCallOptions","FakeListChatModel","RunOutput"],"sources":["../../../src/utils/testing/chat_models.d.ts"],"sourcesContent":["import { CallbackManagerForLLMRun } from \"../../callbacks/manager.js\";\nimport { BaseChatModel, BaseChatModelCallOptions, BaseChatModelParams } from \"../../language_models/chat_models.js\";\nimport { BaseLLMParams } from \"../../language_models/llms.js\";\nimport { BaseMessage, AIMessage, AIMessageChunk } from \"../../messages/index.js\";\nimport { type ChatResult, ChatGenerationChunk } from \"../../outputs.js\";\nimport { Runnable } from \"../../runnables/base.js\";\nimport { StructuredTool } from \"../../tools/index.js\";\nimport { StructuredOutputMethodParams, BaseLanguageModelInput, StructuredOutputMethodOptions } from \"../../language_models/base.js\";\nimport { InteropZodType } from \"../types/zod.js\";\n/** Minimal shape actually needed by `bindTools` */\nexport interface ToolSpec {\n name: string;\n description?: string;\n schema: InteropZodType | Record<string, unknown>;\n}\n/**\n * Interface specific to the Fake Streaming Chat model.\n */\nexport interface FakeStreamingChatModelCallOptions extends BaseChatModelCallOptions {\n}\n/**\n * Interface for the Constructor-field specific to the Fake Streaming Chat model (all optional because we fill in defaults).\n */\nexport interface FakeStreamingChatModelFields extends BaseChatModelParams {\n /** Milliseconds to pause between fallback char-by-char chunks */\n sleep?: number;\n /** Full AI messages to fall back to when no `chunks` supplied */\n responses?: BaseMessage[];\n /** Exact chunks to emit (can include tool-call deltas) */\n chunks?: AIMessageChunk[];\n /** How tool specs are formatted in `bindTools` */\n toolStyle?: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n /** Throw this error instead of streaming (useful in tests) */\n thrownErrorString?: string;\n}\nexport declare class FakeChatModel extends BaseChatModel {\n _combineLLMOutput(): never[];\n _llmType(): string;\n _generate(messages: BaseMessage[], options?: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;\n}\nexport declare class FakeStreamingChatModel extends BaseChatModel<FakeStreamingChatModelCallOptions> {\n sleep: number;\n responses: BaseMessage[];\n chunks: AIMessageChunk[];\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n thrownErrorString?: string;\n private tools;\n constructor({ sleep, responses, chunks, toolStyle, thrownErrorString, ...rest }: FakeStreamingChatModelFields & BaseLLMParams);\n _llmType(): string;\n bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<import(\"../../messages/message.js\").MessageStructure<import(\"../../messages/message.js\").MessageToolSet>>, FakeStreamingChatModelCallOptions>;\n _generate(messages: BaseMessage[], 
_options: this[\"ParsedCallOptions\"], _runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;\n _streamResponseChunks(_messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;\n}\n/**\n * Interface for the input parameters specific to the Fake List Chat model.\n */\nexport interface FakeChatInput extends BaseChatModelParams {\n /** Responses to return */\n responses: string[];\n /** Time to sleep in milliseconds between responses */\n sleep?: number;\n emitCustomEvent?: boolean;\n /**\n * Generation info to include on the last chunk during streaming.\n * This gets merged into response_metadata by the base chat model.\n * Useful for testing response_metadata propagation (e.g., finish_reason).\n */\n generationInfo?: Record<string, unknown>;\n}\nexport interface FakeListChatModelCallOptions extends BaseChatModelCallOptions {\n thrownErrorString?: string;\n}\n/**\n * A fake Chat Model that returns a predefined list of responses. It can be used\n * for testing purposes.\n * @example\n * ```typescript\n * const chat = new FakeListChatModel({\n * responses: [\"I'll callback later.\", \"You 'console' them!\"]\n * });\n *\n * const firstMessage = new HumanMessage(\"You want to hear a JavaScript joke?\");\n * const secondMessage = new HumanMessage(\"How do you cheer up a JavaScript developer?\");\n *\n * // Call the chat model with a message and log the response\n * const firstResponse = await chat.call([firstMessage]);\n * console.log({ firstResponse });\n *\n * const secondResponse = await chat.call([secondMessage]);\n * console.log({ secondResponse });\n * ```\n */\nexport declare class FakeListChatModel extends BaseChatModel<FakeListChatModelCallOptions> {\n static lc_name(): string;\n lc_serializable: boolean;\n responses: string[];\n i: number;\n sleep?: number;\n emitCustomEvent: boolean;\n generationInfo?: Record<string, unknown>;\n private tools;\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n constructor(params: FakeChatInput);\n _combineLLMOutput(): never[];\n _llmType(): string;\n _generate(_messages: BaseMessage[], options?: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;\n _formatGeneration(text: string): {\n message: AIMessage<import(\"../../messages/message.js\").MessageStructure<import(\"../../messages/message.js\").MessageToolSet>>;\n text: string;\n };\n _streamResponseChunks(_messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;\n _sleepIfRequested(): Promise<void>;\n _sleep(): Promise<void>;\n _createResponseChunk(text: string, generationInfo?: Record<string, any>): ChatGenerationChunk;\n _currentResponse(): string;\n _incrementResponse(): void;\n bindTools(tools: (StructuredTool | ToolSpec)[]): Runnable<BaseLanguageModelInput, AIMessageChunk<import(\"../../messages/message.js\").MessageStructure<import(\"../../messages/message.js\").MessageToolSet>>, FakeListChatModelCallOptions>;\n withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(_params: StructuredOutputMethodParams<RunOutput, false> | InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;\n withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(_params: StructuredOutputMethodParams<RunOutput, true> | InteropZodType<RunOutput> | Record<string, 
any>, config?: StructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {\n raw: BaseMessage;\n parsed: RunOutput;\n }>;\n}\n//# sourceMappingURL=chat_models.d.ts.map"],"mappings":";;;;;;;;;;;;;;;UAUiBgB,QAAAA;;;UAGLD,iBAAiBE;AAH7B;AAQA;AAKA;;AAMaV,UAXIW,iCAAAA,SAA0ChB,wBAW9CK,CAAAA;AAN4D;AAYzE;;AAGyFP,UAfxEmB,4BAAAA,SAAqChB,mBAemCH,CAAAA;EAAmCQ;EAARa,KAAAA,CAAAA,EAAAA,MAAAA;EAHzEpB;EAAa,SAAA,CAAA,EARxCI,WAQwC,EAAA;EAKnCiB;EAA6CJ,MAAAA,CAAAA,EAXrDX,cAWqDW,EAAAA;EAEnDb;EACHE,SAAAA,CAAAA,EAAAA,QAAAA,GAAAA,WAAAA,GAAAA,SAAAA,GAAAA,QAAAA;EAIMgB;EAAOC,iBAAAA,CAAAA,EAAAA,MAAAA;;AAAmBE,cAZvBN,aAAAA,SAAsBnB,aAAAA,CAYCyB;EAAWC,iBAAAA,CAAAA,CAAAA,EAAAA,KAAAA,EAAAA;EAA8BR,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAA+Bf,SAAAA,CAAAA,QAAAA,EAT5FC,WAS4FD,EAAAA,EAAAA,OAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAT3BJ,wBAS2BI,CAAAA,EATAiB,OASAjB,CATQI,UASRJ,CAAAA;;AAE7EY,cATlBM,sBAAAA,SAA+BrB,aASbe,CAT2BE,iCAS3BF,CAAAA,CAAAA;EAAuBH,KAAAA,EAAAA,MAAAA;aAP/CR;UACHE;EAM0EA,SAAAA,EAAAA,QAAAA,GAAAA,WAAAA,GAAAA,SAAAA,GAAAA,QAAAA;EAA0HW,iBAAAA,CAAAA,EAAAA,MAAAA;EAA3JR,QAAAA,KAAAA;EAC7BL,WAAAA,CAAAA;IAAAA,KAAAA;IAAAA,SAAAA;IAAAA,MAAAA;IAAAA,SAAAA;IAAAA,iBAAAA;IAAAA,GAAAA;EAAAA,CAAAA,EAH6Dc,4BAG7Dd,GAH4FD,aAG5FC;EAAkEL,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAAmCQ,SAAAA,CAAAA,KAAAA,EAAAA,CADvGG,cACuGH,GADtFQ,QACsFR,CAAAA,EAAAA,CAAAA,EADxEE,QACwEF,CAD/DK,sBAC+DL,EADvCD,cACuCC,iBAAAA,gBAAAA,CAAAA,EADmFU,iCACnFV,CAAAA;EAARa,SAAAA,CAAAA,QAAAA,EAA7FhB,WAA6FgB,EAAAA,EAAAA,QAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,WAAAA,CAAAA,EAA3BrB,wBAA2BqB,CAAAA,EAAAA,OAAAA,CAAQb,UAARa,CAAAA;EAChFhB,qBAAAA,CAAAA,SAAAA,EAAAA,WAAAA,EAAAA,EAAAA,OAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAAgEL,wBAAhEK,CAAAA,EAA2F0B,cAA3F1B,CAA0GI,mBAA1GJ,CAAAA;;;;;AAX4B,UAgBhD2B,aAAAA,SAAsB7B,mBAhB0B,CAAA;EAgBhD6B;EAaAC,SAAAA,EAAAA,MAAAA,EAAAA;EAuBIC;EAAwCD,KAAAA,CAAAA,EAAAA,MAAAA;EAOxChB,eAAAA,CAAAA,EAAAA,OAAAA;EAGGe;;;;;mBAnCHf;;AAwCJX,UAtCA2B,4BAAAA,SAAqC/B,wBAsCrCI,CAAAA;EAGoBD,iBAAAA,CAAAA,EAAAA,MAAAA;;;;;;;;;;;;;;;;;;;;;;AAOuIY,cAzBvJiB,iBAAAA,SAA0BjC,aAyB6HgB,CAzB/GgB,4BAyB+GhB,CAAAA,CAAAA;EAA8BH,OAAAA,OAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAAgDD,eAAAA,EAAAA,OAAAA;EAAwBsB,SAAAA,EAAAA,MAAAA,EAAAA;EAAjCzB,CAAAA,EAAAA,MAAAA;EACtMO,KAAAA,CAAAA,EAAAA,MAAAA;EAAsBA,eAAAA,EAAAA,OAAAA;EAA2DkB,cAAAA,CAAAA,EAnBvGlB,MAmBuGkB,CAAAA,MAAAA,EAAAA,OAAAA,CAAAA;EAA7BvB,QAAAA,KAAAA;EAA+DuB,SAAAA,EAAAA,QAAAA,GAAAA,WAAAA,GAAAA,SAAAA,GAAAA,QAAAA;EAAfpB,WAAAA,CAAAA,MAAAA,EAhBvHiB,aAgBuHjB;EAA4BE,iBAAAA,CAAAA,CAAAA,EAAAA,KAAAA,EAAAA;EAA8BH,QAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAA+CD,SAAAA,CAAAA,SAAAA,EAb/NR,WAa+NQ,EAAAA,EAAAA,OAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAb9Jb,wBAa8Ja,CAAAA,EAbnIQ,OAamIR,CAb3HL,UAa2HK,CAAAA;EAC3OR,iBAAAA,CAAAA,IAAAA,EAAAA,MAAAA,CAAAA,EAAAA;IACG8B,OAAAA,EAbC7B,SAaD6B,iBAAAA,gBAAAA,CAAAA;IAF+NzB,IAAAA,EAAAA,MAAAA;EA1BhMT,CAAAA;EAAa,qBAAA,CAAA,SAAA,EAkBvBI,WAlBuB,EAAA,EAAA,OAAA,EAAA,IAAA,CAAA,mBAAA,CAAA,EAAA,UAAA,CAAA,EAkByCL,wBAlBzC,CAAA,EAkBoE+B,cAlBpE,CAkBmFtB,mBAlBnF,CAAA;uBAmBnCY;YACXA;sDAC0CJ,sBAAsBR;;;oBAGxDE,iBAAiBK,cAAcN,SAASG,wBAAwBN,kDAA0H0B;yCACrKhB,sBAAsBA,8BAA8BL,6BAA6BuB,oBAAoBpB,eAAeoB,aAAalB,8BAA8BH,uCAAuCJ,SAASG,wBAAwBsB;yCACvOlB,sBAAsBA,8BAA8BL,6BAA6BuB,mBAAmBpB,eAAeoB,aAAalB,8BAA8BH,sCAAsCJ,SAASG;SAC3OR;YACG8B"}
@@ -103,7 +103,7 @@ var FakeStreamingChatModel = class FakeStreamingChatModel extends BaseChatModel
 }] };
 return generation;
 }
-async *_streamResponseChunks(_messages,
+async *_streamResponseChunks(_messages, options, runManager) {
 if (this.thrownErrorString) throw new Error(this.thrownErrorString);
 if (this.chunks?.length) {
 for (const msgChunk of this.chunks) {
@@ -115,6 +115,7 @@ var FakeStreamingChatModel = class FakeStreamingChatModel extends BaseChatModel
 }),
 text: msgChunk.content?.toString() ?? ""
 });
+if (options.signal?.aborted) break;
 yield cg;
 await runManager?.handleLLMNewToken(msgChunk.content, void 0, void 0, void 0, void 0, { chunk: cg });
 }
@@ -128,6 +129,7 @@ var FakeStreamingChatModel = class FakeStreamingChatModel extends BaseChatModel
 message: new AIMessageChunk({ content: ch }),
 text: ch
 });
+if (options.signal?.aborted) break;
 yield cg;
 await runManager?.handleLLMNewToken(ch, void 0, void 0, void 0, void 0, { chunk: cg });
 }
@@ -210,6 +212,7 @@ var FakeListChatModel = class FakeListChatModel extends BaseChatModel {
 await this._sleepIfRequested();
 if (options?.thrownErrorString) throw new Error(options.thrownErrorString);
 const chunk = this._createResponseChunk(text, isLastChunk ? this.generationInfo : void 0);
+if (options.signal?.aborted) break;
 yield chunk;
 runManager?.handleLLMNewToken(text);
 }
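Each of the hunks above adds the same guard: `_streamResponseChunks` now checks `options.signal?.aborted` before each `yield`, so an aborted call stops emitting chunks instead of draining the full response (the first hunk renames `_options` to `options` so the parameter can actually be read). A hedged usage sketch, assuming the abort lands between chunks; the `sleep` value and chunk count are illustrative:

```typescript
// Sketch: cancelling a FakeListChatModel stream mid-way via AbortSignal.
// The `sleep` value just gives the abort time to land between chunks.
import { FakeListChatModel } from "@langchain/core/utils/testing";
import { HumanMessage } from "@langchain/core/messages";

const chat = new FakeListChatModel({
  responses: ["streamed character by character"],
  sleep: 10,
});

const controller = new AbortController();
let seen = 0;
try {
  const stream = await chat.stream([new HumanMessage("hi")], {
    signal: controller.signal,
  });
  for await (const _chunk of stream) {
    seen += 1;
    // On the next iteration the generator hits `options.signal?.aborted`
    // and breaks out of its chunk loop.
    if (seen === 3) controller.abort();
  }
} catch {
  // Depending on where the abort lands, the surrounding runnable
  // machinery may surface an abort error rather than ending cleanly.
}
console.log(`received ${seen} chunks before the stream stopped`);
```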
@@ -1 +1 @@
-
{"version":3,"file":"chat_models.js","names":["messages: BaseMessage[]","options?: this[\"ParsedCallOptions\"]","runManager?: CallbackManagerForLLMRun","tools: (StructuredTool | ToolSpec)[]","_options: this[\"ParsedCallOptions\"]","_runManager?: CallbackManagerForLLMRun","generation: ChatResult","_messages: BaseMessage[]","params: FakeChatInput","text: string","options: this[\"ParsedCallOptions\"]","generationInfo?: Record<string, any>","_params:\n | StructuredOutputMethodParams<RunOutput, boolean>\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>","_config?: StructuredOutputMethodOptions<boolean>"],"sources":["../../../src/utils/testing/chat_models.ts"],"sourcesContent":["import { CallbackManagerForLLMRun } from \"../../callbacks/manager.js\";\nimport {\n BaseChatModel,\n BaseChatModelCallOptions,\n BaseChatModelParams,\n} from \"../../language_models/chat_models.js\";\nimport { BaseLLMParams } from \"../../language_models/llms.js\";\nimport {\n BaseMessage,\n AIMessage,\n AIMessageChunk,\n} from \"../../messages/index.js\";\nimport { type ChatResult, ChatGenerationChunk } from \"../../outputs.js\";\nimport { Runnable, RunnableLambda } from \"../../runnables/base.js\";\nimport { StructuredTool } from \"../../tools/index.js\";\nimport {\n StructuredOutputMethodParams,\n BaseLanguageModelInput,\n StructuredOutputMethodOptions,\n} from \"../../language_models/base.js\";\n\nimport { toJsonSchema } from \"../json_schema.js\";\nimport { InteropZodType } from \"../types/zod.js\";\n\n/** Minimal shape actually needed by `bindTools` */\nexport interface ToolSpec {\n name: string;\n description?: string;\n schema: InteropZodType | Record<string, unknown>; // Either a Zod schema *or* a plain JSON-Schema object\n}\n\n/**\n * Interface specific to the Fake Streaming Chat model.\n */\nexport interface FakeStreamingChatModelCallOptions\n extends BaseChatModelCallOptions {}\n/**\n * Interface for the Constructor-field specific to the Fake Streaming Chat model (all optional because we fill in defaults).\n */\nexport interface FakeStreamingChatModelFields extends BaseChatModelParams {\n /** Milliseconds to pause between fallback char-by-char chunks */\n sleep?: number;\n\n /** Full AI messages to fall back to when no `chunks` supplied */\n responses?: BaseMessage[];\n\n /** Exact chunks to emit (can include tool-call deltas) */\n chunks?: AIMessageChunk[];\n\n /** How tool specs are formatted in `bindTools` */\n toolStyle?: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n\n /** Throw this error instead of streaming (useful in tests) */\n thrownErrorString?: string;\n}\n\nexport class FakeChatModel extends BaseChatModel {\n _combineLLMOutput() {\n return [];\n }\n\n _llmType(): string {\n return \"fake\";\n }\n\n async _generate(\n messages: BaseMessage[],\n options?: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n if (options?.stop?.length) {\n return {\n generations: [\n {\n message: new AIMessage(options.stop[0]),\n text: options.stop[0],\n },\n ],\n };\n }\n const text = messages\n .map((m) => {\n if (typeof m.content === \"string\") {\n return m.content;\n }\n return JSON.stringify(m.content, null, 2);\n })\n .join(\"\\n\");\n await runManager?.handleLLMNewToken(text);\n return {\n generations: [\n {\n message: new AIMessage(text),\n text,\n },\n ],\n llmOutput: {},\n };\n }\n}\n\nexport class FakeStreamingChatModel extends BaseChatModel<FakeStreamingChatModelCallOptions> {\n 
sleep = 50;\n\n responses: BaseMessage[] = [];\n\n chunks: AIMessageChunk[] = [];\n\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\" = \"openai\";\n\n thrownErrorString?: string;\n\n private tools: (StructuredTool | ToolSpec)[] = [];\n\n constructor({\n sleep = 50,\n responses = [],\n chunks = [],\n toolStyle = \"openai\",\n thrownErrorString,\n ...rest\n }: FakeStreamingChatModelFields & BaseLLMParams) {\n super(rest);\n this.sleep = sleep;\n this.responses = responses;\n this.chunks = chunks;\n this.toolStyle = toolStyle;\n this.thrownErrorString = thrownErrorString;\n }\n\n _llmType() {\n return \"fake\";\n }\n\n bindTools(tools: (StructuredTool | ToolSpec)[]) {\n const merged = [...this.tools, ...tools];\n\n const toolDicts = merged.map((t) => {\n switch (this.toolStyle) {\n case \"openai\":\n return {\n type: \"function\",\n function: {\n name: t.name,\n description: t.description,\n parameters: toJsonSchema(t.schema),\n },\n };\n case \"anthropic\":\n return {\n name: t.name,\n description: t.description,\n input_schema: toJsonSchema(t.schema),\n };\n case \"bedrock\":\n return {\n toolSpec: {\n name: t.name,\n description: t.description,\n inputSchema: toJsonSchema(t.schema),\n },\n };\n case \"google\":\n return {\n name: t.name,\n description: t.description,\n parameters: toJsonSchema(t.schema),\n };\n default:\n throw new Error(`Unsupported tool style: ${this.toolStyle}`);\n }\n });\n\n const wrapped =\n this.toolStyle === \"google\"\n ? [{ functionDeclarations: toolDicts }]\n : toolDicts;\n\n /* creating a *new* instance – mirrors LangChain .bind semantics for type-safety and avoiding noise */\n const next = new FakeStreamingChatModel({\n sleep: this.sleep,\n responses: this.responses,\n chunks: this.chunks,\n toolStyle: this.toolStyle,\n thrownErrorString: this.thrownErrorString,\n });\n next.tools = merged;\n\n return next.withConfig({ tools: wrapped } as BaseChatModelCallOptions);\n }\n\n async _generate(\n messages: BaseMessage[],\n _options: this[\"ParsedCallOptions\"],\n _runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n if (this.thrownErrorString) {\n throw new Error(this.thrownErrorString);\n }\n\n const content = this.responses?.[0]?.content ?? messages[0].content ?? \"\";\n\n const generation: ChatResult = {\n generations: [\n {\n text: \"\",\n message: new AIMessage({\n content,\n tool_calls: this.chunks?.[0]?.tool_calls,\n }),\n },\n ],\n };\n\n return generation;\n }\n\n async *_streamResponseChunks(\n _messages: BaseMessage[],\n _options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n if (this.thrownErrorString) {\n throw new Error(this.thrownErrorString);\n }\n if (this.chunks?.length) {\n for (const msgChunk of this.chunks) {\n const cg = new ChatGenerationChunk({\n message: new AIMessageChunk({\n content: msgChunk.content,\n tool_calls: msgChunk.tool_calls,\n additional_kwargs: msgChunk.additional_kwargs ?? {},\n }),\n text: msgChunk.content?.toString() ?? \"\",\n });\n\n yield cg;\n await runManager?.handleLLMNewToken(\n msgChunk.content as string,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: cg }\n );\n }\n return;\n }\n\n const fallback =\n this.responses?.[0] ??\n new AIMessage(\n typeof _messages[0].content === \"string\" ? _messages[0].content : \"\"\n );\n const text = typeof fallback.content === \"string\" ? 
fallback.content : \"\";\n\n for (const ch of text) {\n await new Promise((r) => setTimeout(r, this.sleep));\n const cg = new ChatGenerationChunk({\n message: new AIMessageChunk({ content: ch }),\n text: ch,\n });\n yield cg;\n await runManager?.handleLLMNewToken(\n ch,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: cg }\n );\n }\n }\n}\n\n/**\n * Interface for the input parameters specific to the Fake List Chat model.\n */\nexport interface FakeChatInput extends BaseChatModelParams {\n /** Responses to return */\n responses: string[];\n\n /** Time to sleep in milliseconds between responses */\n sleep?: number;\n\n emitCustomEvent?: boolean;\n\n /**\n * Generation info to include on the last chunk during streaming.\n * This gets merged into response_metadata by the base chat model.\n * Useful for testing response_metadata propagation (e.g., finish_reason).\n */\n generationInfo?: Record<string, unknown>;\n}\n\nexport interface FakeListChatModelCallOptions extends BaseChatModelCallOptions {\n thrownErrorString?: string;\n}\n\n/**\n * A fake Chat Model that returns a predefined list of responses. It can be used\n * for testing purposes.\n * @example\n * ```typescript\n * const chat = new FakeListChatModel({\n * responses: [\"I'll callback later.\", \"You 'console' them!\"]\n * });\n *\n * const firstMessage = new HumanMessage(\"You want to hear a JavaScript joke?\");\n * const secondMessage = new HumanMessage(\"How do you cheer up a JavaScript developer?\");\n *\n * // Call the chat model with a message and log the response\n * const firstResponse = await chat.call([firstMessage]);\n * console.log({ firstResponse });\n *\n * const secondResponse = await chat.call([secondMessage]);\n * console.log({ secondResponse });\n * ```\n */\nexport class FakeListChatModel extends BaseChatModel<FakeListChatModelCallOptions> {\n static lc_name() {\n return \"FakeListChatModel\";\n }\n\n lc_serializable = true;\n\n responses: string[];\n\n i = 0;\n\n sleep?: number;\n\n emitCustomEvent = false;\n\n generationInfo?: Record<string, unknown>;\n\n private tools: (StructuredTool | ToolSpec)[] = [];\n\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\" = \"openai\";\n\n constructor(params: FakeChatInput) {\n super(params);\n const { responses, sleep, emitCustomEvent, generationInfo } = params;\n this.responses = responses;\n this.sleep = sleep;\n this.emitCustomEvent = emitCustomEvent ?? 
this.emitCustomEvent;\n this.generationInfo = generationInfo;\n }\n\n _combineLLMOutput() {\n return [];\n }\n\n _llmType(): string {\n return \"fake-list\";\n }\n\n async _generate(\n _messages: BaseMessage[],\n options?: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n await this._sleepIfRequested();\n if (options?.thrownErrorString) {\n throw new Error(options.thrownErrorString);\n }\n if (this.emitCustomEvent) {\n await runManager?.handleCustomEvent(\"some_test_event\", {\n someval: true,\n });\n }\n\n if (options?.stop?.length) {\n return {\n generations: [this._formatGeneration(options.stop[0])],\n };\n } else {\n const response = this._currentResponse();\n this._incrementResponse();\n\n return {\n generations: [this._formatGeneration(response)],\n llmOutput: {},\n };\n }\n }\n\n _formatGeneration(text: string) {\n return {\n message: new AIMessage(text),\n text,\n };\n }\n\n async *_streamResponseChunks(\n _messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n const response = this._currentResponse();\n this._incrementResponse();\n if (this.emitCustomEvent) {\n await runManager?.handleCustomEvent(\"some_test_event\", {\n someval: true,\n });\n }\n\n const responseChars = [...response];\n for (let i = 0; i < responseChars.length; i++) {\n const text = responseChars[i];\n const isLastChunk = i === responseChars.length - 1;\n await this._sleepIfRequested();\n if (options?.thrownErrorString) {\n throw new Error(options.thrownErrorString);\n }\n // Include generationInfo on the last chunk (like real providers do)\n // This gets merged into response_metadata by the base chat model\n const chunk = this._createResponseChunk(\n text,\n isLastChunk ? this.generationInfo : undefined\n );\n yield chunk;\n // eslint-disable-next-line no-void\n void runManager?.handleLLMNewToken(text);\n }\n }\n\n async _sleepIfRequested() {\n if (this.sleep !== undefined) {\n await this._sleep();\n }\n }\n\n async _sleep() {\n return new Promise<void>((resolve) => {\n setTimeout(() => resolve(), this.sleep);\n });\n }\n\n _createResponseChunk(\n text: string,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n generationInfo?: Record<string, any>\n ): ChatGenerationChunk {\n return new ChatGenerationChunk({\n message: new AIMessageChunk({ content: text }),\n text,\n generationInfo,\n });\n }\n\n _currentResponse() {\n return this.responses[this.i];\n }\n\n _incrementResponse() {\n if (this.i < this.responses.length - 1) {\n this.i += 1;\n } else {\n this.i = 0;\n }\n }\n\n bindTools(tools: (StructuredTool | ToolSpec)[]) {\n const merged = [...this.tools, ...tools];\n\n const toolDicts = merged.map((t) => {\n switch (this.toolStyle) {\n case \"openai\":\n return {\n type: \"function\",\n function: {\n name: t.name,\n description: t.description,\n parameters: toJsonSchema(t.schema),\n },\n };\n case \"anthropic\":\n return {\n name: t.name,\n description: t.description,\n input_schema: toJsonSchema(t.schema),\n };\n case \"bedrock\":\n return {\n toolSpec: {\n name: t.name,\n description: t.description,\n inputSchema: toJsonSchema(t.schema),\n },\n };\n case \"google\":\n return {\n name: t.name,\n description: t.description,\n parameters: toJsonSchema(t.schema),\n };\n default:\n throw new Error(`Unsupported tool style: ${this.toolStyle}`);\n }\n });\n\n const wrapped =\n this.toolStyle === \"google\"\n ? 
[{ functionDeclarations: toolDicts }]\n : toolDicts;\n\n const next = new FakeListChatModel({\n responses: this.responses,\n sleep: this.sleep,\n emitCustomEvent: this.emitCustomEvent,\n generationInfo: this.generationInfo,\n });\n next.tools = merged;\n next.toolStyle = this.toolStyle;\n next.i = this.i;\n\n return next.withConfig({ tools: wrapped } as BaseChatModelCallOptions);\n }\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n _params:\n | StructuredOutputMethodParams<RunOutput, false>\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<false>\n ): Runnable<BaseLanguageModelInput, RunOutput>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n _params:\n | StructuredOutputMethodParams<RunOutput, true>\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<true>\n ): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n _params:\n | StructuredOutputMethodParams<RunOutput, boolean>\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n _config?: StructuredOutputMethodOptions<boolean>\n ):\n | Runnable<BaseLanguageModelInput, RunOutput>\n | Runnable<\n BaseLanguageModelInput,\n { raw: BaseMessage; parsed: RunOutput }\n > {\n return RunnableLambda.from(async (input) => {\n const message = await this.invoke(input);\n if (message.tool_calls?.[0]?.args) {\n return message.tool_calls[0].args as RunOutput;\n }\n if (typeof message.content === \"string\") {\n return JSON.parse(message.content);\n }\n throw new Error(\"No structured output found\");\n }) as Runnable;\n 
}\n}\n"],"mappings":";;;;;;;;AAwDA,IAAa,gBAAb,cAAmC,cAAc;CAC/C,oBAAoB;AAClB,SAAO,CAAE;CACV;CAED,WAAmB;AACjB,SAAO;CACR;CAED,MAAM,UACJA,UACAC,SACAC,YACqB;AACrB,MAAI,SAAS,MAAM,OACjB,QAAO,EACL,aAAa,CACX;GACE,SAAS,IAAI,UAAU,QAAQ,KAAK;GACpC,MAAM,QAAQ,KAAK;EACpB,CACF,EACF;EAEH,MAAM,OAAO,SACV,IAAI,CAAC,MAAM;AACV,OAAI,OAAO,EAAE,YAAY,SACvB,QAAO,EAAE;AAEX,UAAO,KAAK,UAAU,EAAE,SAAS,MAAM,EAAE;EAC1C,EAAC,CACD,KAAK,KAAK;EACb,MAAM,YAAY,kBAAkB,KAAK;AACzC,SAAO;GACL,aAAa,CACX;IACE,SAAS,IAAI,UAAU;IACvB;GACD,CACF;GACD,WAAW,CAAE;EACd;CACF;AACF;AAED,IAAa,yBAAb,MAAa,+BAA+B,cAAiD;CAC3F,QAAQ;CAER,YAA2B,CAAE;CAE7B,SAA2B,CAAE;CAE7B,YAA2D;CAE3D;CAEA,AAAQ,QAAuC,CAAE;CAEjD,YAAY,EACV,QAAQ,IACR,YAAY,CAAE,GACd,SAAS,CAAE,GACX,YAAY,UACZ,kBACA,GAAG,MAC0C,EAAE;EAC/C,MAAM,KAAK;EACX,KAAK,QAAQ;EACb,KAAK,YAAY;EACjB,KAAK,SAAS;EACd,KAAK,YAAY;EACjB,KAAK,oBAAoB;CAC1B;CAED,WAAW;AACT,SAAO;CACR;CAED,UAAUC,OAAsC;EAC9C,MAAM,SAAS,CAAC,GAAG,KAAK,OAAO,GAAG,KAAM;EAExC,MAAM,YAAY,OAAO,IAAI,CAAC,MAAM;AAClC,WAAQ,KAAK,WAAb;IACE,KAAK,SACH,QAAO;KACL,MAAM;KACN,UAAU;MACR,MAAM,EAAE;MACR,aAAa,EAAE;MACf,YAAY,aAAa,EAAE,OAAO;KACnC;IACF;IACH,KAAK,YACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,cAAc,aAAa,EAAE,OAAO;IACrC;IACH,KAAK,UACH,QAAO,EACL,UAAU;KACR,MAAM,EAAE;KACR,aAAa,EAAE;KACf,aAAa,aAAa,EAAE,OAAO;IACpC,EACF;IACH,KAAK,SACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,YAAY,aAAa,EAAE,OAAO;IACnC;IACH,QACE,OAAM,IAAI,MAAM,CAAC,wBAAwB,EAAE,KAAK,WAAW;GAC9D;EACF,EAAC;EAEF,MAAM,UACJ,KAAK,cAAc,WACf,CAAC,EAAE,sBAAsB,UAAW,CAAC,IACrC;EAGN,MAAM,OAAO,IAAI,uBAAuB;GACtC,OAAO,KAAK;GACZ,WAAW,KAAK;GAChB,QAAQ,KAAK;GACb,WAAW,KAAK;GAChB,mBAAmB,KAAK;EACzB;EACD,KAAK,QAAQ;AAEb,SAAO,KAAK,WAAW,EAAE,OAAO,QAAS,EAA6B;CACvE;CAED,MAAM,UACJH,UACAI,UACAC,aACqB;AACrB,MAAI,KAAK,kBACP,OAAM,IAAI,MAAM,KAAK;EAGvB,MAAM,UAAU,KAAK,YAAY,IAAI,WAAW,SAAS,GAAG,WAAW;EAEvE,MAAMC,aAAyB,EAC7B,aAAa,CACX;GACE,MAAM;GACN,SAAS,IAAI,UAAU;IACrB;IACA,YAAY,KAAK,SAAS,IAAI;GAC/B;EACF,CACF,EACF;AAED,SAAO;CACR;CAED,OAAO,sBACLC,WACAH,UACAF,YACqC;AACrC,MAAI,KAAK,kBACP,OAAM,IAAI,MAAM,KAAK;AAEvB,MAAI,KAAK,QAAQ,QAAQ;AACvB,QAAK,MAAM,YAAY,KAAK,QAAQ;IAClC,MAAM,KAAK,IAAI,oBAAoB;KACjC,SAAS,IAAI,eAAe;MAC1B,SAAS,SAAS;MAClB,YAAY,SAAS;MACrB,mBAAmB,SAAS,qBAAqB,CAAE;KACpD;KACD,MAAM,SAAS,SAAS,UAAU,IAAI;IACvC;IAED,MAAM;IACN,MAAM,YAAY,kBAChB,SAAS,SACT,QACA,QACA,QACA,QACA,EAAE,OAAO,GAAI,EACd;GACF;AACD;EACD;EAED,MAAM,WACJ,KAAK,YAAY,MACjB,IAAI,UACF,OAAO,UAAU,GAAG,YAAY,WAAW,UAAU,GAAG,UAAU;EAEtE,MAAM,OAAO,OAAO,SAAS,YAAY,WAAW,SAAS,UAAU;AAEvE,OAAK,MAAM,MAAM,MAAM;GACrB,MAAM,IAAI,QAAQ,CAAC,MAAM,WAAW,GAAG,KAAK,MAAM;GAClD,MAAM,KAAK,IAAI,oBAAoB;IACjC,SAAS,IAAI,eAAe,EAAE,SAAS,GAAI;IAC3C,MAAM;GACP;GACD,MAAM;GACN,MAAM,YAAY,kBAChB,IACA,QACA,QACA,QACA,QACA,EAAE,OAAO,GAAI,EACd;EACF;CACF;AACF;;;;;;;;;;;;;;;;;;;;;AA8CD,IAAa,oBAAb,MAAa,0BAA0B,cAA4C;CACjF,OAAO,UAAU;AACf,SAAO;CACR;CAED,kBAAkB;CAElB;CAEA,IAAI;CAEJ;CAEA,kBAAkB;CAElB;CAEA,AAAQ,QAAuC,CAAE;CAEjD,YAA2D;CAE3D,YAAYM,QAAuB;EACjC,MAAM,OAAO;EACb,MAAM,EAAE,WAAW,OAAO,iBAAiB,gBAAgB,GAAG;EAC9D,KAAK,YAAY;EACjB,KAAK,QAAQ;EACb,KAAK,kBAAkB,mBAAmB,KAAK;EAC/C,KAAK,iBAAiB;CACvB;CAED,oBAAoB;AAClB,SAAO,CAAE;CACV;CAED,WAAmB;AACjB,SAAO;CACR;CAED,MAAM,UACJD,WACAN,SACAC,YACqB;EACrB,MAAM,KAAK,mBAAmB;AAC9B,MAAI,SAAS,kBACX,OAAM,IAAI,MAAM,QAAQ;AAE1B,MAAI,KAAK,iBACP,MAAM,YAAY,kBAAkB,mBAAmB,EACrD,SAAS,KACV,EAAC;AAGJ,MAAI,SAAS,MAAM,OACjB,QAAO,EACL,aAAa,CAAC,KAAK,kBAAkB,QAAQ,KAAK,GAAG,AAAC,EACvD;OACI;GACL,MAAM,WAAW,KAAK,kBAAkB;GACxC,KAAK,oBAAoB;AAEzB,UAAO;IACL,aAAa,CAAC,KAAK,kBAAkB,SAAS,AAAC;IAC/C,WAAW,CAAE;GACd;EACF;CACF;CAED,kBAAkBO,MAAc;AAC9B,SAAO;GACL,SAAS,IAAI,UAAU;GACvB;EACD;CACF;CAED,OAAO,sBACLF,WACAG,SACAR,YACqC;EACrC,MAAM,WAAW,K
AAK,kBAAkB;EACxC,KAAK,oBAAoB;AACzB,MAAI,KAAK,iBACP,MAAM,YAAY,kBAAkB,mBAAmB,EACrD,SAAS,KACV,EAAC;EAGJ,MAAM,gBAAgB,CAAC,GAAG,QAAS;AACnC,OAAK,IAAI,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;GAC7C,MAAM,OAAO,cAAc;GAC3B,MAAM,cAAc,MAAM,cAAc,SAAS;GACjD,MAAM,KAAK,mBAAmB;AAC9B,OAAI,SAAS,kBACX,OAAM,IAAI,MAAM,QAAQ;GAI1B,MAAM,QAAQ,KAAK,qBACjB,MACA,cAAc,KAAK,iBAAiB,OACrC;GACD,MAAM;GAED,YAAY,kBAAkB,KAAK;EACzC;CACF;CAED,MAAM,oBAAoB;AACxB,MAAI,KAAK,UAAU,QACjB,MAAM,KAAK,QAAQ;CAEtB;CAED,MAAM,SAAS;AACb,SAAO,IAAI,QAAc,CAAC,YAAY;GACpC,WAAW,MAAM,SAAS,EAAE,KAAK,MAAM;EACxC;CACF;CAED,qBACEO,MAEAE,gBACqB;AACrB,SAAO,IAAI,oBAAoB;GAC7B,SAAS,IAAI,eAAe,EAAE,SAAS,KAAM;GAC7C;GACA;EACD;CACF;CAED,mBAAmB;AACjB,SAAO,KAAK,UAAU,KAAK;CAC5B;CAED,qBAAqB;AACnB,MAAI,KAAK,IAAI,KAAK,UAAU,SAAS,GACnC,KAAK,KAAK;OAEV,KAAK,IAAI;CAEZ;CAED,UAAUR,OAAsC;EAC9C,MAAM,SAAS,CAAC,GAAG,KAAK,OAAO,GAAG,KAAM;EAExC,MAAM,YAAY,OAAO,IAAI,CAAC,MAAM;AAClC,WAAQ,KAAK,WAAb;IACE,KAAK,SACH,QAAO;KACL,MAAM;KACN,UAAU;MACR,MAAM,EAAE;MACR,aAAa,EAAE;MACf,YAAY,aAAa,EAAE,OAAO;KACnC;IACF;IACH,KAAK,YACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,cAAc,aAAa,EAAE,OAAO;IACrC;IACH,KAAK,UACH,QAAO,EACL,UAAU;KACR,MAAM,EAAE;KACR,aAAa,EAAE;KACf,aAAa,aAAa,EAAE,OAAO;IACpC,EACF;IACH,KAAK,SACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,YAAY,aAAa,EAAE,OAAO;IACnC;IACH,QACE,OAAM,IAAI,MAAM,CAAC,wBAAwB,EAAE,KAAK,WAAW;GAC9D;EACF,EAAC;EAEF,MAAM,UACJ,KAAK,cAAc,WACf,CAAC,EAAE,sBAAsB,UAAW,CAAC,IACrC;EAEN,MAAM,OAAO,IAAI,kBAAkB;GACjC,WAAW,KAAK;GAChB,OAAO,KAAK;GACZ,iBAAiB,KAAK;GACtB,gBAAgB,KAAK;EACtB;EACD,KAAK,QAAQ;EACb,KAAK,YAAY,KAAK;EACtB,KAAK,IAAI,KAAK;AAEd,SAAO,KAAK,WAAW,EAAE,OAAO,QAAS,EAA6B;CACvE;CA0BD,qBAIES,SAKAC,SAMI;AACJ,SAAO,eAAe,KAAK,OAAO,UAAU;GAC1C,MAAM,UAAU,MAAM,KAAK,OAAO,MAAM;AACxC,OAAI,QAAQ,aAAa,IAAI,KAC3B,QAAO,QAAQ,WAAW,GAAG;AAE/B,OAAI,OAAO,QAAQ,YAAY,SAC7B,QAAO,KAAK,MAAM,QAAQ,QAAQ;AAEpC,SAAM,IAAI,MAAM;EACjB,EAAC;CACH;AACF"}
+
{"version":3,"file":"chat_models.js","names":["messages: BaseMessage[]","options?: this[\"ParsedCallOptions\"]","runManager?: CallbackManagerForLLMRun","tools: (StructuredTool | ToolSpec)[]","_options: this[\"ParsedCallOptions\"]","_runManager?: CallbackManagerForLLMRun","generation: ChatResult","_messages: BaseMessage[]","options: this[\"ParsedCallOptions\"]","params: FakeChatInput","text: string","generationInfo?: Record<string, any>","_params:\n | StructuredOutputMethodParams<RunOutput, boolean>\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>","_config?: StructuredOutputMethodOptions<boolean>"],"sources":["../../../src/utils/testing/chat_models.ts"],"sourcesContent":["import { CallbackManagerForLLMRun } from \"../../callbacks/manager.js\";\nimport {\n BaseChatModel,\n BaseChatModelCallOptions,\n BaseChatModelParams,\n} from \"../../language_models/chat_models.js\";\nimport { BaseLLMParams } from \"../../language_models/llms.js\";\nimport {\n BaseMessage,\n AIMessage,\n AIMessageChunk,\n} from \"../../messages/index.js\";\nimport { type ChatResult, ChatGenerationChunk } from \"../../outputs.js\";\nimport { Runnable, RunnableLambda } from \"../../runnables/base.js\";\nimport { StructuredTool } from \"../../tools/index.js\";\nimport {\n StructuredOutputMethodParams,\n BaseLanguageModelInput,\n StructuredOutputMethodOptions,\n} from \"../../language_models/base.js\";\n\nimport { toJsonSchema } from \"../json_schema.js\";\nimport { InteropZodType } from \"../types/zod.js\";\n\n/** Minimal shape actually needed by `bindTools` */\nexport interface ToolSpec {\n name: string;\n description?: string;\n schema: InteropZodType | Record<string, unknown>; // Either a Zod schema *or* a plain JSON-Schema object\n}\n\n/**\n * Interface specific to the Fake Streaming Chat model.\n */\nexport interface FakeStreamingChatModelCallOptions\n extends BaseChatModelCallOptions {}\n/**\n * Interface for the Constructor-field specific to the Fake Streaming Chat model (all optional because we fill in defaults).\n */\nexport interface FakeStreamingChatModelFields extends BaseChatModelParams {\n /** Milliseconds to pause between fallback char-by-char chunks */\n sleep?: number;\n\n /** Full AI messages to fall back to when no `chunks` supplied */\n responses?: BaseMessage[];\n\n /** Exact chunks to emit (can include tool-call deltas) */\n chunks?: AIMessageChunk[];\n\n /** How tool specs are formatted in `bindTools` */\n toolStyle?: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\";\n\n /** Throw this error instead of streaming (useful in tests) */\n thrownErrorString?: string;\n}\n\nexport class FakeChatModel extends BaseChatModel {\n _combineLLMOutput() {\n return [];\n }\n\n _llmType(): string {\n return \"fake\";\n }\n\n async _generate(\n messages: BaseMessage[],\n options?: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n if (options?.stop?.length) {\n return {\n generations: [\n {\n message: new AIMessage(options.stop[0]),\n text: options.stop[0],\n },\n ],\n };\n }\n const text = messages\n .map((m) => {\n if (typeof m.content === \"string\") {\n return m.content;\n }\n return JSON.stringify(m.content, null, 2);\n })\n .join(\"\\n\");\n await runManager?.handleLLMNewToken(text);\n return {\n generations: [\n {\n message: new AIMessage(text),\n text,\n },\n ],\n llmOutput: {},\n };\n }\n}\n\nexport class FakeStreamingChatModel extends BaseChatModel<FakeStreamingChatModelCallOptions> {\n 
sleep = 50;\n\n responses: BaseMessage[] = [];\n\n chunks: AIMessageChunk[] = [];\n\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\" = \"openai\";\n\n thrownErrorString?: string;\n\n private tools: (StructuredTool | ToolSpec)[] = [];\n\n constructor({\n sleep = 50,\n responses = [],\n chunks = [],\n toolStyle = \"openai\",\n thrownErrorString,\n ...rest\n }: FakeStreamingChatModelFields & BaseLLMParams) {\n super(rest);\n this.sleep = sleep;\n this.responses = responses;\n this.chunks = chunks;\n this.toolStyle = toolStyle;\n this.thrownErrorString = thrownErrorString;\n }\n\n _llmType() {\n return \"fake\";\n }\n\n bindTools(tools: (StructuredTool | ToolSpec)[]) {\n const merged = [...this.tools, ...tools];\n\n const toolDicts = merged.map((t) => {\n switch (this.toolStyle) {\n case \"openai\":\n return {\n type: \"function\",\n function: {\n name: t.name,\n description: t.description,\n parameters: toJsonSchema(t.schema),\n },\n };\n case \"anthropic\":\n return {\n name: t.name,\n description: t.description,\n input_schema: toJsonSchema(t.schema),\n };\n case \"bedrock\":\n return {\n toolSpec: {\n name: t.name,\n description: t.description,\n inputSchema: toJsonSchema(t.schema),\n },\n };\n case \"google\":\n return {\n name: t.name,\n description: t.description,\n parameters: toJsonSchema(t.schema),\n };\n default:\n throw new Error(`Unsupported tool style: ${this.toolStyle}`);\n }\n });\n\n const wrapped =\n this.toolStyle === \"google\"\n ? [{ functionDeclarations: toolDicts }]\n : toolDicts;\n\n /* creating a *new* instance – mirrors LangChain .bind semantics for type-safety and avoiding noise */\n const next = new FakeStreamingChatModel({\n sleep: this.sleep,\n responses: this.responses,\n chunks: this.chunks,\n toolStyle: this.toolStyle,\n thrownErrorString: this.thrownErrorString,\n });\n next.tools = merged;\n\n return next.withConfig({ tools: wrapped } as BaseChatModelCallOptions);\n }\n\n async _generate(\n messages: BaseMessage[],\n _options: this[\"ParsedCallOptions\"],\n _runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n if (this.thrownErrorString) {\n throw new Error(this.thrownErrorString);\n }\n\n const content = this.responses?.[0]?.content ?? messages[0].content ?? \"\";\n\n const generation: ChatResult = {\n generations: [\n {\n text: \"\",\n message: new AIMessage({\n content,\n tool_calls: this.chunks?.[0]?.tool_calls,\n }),\n },\n ],\n };\n\n return generation;\n }\n\n async *_streamResponseChunks(\n _messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n if (this.thrownErrorString) {\n throw new Error(this.thrownErrorString);\n }\n if (this.chunks?.length) {\n for (const msgChunk of this.chunks) {\n const cg = new ChatGenerationChunk({\n message: new AIMessageChunk({\n content: msgChunk.content,\n tool_calls: msgChunk.tool_calls,\n additional_kwargs: msgChunk.additional_kwargs ?? {},\n }),\n text: msgChunk.content?.toString() ?? \"\",\n });\n\n if (options.signal?.aborted) break;\n yield cg;\n await runManager?.handleLLMNewToken(\n msgChunk.content as string,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: cg }\n );\n }\n return;\n }\n\n const fallback =\n this.responses?.[0] ??\n new AIMessage(\n typeof _messages[0].content === \"string\" ? _messages[0].content : \"\"\n );\n const text = typeof fallback.content === \"string\" ? 
fallback.content : \"\";\n\n for (const ch of text) {\n await new Promise((r) => setTimeout(r, this.sleep));\n const cg = new ChatGenerationChunk({\n message: new AIMessageChunk({ content: ch }),\n text: ch,\n });\n if (options.signal?.aborted) break;\n yield cg;\n await runManager?.handleLLMNewToken(\n ch,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: cg }\n );\n }\n }\n}\n\n/**\n * Interface for the input parameters specific to the Fake List Chat model.\n */\nexport interface FakeChatInput extends BaseChatModelParams {\n /** Responses to return */\n responses: string[];\n\n /** Time to sleep in milliseconds between responses */\n sleep?: number;\n\n emitCustomEvent?: boolean;\n\n /**\n * Generation info to include on the last chunk during streaming.\n * This gets merged into response_metadata by the base chat model.\n * Useful for testing response_metadata propagation (e.g., finish_reason).\n */\n generationInfo?: Record<string, unknown>;\n}\n\nexport interface FakeListChatModelCallOptions extends BaseChatModelCallOptions {\n thrownErrorString?: string;\n}\n\n/**\n * A fake Chat Model that returns a predefined list of responses. It can be used\n * for testing purposes.\n * @example\n * ```typescript\n * const chat = new FakeListChatModel({\n * responses: [\"I'll callback later.\", \"You 'console' them!\"]\n * });\n *\n * const firstMessage = new HumanMessage(\"You want to hear a JavaScript joke?\");\n * const secondMessage = new HumanMessage(\"How do you cheer up a JavaScript developer?\");\n *\n * // Call the chat model with a message and log the response\n * const firstResponse = await chat.call([firstMessage]);\n * console.log({ firstResponse });\n *\n * const secondResponse = await chat.call([secondMessage]);\n * console.log({ secondResponse });\n * ```\n */\nexport class FakeListChatModel extends BaseChatModel<FakeListChatModelCallOptions> {\n static lc_name() {\n return \"FakeListChatModel\";\n }\n\n lc_serializable = true;\n\n responses: string[];\n\n i = 0;\n\n sleep?: number;\n\n emitCustomEvent = false;\n\n generationInfo?: Record<string, unknown>;\n\n private tools: (StructuredTool | ToolSpec)[] = [];\n\n toolStyle: \"openai\" | \"anthropic\" | \"bedrock\" | \"google\" = \"openai\";\n\n constructor(params: FakeChatInput) {\n super(params);\n const { responses, sleep, emitCustomEvent, generationInfo } = params;\n this.responses = responses;\n this.sleep = sleep;\n this.emitCustomEvent = emitCustomEvent ?? 
this.emitCustomEvent;\n this.generationInfo = generationInfo;\n }\n\n _combineLLMOutput() {\n return [];\n }\n\n _llmType(): string {\n return \"fake-list\";\n }\n\n async _generate(\n _messages: BaseMessage[],\n options?: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n await this._sleepIfRequested();\n if (options?.thrownErrorString) {\n throw new Error(options.thrownErrorString);\n }\n if (this.emitCustomEvent) {\n await runManager?.handleCustomEvent(\"some_test_event\", {\n someval: true,\n });\n }\n\n if (options?.stop?.length) {\n return {\n generations: [this._formatGeneration(options.stop[0])],\n };\n } else {\n const response = this._currentResponse();\n this._incrementResponse();\n\n return {\n generations: [this._formatGeneration(response)],\n llmOutput: {},\n };\n }\n }\n\n _formatGeneration(text: string) {\n return {\n message: new AIMessage(text),\n text,\n };\n }\n\n async *_streamResponseChunks(\n _messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n const response = this._currentResponse();\n this._incrementResponse();\n if (this.emitCustomEvent) {\n await runManager?.handleCustomEvent(\"some_test_event\", {\n someval: true,\n });\n }\n\n const responseChars = [...response];\n for (let i = 0; i < responseChars.length; i++) {\n const text = responseChars[i];\n const isLastChunk = i === responseChars.length - 1;\n await this._sleepIfRequested();\n if (options?.thrownErrorString) {\n throw new Error(options.thrownErrorString);\n }\n // Include generationInfo on the last chunk (like real providers do)\n // This gets merged into response_metadata by the base chat model\n const chunk = this._createResponseChunk(\n text,\n isLastChunk ? this.generationInfo : undefined\n );\n if (options.signal?.aborted) break;\n yield chunk;\n // eslint-disable-next-line no-void\n void runManager?.handleLLMNewToken(text);\n }\n }\n\n async _sleepIfRequested() {\n if (this.sleep !== undefined) {\n await this._sleep();\n }\n }\n\n async _sleep() {\n return new Promise<void>((resolve) => {\n setTimeout(() => resolve(), this.sleep);\n });\n }\n\n _createResponseChunk(\n text: string,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n generationInfo?: Record<string, any>\n ): ChatGenerationChunk {\n return new ChatGenerationChunk({\n message: new AIMessageChunk({ content: text }),\n text,\n generationInfo,\n });\n }\n\n _currentResponse() {\n return this.responses[this.i];\n }\n\n _incrementResponse() {\n if (this.i < this.responses.length - 1) {\n this.i += 1;\n } else {\n this.i = 0;\n }\n }\n\n bindTools(tools: (StructuredTool | ToolSpec)[]) {\n const merged = [...this.tools, ...tools];\n\n const toolDicts = merged.map((t) => {\n switch (this.toolStyle) {\n case \"openai\":\n return {\n type: \"function\",\n function: {\n name: t.name,\n description: t.description,\n parameters: toJsonSchema(t.schema),\n },\n };\n case \"anthropic\":\n return {\n name: t.name,\n description: t.description,\n input_schema: toJsonSchema(t.schema),\n };\n case \"bedrock\":\n return {\n toolSpec: {\n name: t.name,\n description: t.description,\n inputSchema: toJsonSchema(t.schema),\n },\n };\n case \"google\":\n return {\n name: t.name,\n description: t.description,\n parameters: toJsonSchema(t.schema),\n };\n default:\n throw new Error(`Unsupported tool style: ${this.toolStyle}`);\n }\n });\n\n const wrapped =\n this.toolStyle === \"google\"\n ? 
[{ functionDeclarations: toolDicts }]\n : toolDicts;\n\n const next = new FakeListChatModel({\n responses: this.responses,\n sleep: this.sleep,\n emitCustomEvent: this.emitCustomEvent,\n generationInfo: this.generationInfo,\n });\n next.tools = merged;\n next.toolStyle = this.toolStyle;\n next.i = this.i;\n\n return next.withConfig({ tools: wrapped } as BaseChatModelCallOptions);\n }\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n _params:\n | StructuredOutputMethodParams<RunOutput, false>\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<false>\n ): Runnable<BaseLanguageModelInput, RunOutput>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n _params:\n | StructuredOutputMethodParams<RunOutput, true>\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<true>\n ): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n _params:\n | StructuredOutputMethodParams<RunOutput, boolean>\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n _config?: StructuredOutputMethodOptions<boolean>\n ):\n | Runnable<BaseLanguageModelInput, RunOutput>\n | Runnable<\n BaseLanguageModelInput,\n { raw: BaseMessage; parsed: RunOutput }\n > {\n return RunnableLambda.from(async (input) => {\n const message = await this.invoke(input);\n if (message.tool_calls?.[0]?.args) {\n return message.tool_calls[0].args as RunOutput;\n }\n if (typeof message.content === \"string\") {\n return JSON.parse(message.content);\n }\n throw new Error(\"No structured output found\");\n }) as Runnable;\n 
}\n}\n"],"mappings":";;;;;;;;AAwDA,IAAa,gBAAb,cAAmC,cAAc;CAC/C,oBAAoB;AAClB,SAAO,CAAE;CACV;CAED,WAAmB;AACjB,SAAO;CACR;CAED,MAAM,UACJA,UACAC,SACAC,YACqB;AACrB,MAAI,SAAS,MAAM,OACjB,QAAO,EACL,aAAa,CACX;GACE,SAAS,IAAI,UAAU,QAAQ,KAAK;GACpC,MAAM,QAAQ,KAAK;EACpB,CACF,EACF;EAEH,MAAM,OAAO,SACV,IAAI,CAAC,MAAM;AACV,OAAI,OAAO,EAAE,YAAY,SACvB,QAAO,EAAE;AAEX,UAAO,KAAK,UAAU,EAAE,SAAS,MAAM,EAAE;EAC1C,EAAC,CACD,KAAK,KAAK;EACb,MAAM,YAAY,kBAAkB,KAAK;AACzC,SAAO;GACL,aAAa,CACX;IACE,SAAS,IAAI,UAAU;IACvB;GACD,CACF;GACD,WAAW,CAAE;EACd;CACF;AACF;AAED,IAAa,yBAAb,MAAa,+BAA+B,cAAiD;CAC3F,QAAQ;CAER,YAA2B,CAAE;CAE7B,SAA2B,CAAE;CAE7B,YAA2D;CAE3D;CAEA,AAAQ,QAAuC,CAAE;CAEjD,YAAY,EACV,QAAQ,IACR,YAAY,CAAE,GACd,SAAS,CAAE,GACX,YAAY,UACZ,kBACA,GAAG,MAC0C,EAAE;EAC/C,MAAM,KAAK;EACX,KAAK,QAAQ;EACb,KAAK,YAAY;EACjB,KAAK,SAAS;EACd,KAAK,YAAY;EACjB,KAAK,oBAAoB;CAC1B;CAED,WAAW;AACT,SAAO;CACR;CAED,UAAUC,OAAsC;EAC9C,MAAM,SAAS,CAAC,GAAG,KAAK,OAAO,GAAG,KAAM;EAExC,MAAM,YAAY,OAAO,IAAI,CAAC,MAAM;AAClC,WAAQ,KAAK,WAAb;IACE,KAAK,SACH,QAAO;KACL,MAAM;KACN,UAAU;MACR,MAAM,EAAE;MACR,aAAa,EAAE;MACf,YAAY,aAAa,EAAE,OAAO;KACnC;IACF;IACH,KAAK,YACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,cAAc,aAAa,EAAE,OAAO;IACrC;IACH,KAAK,UACH,QAAO,EACL,UAAU;KACR,MAAM,EAAE;KACR,aAAa,EAAE;KACf,aAAa,aAAa,EAAE,OAAO;IACpC,EACF;IACH,KAAK,SACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,YAAY,aAAa,EAAE,OAAO;IACnC;IACH,QACE,OAAM,IAAI,MAAM,CAAC,wBAAwB,EAAE,KAAK,WAAW;GAC9D;EACF,EAAC;EAEF,MAAM,UACJ,KAAK,cAAc,WACf,CAAC,EAAE,sBAAsB,UAAW,CAAC,IACrC;EAGN,MAAM,OAAO,IAAI,uBAAuB;GACtC,OAAO,KAAK;GACZ,WAAW,KAAK;GAChB,QAAQ,KAAK;GACb,WAAW,KAAK;GAChB,mBAAmB,KAAK;EACzB;EACD,KAAK,QAAQ;AAEb,SAAO,KAAK,WAAW,EAAE,OAAO,QAAS,EAA6B;CACvE;CAED,MAAM,UACJH,UACAI,UACAC,aACqB;AACrB,MAAI,KAAK,kBACP,OAAM,IAAI,MAAM,KAAK;EAGvB,MAAM,UAAU,KAAK,YAAY,IAAI,WAAW,SAAS,GAAG,WAAW;EAEvE,MAAMC,aAAyB,EAC7B,aAAa,CACX;GACE,MAAM;GACN,SAAS,IAAI,UAAU;IACrB;IACA,YAAY,KAAK,SAAS,IAAI;GAC/B;EACF,CACF,EACF;AAED,SAAO;CACR;CAED,OAAO,sBACLC,WACAC,SACAN,YACqC;AACrC,MAAI,KAAK,kBACP,OAAM,IAAI,MAAM,KAAK;AAEvB,MAAI,KAAK,QAAQ,QAAQ;AACvB,QAAK,MAAM,YAAY,KAAK,QAAQ;IAClC,MAAM,KAAK,IAAI,oBAAoB;KACjC,SAAS,IAAI,eAAe;MAC1B,SAAS,SAAS;MAClB,YAAY,SAAS;MACrB,mBAAmB,SAAS,qBAAqB,CAAE;KACpD;KACD,MAAM,SAAS,SAAS,UAAU,IAAI;IACvC;AAED,QAAI,QAAQ,QAAQ,QAAS;IAC7B,MAAM;IACN,MAAM,YAAY,kBAChB,SAAS,SACT,QACA,QACA,QACA,QACA,EAAE,OAAO,GAAI,EACd;GACF;AACD;EACD;EAED,MAAM,WACJ,KAAK,YAAY,MACjB,IAAI,UACF,OAAO,UAAU,GAAG,YAAY,WAAW,UAAU,GAAG,UAAU;EAEtE,MAAM,OAAO,OAAO,SAAS,YAAY,WAAW,SAAS,UAAU;AAEvE,OAAK,MAAM,MAAM,MAAM;GACrB,MAAM,IAAI,QAAQ,CAAC,MAAM,WAAW,GAAG,KAAK,MAAM;GAClD,MAAM,KAAK,IAAI,oBAAoB;IACjC,SAAS,IAAI,eAAe,EAAE,SAAS,GAAI;IAC3C,MAAM;GACP;AACD,OAAI,QAAQ,QAAQ,QAAS;GAC7B,MAAM;GACN,MAAM,YAAY,kBAChB,IACA,QACA,QACA,QACA,QACA,EAAE,OAAO,GAAI,EACd;EACF;CACF;AACF;;;;;;;;;;;;;;;;;;;;;AA8CD,IAAa,oBAAb,MAAa,0BAA0B,cAA4C;CACjF,OAAO,UAAU;AACf,SAAO;CACR;CAED,kBAAkB;CAElB;CAEA,IAAI;CAEJ;CAEA,kBAAkB;CAElB;CAEA,AAAQ,QAAuC,CAAE;CAEjD,YAA2D;CAE3D,YAAYO,QAAuB;EACjC,MAAM,OAAO;EACb,MAAM,EAAE,WAAW,OAAO,iBAAiB,gBAAgB,GAAG;EAC9D,KAAK,YAAY;EACjB,KAAK,QAAQ;EACb,KAAK,kBAAkB,mBAAmB,KAAK;EAC/C,KAAK,iBAAiB;CACvB;CAED,oBAAoB;AAClB,SAAO,CAAE;CACV;CAED,WAAmB;AACjB,SAAO;CACR;CAED,MAAM,UACJF,WACAN,SACAC,YACqB;EACrB,MAAM,KAAK,mBAAmB;AAC9B,MAAI,SAAS,kBACX,OAAM,IAAI,MAAM,QAAQ;AAE1B,MAAI,KAAK,iBACP,MAAM,YAAY,kBAAkB,mBAAmB,EACrD,SAAS,KACV,EAAC;AAGJ,MAAI,SAAS,MAAM,OACjB,QAAO,EACL,aAAa,CAAC,KAAK,kBAAkB,QAAQ,KAAK,GAAG,AAAC,EACvD;OACI;GACL,MAAM,WAAW,KAAK,kBAAkB;GACxC,KAAK,oBAAoB;AAEzB,UAAO;IACL,aAAa,CAAC,KAAK,kBAAkB,SAAS,AAAC;IAC/C,WAAW,CAAE;GACd;EACF;CACF;CAED,kBAAkBQ,MAAc;AAC9B,SAAO;GACL,SAAS,IAAI,UAAU;GACvB;EACD;CACF;
CAED,OAAO,sBACLH,WACAC,SACAN,YACqC;EACrC,MAAM,WAAW,KAAK,kBAAkB;EACxC,KAAK,oBAAoB;AACzB,MAAI,KAAK,iBACP,MAAM,YAAY,kBAAkB,mBAAmB,EACrD,SAAS,KACV,EAAC;EAGJ,MAAM,gBAAgB,CAAC,GAAG,QAAS;AACnC,OAAK,IAAI,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;GAC7C,MAAM,OAAO,cAAc;GAC3B,MAAM,cAAc,MAAM,cAAc,SAAS;GACjD,MAAM,KAAK,mBAAmB;AAC9B,OAAI,SAAS,kBACX,OAAM,IAAI,MAAM,QAAQ;GAI1B,MAAM,QAAQ,KAAK,qBACjB,MACA,cAAc,KAAK,iBAAiB,OACrC;AACD,OAAI,QAAQ,QAAQ,QAAS;GAC7B,MAAM;GAED,YAAY,kBAAkB,KAAK;EACzC;CACF;CAED,MAAM,oBAAoB;AACxB,MAAI,KAAK,UAAU,QACjB,MAAM,KAAK,QAAQ;CAEtB;CAED,MAAM,SAAS;AACb,SAAO,IAAI,QAAc,CAAC,YAAY;GACpC,WAAW,MAAM,SAAS,EAAE,KAAK,MAAM;EACxC;CACF;CAED,qBACEQ,MAEAC,gBACqB;AACrB,SAAO,IAAI,oBAAoB;GAC7B,SAAS,IAAI,eAAe,EAAE,SAAS,KAAM;GAC7C;GACA;EACD;CACF;CAED,mBAAmB;AACjB,SAAO,KAAK,UAAU,KAAK;CAC5B;CAED,qBAAqB;AACnB,MAAI,KAAK,IAAI,KAAK,UAAU,SAAS,GACnC,KAAK,KAAK;OAEV,KAAK,IAAI;CAEZ;CAED,UAAUR,OAAsC;EAC9C,MAAM,SAAS,CAAC,GAAG,KAAK,OAAO,GAAG,KAAM;EAExC,MAAM,YAAY,OAAO,IAAI,CAAC,MAAM;AAClC,WAAQ,KAAK,WAAb;IACE,KAAK,SACH,QAAO;KACL,MAAM;KACN,UAAU;MACR,MAAM,EAAE;MACR,aAAa,EAAE;MACf,YAAY,aAAa,EAAE,OAAO;KACnC;IACF;IACH,KAAK,YACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,cAAc,aAAa,EAAE,OAAO;IACrC;IACH,KAAK,UACH,QAAO,EACL,UAAU;KACR,MAAM,EAAE;KACR,aAAa,EAAE;KACf,aAAa,aAAa,EAAE,OAAO;IACpC,EACF;IACH,KAAK,SACH,QAAO;KACL,MAAM,EAAE;KACR,aAAa,EAAE;KACf,YAAY,aAAa,EAAE,OAAO;IACnC;IACH,QACE,OAAM,IAAI,MAAM,CAAC,wBAAwB,EAAE,KAAK,WAAW;GAC9D;EACF,EAAC;EAEF,MAAM,UACJ,KAAK,cAAc,WACf,CAAC,EAAE,sBAAsB,UAAW,CAAC,IACrC;EAEN,MAAM,OAAO,IAAI,kBAAkB;GACjC,WAAW,KAAK;GAChB,OAAO,KAAK;GACZ,iBAAiB,KAAK;GACtB,gBAAgB,KAAK;EACtB;EACD,KAAK,QAAQ;EACb,KAAK,YAAY,KAAK;EACtB,KAAK,IAAI,KAAK;AAEd,SAAO,KAAK,WAAW,EAAE,OAAO,QAAS,EAA6B;CACvE;CA0BD,qBAIES,SAKAC,SAMI;AACJ,SAAO,eAAe,KAAK,OAAO,UAAU;GAC1C,MAAM,UAAU,MAAM,KAAK,OAAO,MAAM;AACxC,OAAI,QAAQ,aAAa,IAAI,KAC3B,QAAO,QAAQ,WAAW,GAAG;AAE/B,OAAI,OAAO,QAAQ,YAAY,SAC7B,QAAO,KAAK,MAAM,QAAQ,QAAQ;AAEpC,SAAM,IAAI,MAAM;EACjB,EAAC;CACH;AACF"}