@langchain/core 1.0.0-alpha.6 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/CHANGELOG.md +131 -0
- package/LICENSE +6 -6
- package/README.md +2 -23
- package/dist/agents.d.ts.map +1 -1
- package/dist/caches/base.d.cts.map +1 -1
- package/dist/callbacks/base.d.cts.map +1 -1
- package/dist/callbacks/base.d.ts.map +1 -1
- package/dist/callbacks/manager.cjs +9 -64
- package/dist/callbacks/manager.cjs.map +1 -1
- package/dist/callbacks/manager.d.cts +1 -23
- package/dist/callbacks/manager.d.cts.map +1 -1
- package/dist/callbacks/manager.d.ts +1 -23
- package/dist/callbacks/manager.d.ts.map +1 -1
- package/dist/callbacks/manager.js +10 -63
- package/dist/callbacks/manager.js.map +1 -1
- package/dist/chat_history.cjs +0 -4
- package/dist/chat_history.cjs.map +1 -1
- package/dist/chat_history.d.cts +1 -3
- package/dist/chat_history.d.cts.map +1 -1
- package/dist/chat_history.d.ts +1 -3
- package/dist/chat_history.d.ts.map +1 -1
- package/dist/chat_history.js +0 -4
- package/dist/chat_history.js.map +1 -1
- package/dist/document_loaders/base.cjs +1 -13
- package/dist/document_loaders/base.cjs.map +1 -1
- package/dist/document_loaders/base.d.cts +0 -9
- package/dist/document_loaders/base.d.cts.map +1 -1
- package/dist/document_loaders/base.d.ts +0 -9
- package/dist/document_loaders/base.d.ts.map +1 -1
- package/dist/document_loaders/base.js +1 -13
- package/dist/document_loaders/base.js.map +1 -1
- package/dist/document_loaders/langsmith.d.cts +1 -1
- package/dist/language_models/base.cjs.map +1 -1
- package/dist/language_models/base.d.cts +0 -16
- package/dist/language_models/base.d.cts.map +1 -1
- package/dist/language_models/base.d.ts +0 -16
- package/dist/language_models/base.d.ts.map +1 -1
- package/dist/language_models/base.js.map +1 -1
- package/dist/language_models/chat_models.cjs +10 -83
- package/dist/language_models/chat_models.cjs.map +1 -1
- package/dist/language_models/chat_models.d.cts +21 -55
- package/dist/language_models/chat_models.d.cts.map +1 -1
- package/dist/language_models/chat_models.d.ts +21 -55
- package/dist/language_models/chat_models.d.ts.map +1 -1
- package/dist/language_models/chat_models.js +11 -83
- package/dist/language_models/chat_models.js.map +1 -1
- package/dist/language_models/llms.cjs +0 -56
- package/dist/language_models/llms.cjs.map +1 -1
- package/dist/language_models/llms.d.cts +1 -43
- package/dist/language_models/llms.d.cts.map +1 -1
- package/dist/language_models/llms.d.ts +1 -43
- package/dist/language_models/llms.d.ts.map +1 -1
- package/dist/language_models/llms.js +0 -56
- package/dist/language_models/llms.js.map +1 -1
- package/dist/load/import_map.cjs +1 -7
- package/dist/load/import_map.cjs.map +1 -1
- package/dist/load/import_map.js +1 -7
- package/dist/load/import_map.js.map +1 -1
- package/dist/messages/ai.cjs +4 -0
- package/dist/messages/ai.cjs.map +1 -1
- package/dist/messages/ai.js +4 -0
- package/dist/messages/ai.js.map +1 -1
- package/dist/messages/block_translators/anthropic.cjs +191 -144
- package/dist/messages/block_translators/anthropic.cjs.map +1 -1
- package/dist/messages/block_translators/anthropic.js +191 -144
- package/dist/messages/block_translators/anthropic.js.map +1 -1
- package/dist/messages/block_translators/index.cjs +2 -2
- package/dist/messages/block_translators/index.cjs.map +1 -1
- package/dist/messages/block_translators/index.js +4 -4
- package/dist/messages/block_translators/index.js.map +1 -1
- package/dist/messages/block_translators/openai.cjs +78 -22
- package/dist/messages/block_translators/openai.cjs.map +1 -1
- package/dist/messages/block_translators/openai.js +78 -22
- package/dist/messages/block_translators/openai.js.map +1 -1
- package/dist/messages/content/tools.cjs +1 -5
- package/dist/messages/content/tools.cjs.map +1 -1
- package/dist/messages/content/tools.d.cts +1 -85
- package/dist/messages/content/tools.d.cts.map +1 -1
- package/dist/messages/content/tools.d.ts +1 -85
- package/dist/messages/content/tools.d.ts.map +1 -1
- package/dist/messages/content/tools.js +1 -5
- package/dist/messages/content/tools.js.map +1 -1
- package/dist/messages/metadata.cjs.map +1 -1
- package/dist/messages/metadata.d.cts +3 -0
- package/dist/messages/metadata.d.cts.map +1 -1
- package/dist/messages/metadata.d.ts +3 -0
- package/dist/messages/metadata.d.ts.map +1 -1
- package/dist/messages/metadata.js.map +1 -1
- package/dist/messages/tool.cjs +2 -0
- package/dist/messages/tool.cjs.map +1 -1
- package/dist/messages/tool.d.cts +2 -0
- package/dist/messages/tool.d.cts.map +1 -1
- package/dist/messages/tool.d.ts +2 -0
- package/dist/messages/tool.d.ts.map +1 -1
- package/dist/messages/tool.js +2 -0
- package/dist/messages/tool.js.map +1 -1
- package/dist/output_parsers/json.cjs +5 -0
- package/dist/output_parsers/json.cjs.map +1 -1
- package/dist/output_parsers/json.d.cts +2 -0
- package/dist/output_parsers/json.d.cts.map +1 -1
- package/dist/output_parsers/json.d.ts +2 -0
- package/dist/output_parsers/json.d.ts.map +1 -1
- package/dist/output_parsers/json.js +5 -0
- package/dist/output_parsers/json.js.map +1 -1
- package/dist/prompts/base.cjs +0 -36
- package/dist/prompts/base.cjs.map +1 -1
- package/dist/prompts/base.d.cts +0 -16
- package/dist/prompts/base.d.cts.map +1 -1
- package/dist/prompts/base.d.ts +0 -16
- package/dist/prompts/base.d.ts.map +1 -1
- package/dist/prompts/base.js +0 -36
- package/dist/prompts/base.js.map +1 -1
- package/dist/prompts/chat.cjs +1 -5
- package/dist/prompts/chat.cjs.map +1 -1
- package/dist/prompts/chat.d.cts +1 -4
- package/dist/prompts/chat.d.cts.map +1 -1
- package/dist/prompts/chat.d.ts +1 -4
- package/dist/prompts/chat.d.ts.map +1 -1
- package/dist/prompts/chat.js +1 -5
- package/dist/prompts/chat.js.map +1 -1
- package/dist/prompts/dict.d.cts +1 -1
- package/dist/prompts/dict.d.ts +1 -1
- package/dist/prompts/few_shot.d.cts +2 -2
- package/dist/prompts/few_shot.d.ts +2 -2
- package/dist/prompts/image.d.cts +1 -1
- package/dist/prompts/image.d.ts +1 -1
- package/dist/prompts/index.d.cts +2 -2
- package/dist/prompts/index.d.ts +2 -2
- package/dist/prompts/pipeline.d.cts +1 -1
- package/dist/prompts/pipeline.d.ts +1 -1
- package/dist/prompts/prompt.d.cts +2 -2
- package/dist/prompts/prompt.d.ts +2 -2
- package/dist/retrievers/index.cjs +3 -18
- package/dist/retrievers/index.cjs.map +1 -1
- package/dist/retrievers/index.d.cts +2 -27
- package/dist/retrievers/index.d.cts.map +1 -1
- package/dist/retrievers/index.d.ts +2 -27
- package/dist/retrievers/index.d.ts.map +1 -1
- package/dist/retrievers/index.js +3 -18
- package/dist/retrievers/index.js.map +1 -1
- package/dist/runnables/base.cjs +24 -63
- package/dist/runnables/base.cjs.map +1 -1
- package/dist/runnables/base.d.cts +10 -39
- package/dist/runnables/base.d.cts.map +1 -1
- package/dist/runnables/base.d.ts +10 -39
- package/dist/runnables/base.d.ts.map +1 -1
- package/dist/runnables/base.js +24 -63
- package/dist/runnables/base.js.map +1 -1
- package/dist/runnables/graph.cjs +1 -1
- package/dist/runnables/graph.cjs.map +1 -1
- package/dist/runnables/graph.js +2 -2
- package/dist/runnables/graph.js.map +1 -1
- package/dist/runnables/graph_mermaid.cjs +1 -10
- package/dist/runnables/graph_mermaid.cjs.map +1 -1
- package/dist/runnables/graph_mermaid.js +1 -10
- package/dist/runnables/graph_mermaid.js.map +1 -1
- package/dist/runnables/history.cjs +1 -1
- package/dist/runnables/history.cjs.map +1 -1
- package/dist/runnables/history.d.cts +2 -2
- package/dist/runnables/history.d.cts.map +1 -1
- package/dist/runnables/history.d.ts +2 -2
- package/dist/runnables/history.d.ts.map +1 -1
- package/dist/runnables/history.js +1 -1
- package/dist/runnables/history.js.map +1 -1
- package/dist/stores.cjs.map +1 -1
- package/dist/stores.d.cts +3 -29
- package/dist/stores.d.cts.map +1 -1
- package/dist/stores.d.ts +3 -29
- package/dist/stores.d.ts.map +1 -1
- package/dist/stores.js.map +1 -1
- package/dist/tools/index.cjs +12 -4
- package/dist/tools/index.cjs.map +1 -1
- package/dist/tools/index.js +12 -4
- package/dist/tools/index.js.map +1 -1
- package/dist/tools/types.cjs.map +1 -1
- package/dist/tools/types.d.cts +4 -0
- package/dist/tools/types.d.cts.map +1 -1
- package/dist/tools/types.d.ts +4 -0
- package/dist/tools/types.d.ts.map +1 -1
- package/dist/tools/types.js.map +1 -1
- package/dist/tracers/base.cjs +1 -1
- package/dist/tracers/base.cjs.map +1 -1
- package/dist/tracers/base.d.cts +1 -1
- package/dist/tracers/base.js +2 -2
- package/dist/tracers/base.js.map +1 -1
- package/dist/tracers/log_stream.d.cts +1 -1
- package/dist/tracers/log_stream.d.ts +1 -1
- package/dist/tracers/tracer_langchain.cjs +1 -0
- package/dist/tracers/tracer_langchain.cjs.map +1 -1
- package/dist/tracers/tracer_langchain.d.cts +2 -2
- package/dist/tracers/tracer_langchain.js +1 -0
- package/dist/tracers/tracer_langchain.js.map +1 -1
- package/dist/utils/env.cjs +1 -9
- package/dist/utils/env.cjs.map +1 -1
- package/dist/utils/env.d.cts +2 -6
- package/dist/utils/env.d.cts.map +1 -1
- package/dist/utils/env.d.ts +2 -6
- package/dist/utils/env.d.ts.map +1 -1
- package/dist/utils/env.js +2 -9
- package/dist/utils/env.js.map +1 -1
- package/dist/utils/testing/message_history.cjs +1 -1
- package/dist/utils/testing/message_history.cjs.map +1 -1
- package/dist/utils/testing/message_history.d.cts +1 -1
- package/dist/utils/testing/message_history.d.cts.map +1 -1
- package/dist/utils/testing/message_history.d.ts +1 -1
- package/dist/utils/testing/message_history.d.ts.map +1 -1
- package/dist/utils/testing/message_history.js +1 -1
- package/dist/utils/testing/message_history.js.map +1 -1
- package/dist/utils/types/index.cjs +6 -0
- package/dist/utils/types/index.d.cts +2 -2
- package/dist/utils/types/index.d.ts +2 -2
- package/dist/utils/types/index.js +5 -2
- package/dist/utils/types/zod.cjs +23 -0
- package/dist/utils/types/zod.cjs.map +1 -1
- package/dist/utils/types/zod.d.cts +11 -1
- package/dist/utils/types/zod.d.cts.map +1 -1
- package/dist/utils/types/zod.d.ts +11 -1
- package/dist/utils/types/zod.d.ts.map +1 -1
- package/dist/utils/types/zod.js +21 -1
- package/dist/utils/types/zod.js.map +1 -1
- package/package.json +121 -154
- package/dist/runnables/remote.cjs +0 -399
- package/dist/runnables/remote.cjs.map +0 -1
- package/dist/runnables/remote.d.cts +0 -73
- package/dist/runnables/remote.d.cts.map +0 -1
- package/dist/runnables/remote.d.ts +0 -73
- package/dist/runnables/remote.d.ts.map +0 -1
- package/dist/runnables/remote.js +0 -393
- package/dist/runnables/remote.js.map +0 -1
- package/dist/tracers/initialize.cjs +0 -46
- package/dist/tracers/initialize.cjs.map +0 -1
- package/dist/tracers/initialize.d.cts +0 -26
- package/dist/tracers/initialize.d.cts.map +0 -1
- package/dist/tracers/initialize.d.ts +0 -26
- package/dist/tracers/initialize.d.ts.map +0 -1
- package/dist/tracers/initialize.js +0 -39
- package/dist/tracers/initialize.js.map +0 -1
- package/dist/tracers/tracer_langchain_v1.cjs +0 -168
- package/dist/tracers/tracer_langchain_v1.cjs.map +0 -1
- package/dist/tracers/tracer_langchain_v1.d.cts +0 -64
- package/dist/tracers/tracer_langchain_v1.d.cts.map +0 -1
- package/dist/tracers/tracer_langchain_v1.d.ts +0 -64
- package/dist/tracers/tracer_langchain_v1.d.ts.map +0 -1
- package/dist/tracers/tracer_langchain_v1.js +0 -162
- package/dist/tracers/tracer_langchain_v1.js.map +0 -1
@@ -1,4 +1,3 @@
-import { BaseMessage } from "../messages/base.js";
 import { Generation, GenerationChunk, LLMResult } from "../outputs.js";
 import { BaseCache } from "../caches/base.js";
 import { BaseCallbackConfig, CallbackManagerForLLMRun, Callbacks } from "../callbacks/manager.js";
@@ -11,12 +10,7 @@ type SerializedLLM = {
 _model: string;
 _type: string;
 } & Record<string, any>;
-interface BaseLLMParams extends BaseLanguageModelParams {
-/**
-* @deprecated Use `maxConcurrency` instead
-*/
-concurrency?: number;
-}
+interface BaseLLMParams extends BaseLanguageModelParams {}
 interface BaseLLMCallOptions extends BaseLanguageModelCallOptions {}
 /**
 * LLM Wrapper. Takes in a prompt (or prompts) and returns a string.
@@ -26,10 +20,6 @@ declare abstract class BaseLLM<CallOptions extends BaseLLMCallOptions = BaseLLMC
 ParsedCallOptions: Omit<CallOptions, Exclude<keyof RunnableConfig, "signal" | "timeout" | "maxConcurrency">>;
 // Only ever instantiated in main LangChain
 lc_namespace: string[];
-constructor({
-concurrency,
-...rest
-}: BaseLLMParams);
 /**
 * This method takes an input and options, and returns a string. It
 * converts the input to a prompt value and generates a result based on
@@ -87,33 +77,6 @@ declare abstract class BaseLLM<CallOptions extends BaseLLMCallOptions = BaseLLMC
 * Run the LLM on the given prompts and input, handling caching.
 */
 generate(prompts: string[], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<LLMResult>;
-/**
-* @deprecated Use .invoke() instead. Will be removed in 0.2.0.
-* Convenience wrapper for {@link generate} that takes in a single string prompt and returns a single string output.
-*/
-call(prompt: string, options?: string[] | CallOptions, callbacks?: Callbacks): Promise<string>;
-/**
-* @deprecated Use .invoke() instead. Will be removed in 0.2.0.
-*
-* This method is similar to `call`, but it's used for making predictions
-* based on the input text.
-* @param text Input text for the prediction.
-* @param options Options for the LLM call.
-* @param callbacks Callbacks for the LLM call.
-* @returns A prediction based on the input text.
-*/
-predict(text: string, options?: string[] | CallOptions, callbacks?: Callbacks): Promise<string>;
-/**
-* @deprecated Use .invoke() instead. Will be removed in 0.2.0.
-*
-* This method takes a list of messages, options, and callbacks, and
-* returns a predicted message.
-* @param messages A list of messages for the prediction.
-* @param options Options for the LLM call.
-* @param callbacks Callbacks for the LLM call.
-* @returns A predicted message based on the list of messages.
-*/
-predictMessages(messages: BaseMessage[], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<BaseMessage>;
 /**
 * Get the identifying parameters of the LLM.
 */
@@ -123,11 +86,6 @@ declare abstract class BaseLLM<CallOptions extends BaseLLMCallOptions = BaseLLMC
 * Return the string type key uniquely identifying this class of LLM.
 */
 abstract _llmType(): string;
-/**
-* @deprecated
-* Return a json-like object representing this LLM.
-*/
-serialize(): SerializedLLM;
 _modelType(): string;
 }
 /**
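The declaration changes above are the substance of the 1.0 cleanup for this module: the long-deprecated `call`, `predict`, `predictMessages`, and `serialize` members and the deprecated `concurrency` constructor field are removed, leaving `invoke` (and `generate`) as the supported entry points. The sketch below shows what a migration can look like; `FakeEchoLLM` and its behavior are invented for illustration and are not part of `@langchain/core`.

```ts
// Minimal sketch of the 1.0 surface, assuming a custom subclass of LLM.
// FakeEchoLLM is a made-up class used only to illustrate the migration.
import { LLM, type BaseLLMParams } from "@langchain/core/language_models/llms";

class FakeEchoLLM extends LLM {
  constructor(fields: BaseLLMParams = {}) {
    super(fields);
  }

  _llmType(): string {
    return "fake-echo";
  }

  // LLM subclasses only need _call; BaseLLM wires it into invoke/generate.
  async _call(prompt: string): Promise<string> {
    return `echo: ${prompt}`;
  }
}

const llm = new FakeEchoLLM();

// 0.x: const text = await llm.call("Hello");    // removed in 1.0
// 0.x: const text = await llm.predict("Hello"); // removed in 1.0
const text = await llm.invoke("Hello"); // 1.0: invoke is the remaining entry point
console.log(text);

// predictMessages is gone as well; stringify messages yourself (e.g. with
// getBufferString from @langchain/core/messages) and call invoke, or use a
// chat model if you want message objects back.
```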
@@ -1 +1 @@
{"version":3,"file":"llms.d.ts","names":["BaseMessage","BasePromptValueInterface","LLMResult","Generation","GenerationChunk","BaseCallbackConfig","CallbackManagerForLLMRun","Callbacks","BaseLanguageModel","BaseLanguageModelCallOptions","BaseLanguageModelInput","BaseLanguageModelParams","RunnableConfig","BaseCache","SerializedLLM","Record","BaseLLMParams","BaseLLMCallOptions","BaseLLM","CallOptions","Exclude","Omit","concurrency","Promise","AsyncGenerator","Partial","prompts","cache","llmStringKey","parsedOptions","handledOptions","runId","LLM"],"sources":["../../src/language_models/llms.d.ts"],"sourcesContent":["import { type BaseMessage } from \"../messages/index.js\";\nimport type { BasePromptValueInterface } from \"../prompt_values.js\";\nimport { type LLMResult, type Generation, GenerationChunk } from \"../outputs.js\";\nimport { type BaseCallbackConfig, type CallbackManagerForLLMRun, type Callbacks } from \"../callbacks/manager.js\";\nimport { BaseLanguageModel, type BaseLanguageModelCallOptions, type BaseLanguageModelInput, type BaseLanguageModelParams } from \"./base.js\";\nimport type { RunnableConfig } from \"../runnables/config.js\";\nimport type { BaseCache } from \"../caches/base.js\";\nexport type SerializedLLM = {\n _model: string;\n _type: string;\n} & Record<string, any>;\nexport interface BaseLLMParams extends BaseLanguageModelParams {\n /**\n * @deprecated Use `maxConcurrency` instead\n */\n concurrency?: number;\n}\nexport interface BaseLLMCallOptions extends BaseLanguageModelCallOptions {\n}\n/**\n * LLM Wrapper. Takes in a prompt (or prompts) and returns a string.\n */\nexport declare abstract class BaseLLM<CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions> extends BaseLanguageModel<string, CallOptions> {\n // Backwards compatibility since fields have been moved to RunnableConfig\n ParsedCallOptions: Omit<CallOptions, Exclude<keyof RunnableConfig, \"signal\" | \"timeout\" | \"maxConcurrency\">>;\n // Only ever instantiated in main LangChain\n lc_namespace: string[];\n constructor({ concurrency, ...rest }: BaseLLMParams);\n /**\n * This method takes an input and options, and returns a string. 
It\n * converts the input to a prompt value and generates a result based on\n * the prompt.\n * @param input Input for the LLM.\n * @param options Options for the LLM call.\n * @returns A string result based on the prompt.\n */\n invoke(input: BaseLanguageModelInput, options?: CallOptions): Promise<string>;\n // eslint-disable-next-line require-yield\n _streamResponseChunks(_input: string, _options: this[\"ParsedCallOptions\"], _runManager?: CallbackManagerForLLMRun): AsyncGenerator<GenerationChunk>;\n protected _separateRunnableConfigFromCallOptionsCompat(options?: Partial<CallOptions>): [RunnableConfig, this[\"ParsedCallOptions\"]];\n _streamIterator(input: BaseLanguageModelInput, options?: CallOptions): AsyncGenerator<string>;\n /**\n * This method takes prompt values, options, and callbacks, and generates\n * a result based on the prompts.\n * @param promptValues Prompt values for the LLM.\n * @param options Options for the LLM call.\n * @param callbacks Callbacks for the LLM call.\n * @returns An LLMResult based on the prompts.\n */\n generatePrompt(promptValues: BasePromptValueInterface[], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<LLMResult>;\n /**\n * Run the LLM on the given prompts and input.\n */\n abstract _generate(prompts: string[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<LLMResult>;\n /**\n * Get the parameters used to invoke the model\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n invocationParams(_options?: this[\"ParsedCallOptions\"]): any;\n _flattenLLMResult(llmResult: LLMResult): LLMResult[];\n /** @ignore */\n _generateUncached(prompts: string[], parsedOptions: this[\"ParsedCallOptions\"], handledOptions: BaseCallbackConfig, startedRunManagers?: CallbackManagerForLLMRun[]): Promise<LLMResult>;\n _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, runId }: {\n prompts: string[];\n cache: BaseCache<Generation[]>;\n llmStringKey: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsedOptions: any;\n handledOptions: RunnableConfig;\n runId?: string;\n }): Promise<LLMResult & {\n missingPromptIndices: number[];\n startedRunManagers?: CallbackManagerForLLMRun[];\n }>;\n /**\n * Run the LLM on the given prompts and input, handling caching.\n */\n generate(prompts: string[], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<LLMResult>;\n /**\n * @deprecated Use .invoke() instead. Will be removed in 0.2.0.\n * Convenience wrapper for {@link generate} that takes in a single string prompt and returns a single string output.\n */\n call(prompt: string, options?: string[] | CallOptions, callbacks?: Callbacks): Promise<string>;\n /**\n * @deprecated Use .invoke() instead. Will be removed in 0.2.0.\n *\n * This method is similar to `call`, but it's used for making predictions\n * based on the input text.\n * @param text Input text for the prediction.\n * @param options Options for the LLM call.\n * @param callbacks Callbacks for the LLM call.\n * @returns A prediction based on the input text.\n */\n predict(text: string, options?: string[] | CallOptions, callbacks?: Callbacks): Promise<string>;\n /**\n * @deprecated Use .invoke() instead. 
Will be removed in 0.2.0.\n *\n * This method takes a list of messages, options, and callbacks, and\n * returns a predicted message.\n * @param messages A list of messages for the prediction.\n * @param options Options for the LLM call.\n * @param callbacks Callbacks for the LLM call.\n * @returns A predicted message based on the list of messages.\n */\n predictMessages(messages: BaseMessage[], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<BaseMessage>;\n /**\n * Get the identifying parameters of the LLM.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _identifyingParams(): Record<string, any>;\n /**\n * Return the string type key uniquely identifying this class of LLM.\n */\n abstract _llmType(): string;\n /**\n * @deprecated\n * Return a json-like object representing this LLM.\n */\n serialize(): SerializedLLM;\n _modelType(): string;\n}\n/**\n * LLM class that provides a simpler interface to subclass than {@link BaseLLM}.\n *\n * Requires only implementing a simpler {@link _call} method instead of {@link _generate}.\n *\n * @augments BaseLLM\n */\nexport declare abstract class LLM<CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions> extends BaseLLM<CallOptions> {\n /**\n * Run the LLM on the given prompt and input.\n */\n abstract _call(prompt: string, options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<string>;\n _generate(prompts: string[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<LLMResult>;\n}\n"],"mappings":";;;;;;;;;KAOYc,aAAAA;;EAAAA,KAAAA,EAAAA,MAAAA;AAIZ,CAAA,GADIC,MACaC,CAAAA,MAAAA,EAAAA,GAAa,CAAA;AAMbC,UANAD,aAAAA,SAAsBL,uBAMKF,CAAAA;EAKdS;;;EAA8C,WAAGD,CAAAA,EAAAA,MAAAA;;AAEnDE,UAPXF,kBAAAA,SAA2BR,4BAOhBU,CAAAA;;;;AAGcH,uBALZE,OAKYF,CAAAA,oBALgBC,kBAKhBD,GALqCC,kBAKrCD,CAAAA,SALiER,iBAKjEQ,CAAAA,MAAAA,EAL2FG,WAK3FH,CAAAA,CAAAA;EAAa;EASf,iBAAYG,EAZ7BE,IAY6BF,CAZxBA,WAYwBA,EAZXC,OAYWD,CAAAA,MAZGP,cAYHO,EAAAA,QAAAA,GAAAA,SAAAA,GAAAA,gBAAAA,CAAAA,CAAAA;EAAW;EAAU,YAEoBb,EAAAA,MAAAA,EAAAA;EAAwB,WAAkBF,CAAAA;IAAAA,WAAAA;IAAAA,GAAAA;EAAAA,CAAAA,EAX7FY,aAW6FZ;EAAe;;;;;;;;EAW7F,MAAyBe,CAAAA,KAAAA,EAbhET,sBAagES,EAAAA,OAAAA,CAAAA,EAb9BA,WAa8BA,CAAAA,EAbhBI,OAagBJ,CAAAA,MAAAA,CAAAA;EAAW;EAAuB,qBAAWjB,CAAAA,MAAAA,EAAAA,MAAAA,EAAAA,QAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,WAAAA,CAAAA,EAXlCI,wBAWkCJ,CAAAA,EAXPsB,cAWOtB,CAXQE,eAWRF,CAAAA;EAAS,UAAjBqB,4CAAAA,CAAAA,OAAAA,CAAAA,EAVlDE,OAUkDF,CAV1CJ,WAU0CI,CAAAA,CAAAA,EAAAA,CAV1BX,cAU0BW,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,CAAAA;EAAO,eAInCjB,CAAAA,KAAAA,EAbhEI,sBAagEJ,EAAAA,OAAAA,CAAAA,EAb9Ba,WAa8Bb,CAAAA,EAbhBkB,cAagBlB,CAAAA,MAAAA,CAAAA;EAAwB;;;;;;;;EAQ6D,cAC1JoB,CAAAA,YAAAA,EAbWzB,wBAaXyB,EAAAA,EAAAA,OAAAA,CAAAA,EAAAA,MAAAA,EAAAA,GAb4DP,WAa5DO,EAAAA,SAAAA,CAAAA,EAbqFnB,SAarFmB,CAAAA,EAbiGH,OAajGG,CAbyGxB,SAazGwB,CAAAA;EAAO;;;EAAoC,SAAEI,SAAAA,CAAAA,OAAAA,EAAAA,MAAAA,EAAAA,EAAAA,OAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EATwBxB,wBASxBwB,CAAAA,EATmDP,OASnDO,CAT2D5B,SAS3D4B,CAAAA;EAAc;;;EAEzD;EAIc,gBAEtB5B,CAAAA,QAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,CAAAA,EAAAA,GAAAA;EAAS,iBAEII,CAAAA,SAAAA,EAbIJ,SAaJI,CAAAA,EAbgBJ,SAahBI,EAAAA;EAAwB;EAFtC,iBAOsCa,CAAAA,OAAAA,EAAAA,MAAAA,EAAAA,EAAAA,aAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,cAAAA,EAhB8Cd,kBAgB9Cc,EAAAA,kBAAAA,CAAAA,EAhBuFb,wBAgBvFa,EAAAA,CAAAA,EAhBoHI,OAgBpHJ,CAhB4HjB,SAgB5HiB,CAAAA;EAAW,eAAcZ,CAAAA;IAAAA,OAAAA;IAAAA,KAAAA;IAAAA,YAAAA;IAAAA,aAAAA;IAAAA,cAAAA;IAAAA;EAgBG,CAhBHA,EAAAA;IAAoBL,OAAAA,EAAAA,MAAAA,EAAAA;IAARqB,KAAAA,EAb3EV,SAa2EU,CAbjEpB,UAa
iEoB,EAAAA,CAAAA;IAK5CJ,YAAAA,EAAAA,MAAAA;IAAyBZ;IAAYgB,aAAAA,EAAAA,GAAAA;IAWpCJ,cAAAA,EAzBvBP,cAyBuBO;IAAyBZ,KAAAA,CAAAA,EAAAA,MAAAA;EAAS,CAAA,CAAA,EAvBzEgB,OAuB4EA,CAvBpErB,SAuBoEqB,GAAAA;IAWtDvB,oBAAAA,EAAAA,MAAAA,EAAAA;IAAoCmB,kBAAAA,CAAAA,EAhCrCb,wBAgCqCa,EAAAA;EAAW,CAAA,CAAA;EAAuB;;;EAKpE,QASfL,CAAAA,OAAAA,EAAAA,MAAAA,EAAAA,EAAAA,OAAAA,CAAAA,EAAAA,MAAAA,EAAAA,GAzCoCK,WAyCpCL,EAAAA,SAAAA,CAAAA,EAzC6DP,SAyC7DO,CAAAA,EAzCyES,OAyCzET,CAzCiFZ,SAyCjFY,CAAAA;EAAa;AAhG8F;AA0G5H;;EAAiC,IAAqBG,CAAAA,MAAAA,EAAAA,MAAAA,EAAAA,OAAAA,CAAAA,EAAAA,MAAAA,EAAAA,GA9CRE,WA8CQF,EAAAA,SAAAA,CAAAA,EA9CiBV,SA8CjBU,CAAAA,EA9C6BM,OA8C7BN,CAAAA,MAAAA,CAAAA;EAAkB;;;;;;;;AAAsC;;6CAnC/DE,yBAAyBZ,YAAYgB;;;;;;;;;;;4BAWtDvB,oCAAoCmB,yBAAyBZ,YAAYgB,QAAQvB;;;;;wBAKrFe;;;;;;;;;eASTD;;;;;;;;;;uBAUakB,wBAAwBf,qBAAqBA,4BAA4BC,QAAQC;;;;kFAI3Bb,2BAA2BiB;gFAC7BjB,2BAA2BiB,QAAQrB"}
{"version":3,"file":"llms.d.ts","names":["BasePromptValueInterface","LLMResult","Generation","GenerationChunk","BaseCallbackConfig","CallbackManagerForLLMRun","Callbacks","BaseLanguageModel","BaseLanguageModelCallOptions","BaseLanguageModelInput","BaseLanguageModelParams","RunnableConfig","BaseCache","SerializedLLM","Record","BaseLLMParams","BaseLLMCallOptions","BaseLLM","CallOptions","Exclude","Omit","Promise","AsyncGenerator","Partial","prompts","cache","llmStringKey","parsedOptions","handledOptions","runId","LLM"],"sources":["../../src/language_models/llms.d.ts"],"sourcesContent":["import type { BasePromptValueInterface } from \"../prompt_values.js\";\nimport { type LLMResult, type Generation, GenerationChunk } from \"../outputs.js\";\nimport { type BaseCallbackConfig, type CallbackManagerForLLMRun, type Callbacks } from \"../callbacks/manager.js\";\nimport { BaseLanguageModel, type BaseLanguageModelCallOptions, type BaseLanguageModelInput, type BaseLanguageModelParams } from \"./base.js\";\nimport type { RunnableConfig } from \"../runnables/config.js\";\nimport type { BaseCache } from \"../caches/base.js\";\nexport type SerializedLLM = {\n _model: string;\n _type: string;\n} & Record<string, any>;\nexport interface BaseLLMParams extends BaseLanguageModelParams {\n}\nexport interface BaseLLMCallOptions extends BaseLanguageModelCallOptions {\n}\n/**\n * LLM Wrapper. Takes in a prompt (or prompts) and returns a string.\n */\nexport declare abstract class BaseLLM<CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions> extends BaseLanguageModel<string, CallOptions> {\n // Backwards compatibility since fields have been moved to RunnableConfig\n ParsedCallOptions: Omit<CallOptions, Exclude<keyof RunnableConfig, \"signal\" | \"timeout\" | \"maxConcurrency\">>;\n // Only ever instantiated in main LangChain\n lc_namespace: string[];\n /**\n * This method takes an input and options, and returns a string. 
It\n * converts the input to a prompt value and generates a result based on\n * the prompt.\n * @param input Input for the LLM.\n * @param options Options for the LLM call.\n * @returns A string result based on the prompt.\n */\n invoke(input: BaseLanguageModelInput, options?: CallOptions): Promise<string>;\n // eslint-disable-next-line require-yield\n _streamResponseChunks(_input: string, _options: this[\"ParsedCallOptions\"], _runManager?: CallbackManagerForLLMRun): AsyncGenerator<GenerationChunk>;\n protected _separateRunnableConfigFromCallOptionsCompat(options?: Partial<CallOptions>): [RunnableConfig, this[\"ParsedCallOptions\"]];\n _streamIterator(input: BaseLanguageModelInput, options?: CallOptions): AsyncGenerator<string>;\n /**\n * This method takes prompt values, options, and callbacks, and generates\n * a result based on the prompts.\n * @param promptValues Prompt values for the LLM.\n * @param options Options for the LLM call.\n * @param callbacks Callbacks for the LLM call.\n * @returns An LLMResult based on the prompts.\n */\n generatePrompt(promptValues: BasePromptValueInterface[], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<LLMResult>;\n /**\n * Run the LLM on the given prompts and input.\n */\n abstract _generate(prompts: string[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<LLMResult>;\n /**\n * Get the parameters used to invoke the model\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n invocationParams(_options?: this[\"ParsedCallOptions\"]): any;\n _flattenLLMResult(llmResult: LLMResult): LLMResult[];\n /** @ignore */\n _generateUncached(prompts: string[], parsedOptions: this[\"ParsedCallOptions\"], handledOptions: BaseCallbackConfig, startedRunManagers?: CallbackManagerForLLMRun[]): Promise<LLMResult>;\n _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, runId }: {\n prompts: string[];\n cache: BaseCache<Generation[]>;\n llmStringKey: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsedOptions: any;\n handledOptions: RunnableConfig;\n runId?: string;\n }): Promise<LLMResult & {\n missingPromptIndices: number[];\n startedRunManagers?: CallbackManagerForLLMRun[];\n }>;\n /**\n * Run the LLM on the given prompts and input, handling caching.\n */\n generate(prompts: string[], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<LLMResult>;\n /**\n * Get the identifying parameters of the LLM.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _identifyingParams(): Record<string, any>;\n /**\n * Return the string type key uniquely identifying this class of LLM.\n */\n abstract _llmType(): string;\n _modelType(): string;\n}\n/**\n * LLM class that provides a simpler interface to subclass than {@link BaseLLM}.\n *\n * Requires only implementing a simpler {@link _call} method instead of {@link _generate}.\n *\n * @augments BaseLLM\n */\nexport declare abstract class LLM<CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions> extends BaseLLM<CallOptions> {\n /**\n * Run the LLM on the given prompt and input.\n */\n abstract _call(prompt: string, options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): Promise<string>;\n _generate(prompts: string[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): 
Promise<LLMResult>;\n}\n"],"mappings":";;;;;;;;KAMYa,aAAAA;;EAAAA,KAAAA,EAAAA,MAAAA;AAIZ,CAAA,GADIC,MACaC,CAAAA,MAAAA,EAAAA,GAAa,CAAA;AAEbC,UAFAD,aAAAA,SAAsBL,uBAEKF,CAAAA,CAK5C;AAAqC,UALpBQ,kBAAAA,SAA2BR,4BAKP,CAAA;;;;AAEkBG,uBAFzBM,OAEyBN,CAAAA,oBAFGK,kBAEHL,GAFwBK,kBAExBL,CAAAA,SAFoDJ,iBAEpDI,CAAAA,MAAAA,EAF8EO,WAE9EP,CAAAA,CAAAA;EAAc;EAArB,iBAAzBS,EAAAA,IAAAA,CAAKF,WAALE,EAAkBD,OAAlBC,CAAAA,MAAgCT,cAAhCS,EAAAA,QAAAA,GAAAA,SAAAA,GAAAA,gBAAAA,CAAAA,CAAAA;EAAI;EAWa,YAAYF,EAAAA,MAAAA,EAAAA;EAAW;;;;;;;;EAId,MAAYA,CAAAA,KAAAA,EAJ3CT,sBAI2CS,EAAAA,OAAAA,CAAAA,EAJTA,WAISA,CAAAA,EAJKG,OAILH,CAAAA,MAAAA,CAAAA;EAAW;EAAiB,qBASxDlB,CAAAA,MAAAA,EAAAA,MAAAA,EAAAA,QAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,WAAAA,CAAAA,EAX4DK,wBAW5DL,CAAAA,EAXuFsB,cAWvFtB,CAXsGG,eAWtGH,CAAAA;EAAwB,UAAyBkB,4CAAAA,CAAAA,OAAAA,CAAAA,EAVbK,OAUaL,CAVLA,WAUKA,CAAAA,CAAAA,EAAAA,CAVWP,cAUXO,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,CAAAA;EAAW,eAAcZ,CAAAA,KAAAA,EAThFG,sBASgFH,EAAAA,OAAAA,CAAAA,EAT9CY,WAS8CZ,CAAAA,EAThCgB,cASgChB,CAAAA,MAAAA,CAAAA;EAAS;;;;;;;;EAYC,cAAuBD,CAAAA,YAAAA,EAZ3GL,wBAY2GK,EAAAA,EAAAA,OAAAA,CAAAA,EAAAA,MAAAA,EAAAA,GAZ1Da,WAY0Db,EAAAA,SAAAA,CAAAA,EAZjCC,SAYiCD,CAAAA,EAZrBgB,OAYqBhB,CAZbJ,SAYaI,CAAAA;EAAwB;;;EACvI,SAAEoB,SAAAA,CAAAA,OAAAA,EAAAA,MAAAA,EAAAA,EAAAA,OAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAT4DpB,wBAS5DoB,CAAAA,EATuFJ,OASvFI,CAT+FxB,SAS/FwB,CAAAA;EAAK;;;EAA6C;EAAO,gBAE/DvB,CAAAA,QAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,CAAAA,EAAAA,GAAAA;EAAU,iBAApBU,CAAAA,SAAAA,EALkBX,SAKlBW,CAAAA,EAL8BX,SAK9BW,EAAAA;EAAS;EAIc,iBAEtBX,CAAAA,OAAAA,EAAAA,MAAAA,EAAAA,EAAAA,aAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,cAAAA,EATmFG,kBASnFH,EAAAA,kBAAAA,CAAAA,EAT4HI,wBAS5HJ,EAAAA,CAAAA,EATyJoB,OASzJpB,CATiKA,SASjKA,CAAAA;EAAS,eAEII,CAAAA;IAAAA,OAAAA;IAAAA,KAAAA;IAAAA,YAAAA;IAAAA,aAAAA;IAAAA,cAAAA;IAAAA;EAjD+F,CAiD/FA,EAAAA;IAFrBgB,OAAAA,EAAAA,MAAAA,EAAAA;IAO6CH,KAAAA,EAbtCN,SAasCM,CAb5BhB,UAa4BgB,EAAAA,CAAAA;IAAyBZ,YAAAA,EAAAA,MAAAA;IAAoBL;IAARoB,aAAAA,EAAAA,GAAAA;IAKhEP,cAAAA,EAdFH,cAcEG;IA3DiFP,KAAAA,CAAAA,EAAAA,MAAAA;EAAiB,CAAA,CAAA,EA+CpHc,OA/CoH,CA+C5GpB,SA/C4G,GAAA;IAyE9F6B,oBAAG,EAAA,MAAA,EAAA;IAAA,kBAAA,CAAA,EAxBJzB,wBAwBI,EAAA;EAAA,CAAA,CAAA;EAAuC;;;EAIoC,QAAGgB,CAAAA,OAAAA,EAAAA,MAAAA,EAAAA,EAAAA,OAAAA,CAAAA,EAAAA,MAAAA,EAAAA,GAvB1DH,WAuB0DG,EAAAA,SAAAA,CAAAA,EAvBjCf,SAuBiCe,CAAAA,EAvBrBA,OAuBqBA,CAvBbpB,SAuBaoB,CAAAA;EAAO;;;EACF;EALN,kBAAA,CAAA,CAAA,EAdpFP,MAcoF,CAAA,MAAA,EAAA,GAAA,CAAA;;;;;;;;;;;;;;uBAAhFgB,wBAAwBd,qBAAqBA,4BAA4BC,QAAQC;;;;kFAI3Bb,2BAA2BgB;gFAC7BhB,2BAA2BgB,QAAQpB"}
@@ -1,11 +1,8 @@
 import { __export } from "../_virtual/rolldown_runtime.js";
-import { AIMessage } from "../messages/ai.js";
-import { getBufferString } from "../messages/utils.js";
 import { callbackHandlerPrefersStreaming } from "../callbacks/base.js";
 import { CallbackManager } from "../callbacks/manager.js";
 import { concat } from "../utils/stream.js";
 import { GenerationChunk, RUN_KEY } from "../outputs.js";
-import "../messages/index.js";
 import { BaseLanguageModel } from "./base.js";
 
 //#region src/language_models/llms.ts
@@ -23,12 +20,6 @@ var BaseLLM = class BaseLLM extends BaseLanguageModel {
 "llms",
 this._llmType()
 ];
-constructor({ concurrency,...rest }) {
-super(concurrency ? {
-maxConcurrency: concurrency,
-...rest
-} : rest);
-}
 /**
 * This method takes an input and options, and returns a string. It
 * converts the input to a prompt value and generates a result based on
@@ -246,58 +237,11 @@ var BaseLLM = class BaseLLM extends BaseLanguageModel {
 };
 }
 /**
-* @deprecated Use .invoke() instead. Will be removed in 0.2.0.
-* Convenience wrapper for {@link generate} that takes in a single string prompt and returns a single string output.
-*/
-async call(prompt, options, callbacks) {
-const { generations } = await this.generate([prompt], options, callbacks);
-return generations[0][0].text;
-}
-/**
-* @deprecated Use .invoke() instead. Will be removed in 0.2.0.
-*
-* This method is similar to `call`, but it's used for making predictions
-* based on the input text.
-* @param text Input text for the prediction.
-* @param options Options for the LLM call.
-* @param callbacks Callbacks for the LLM call.
-* @returns A prediction based on the input text.
-*/
-async predict(text, options, callbacks) {
-return this.call(text, options, callbacks);
-}
-/**
-* @deprecated Use .invoke() instead. Will be removed in 0.2.0.
-*
-* This method takes a list of messages, options, and callbacks, and
-* returns a predicted message.
-* @param messages A list of messages for the prediction.
-* @param options Options for the LLM call.
-* @param callbacks Callbacks for the LLM call.
-* @returns A predicted message based on the list of messages.
-*/
-async predictMessages(messages, options, callbacks) {
-const text = getBufferString(messages);
-const prediction = await this.call(text, options, callbacks);
-return new AIMessage(prediction);
-}
-/**
 * Get the identifying parameters of the LLM.
 */
 _identifyingParams() {
 return {};
 }
-/**
-* @deprecated
-* Return a json-like object representing this LLM.
-*/
-serialize() {
-return {
-...this._identifyingParams(),
-_type: this._llmType(),
-_model: this._modelType()
-};
-}
 _modelType() {
 return "base_llm";
 }
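The runtime diff above mirrors the declaration changes: the `AIMessage`/`getBufferString` imports that only served `predictMessages` are dropped, and the constructor shim that rewrote `concurrency` into `maxConcurrency` is gone. Below is a hedged sketch of the remaining way to cap call parallelism; it reuses the illustrative `FakeEchoLLM` class from the previous snippet, which is not a real export of `@langchain/core`.

```ts
// `concurrency` is no longer accepted by the constructor; pass `maxConcurrency`
// (part of BaseLanguageModelParams) yourself. FakeEchoLLM is the illustrative
// subclass defined in the earlier sketch.
const llm = new FakeEchoLLM({ maxConcurrency: 2 });

// maxConcurrency bounds concurrent model calls made through the model's
// async caller; batch() feeds each prompt through invoke.
const outputs = await llm.batch(["one", "two", "three", "four"]);
console.log(outputs); // ["echo: one", "echo: two", "echo: three", "echo: four"]
```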
@@ -1 +1 @@
{"version":3,"file":"llms.js","names":["input: BaseLanguageModelInput","options?: CallOptions","_input: string","_options: this[\"ParsedCallOptions\"]","_runManager?: CallbackManagerForLLMRun","options?: Partial<CallOptions>","promptValues: BasePromptValueInterface[]","options?: string[] | CallOptions","callbacks?: Callbacks","prompts: string[]","_options?: this[\"ParsedCallOptions\"]","llmResult: LLMResult","llmResults: LLMResult[]","parsedOptions: this[\"ParsedCallOptions\"]","handledOptions: BaseCallbackConfig","startedRunManagers?: CallbackManagerForLLMRun[]","runManagers: CallbackManagerForLLMRun[] | undefined","output: LLMResult","flattenedOutputs: LLMResult[]","missingPromptIndices: number[]","generations: Generation[][]","result","parsedOptions: CallOptions | undefined","prompt: string","text: string","messages: BaseMessage[]","options: this[\"ParsedCallOptions\"]","runManager?: CallbackManagerForLLMRun"],"sources":["../../src/language_models/llms.ts"],"sourcesContent":["import {\n AIMessage,\n type BaseMessage,\n getBufferString,\n} from \"../messages/index.js\";\nimport type { BasePromptValueInterface } from \"../prompt_values.js\";\nimport {\n type LLMResult,\n RUN_KEY,\n type Generation,\n GenerationChunk,\n} from \"../outputs.js\";\nimport {\n type BaseCallbackConfig,\n CallbackManager,\n type CallbackManagerForLLMRun,\n type Callbacks,\n} from \"../callbacks/manager.js\";\nimport {\n BaseLanguageModel,\n type BaseLanguageModelCallOptions,\n type BaseLanguageModelInput,\n type BaseLanguageModelParams,\n} from \"./base.js\";\nimport type { RunnableConfig } from \"../runnables/config.js\";\nimport type { BaseCache } from \"../caches/base.js\";\nimport { concat } from \"../utils/stream.js\";\nimport { callbackHandlerPrefersStreaming } from \"../callbacks/base.js\";\n\nexport type SerializedLLM = {\n _model: string;\n _type: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n} & Record<string, any>;\n\nexport interface BaseLLMParams extends BaseLanguageModelParams {\n /**\n * @deprecated Use `maxConcurrency` instead\n */\n concurrency?: number;\n}\n\nexport interface BaseLLMCallOptions extends BaseLanguageModelCallOptions {}\n\n/**\n * LLM Wrapper. Takes in a prompt (or prompts) and returns a string.\n */\nexport abstract class BaseLLM<\n CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions\n> extends BaseLanguageModel<string, CallOptions> {\n // Backwards compatibility since fields have been moved to RunnableConfig\n declare ParsedCallOptions: Omit<\n CallOptions,\n Exclude<keyof RunnableConfig, \"signal\" | \"timeout\" | \"maxConcurrency\">\n >;\n\n // Only ever instantiated in main LangChain\n lc_namespace = [\"langchain\", \"llms\", this._llmType()];\n\n constructor({ concurrency, ...rest }: BaseLLMParams) {\n super(concurrency ? { maxConcurrency: concurrency, ...rest } : rest);\n }\n\n /**\n * This method takes an input and options, and returns a string. 
It\n * converts the input to a prompt value and generates a result based on\n * the prompt.\n * @param input Input for the LLM.\n * @param options Options for the LLM call.\n * @returns A string result based on the prompt.\n */\n async invoke(\n input: BaseLanguageModelInput,\n options?: CallOptions\n ): Promise<string> {\n const promptValue = BaseLLM._convertInputToPromptValue(input);\n const result = await this.generatePrompt(\n [promptValue],\n options,\n options?.callbacks\n );\n return result.generations[0][0].text;\n }\n\n // eslint-disable-next-line require-yield\n async *_streamResponseChunks(\n _input: string,\n _options: this[\"ParsedCallOptions\"],\n _runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<GenerationChunk> {\n throw new Error(\"Not implemented.\");\n }\n\n protected _separateRunnableConfigFromCallOptionsCompat(\n options?: Partial<CallOptions>\n ): [RunnableConfig, this[\"ParsedCallOptions\"]] {\n // For backwards compat, keep `signal` in both runnableConfig and callOptions\n const [runnableConfig, callOptions] =\n super._separateRunnableConfigFromCallOptions(options);\n (callOptions as this[\"ParsedCallOptions\"]).signal = runnableConfig.signal;\n return [runnableConfig, callOptions as this[\"ParsedCallOptions\"]];\n }\n\n async *_streamIterator(\n input: BaseLanguageModelInput,\n options?: CallOptions\n ): AsyncGenerator<string> {\n // Subclass check required to avoid double callbacks with default implementation\n if (\n this._streamResponseChunks === BaseLLM.prototype._streamResponseChunks\n ) {\n yield this.invoke(input, options);\n } else {\n const prompt = BaseLLM._convertInputToPromptValue(input);\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(options);\n const callbackManager_ = await CallbackManager.configure(\n runnableConfig.callbacks,\n this.callbacks,\n runnableConfig.tags,\n this.tags,\n runnableConfig.metadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: callOptions,\n invocation_params: this?.invocationParams(callOptions),\n batch_size: 1,\n };\n const runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n [prompt.toString()],\n runnableConfig.runId,\n undefined,\n extra,\n undefined,\n undefined,\n runnableConfig.runName\n );\n let generation = new GenerationChunk({\n text: \"\",\n });\n try {\n for await (const chunk of this._streamResponseChunks(\n prompt.toString(),\n callOptions,\n runManagers?.[0]\n )) {\n if (!generation) {\n generation = chunk;\n } else {\n generation = generation.concat(chunk);\n }\n if (typeof chunk.text === \"string\") {\n yield chunk.text;\n }\n }\n } catch (err) {\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMError(err)\n )\n );\n throw err;\n }\n await Promise.all(\n (runManagers ?? 
[]).map((runManager) =>\n runManager?.handleLLMEnd({\n generations: [[generation]],\n })\n )\n );\n }\n }\n\n /**\n * This method takes prompt values, options, and callbacks, and generates\n * a result based on the prompts.\n * @param promptValues Prompt values for the LLM.\n * @param options Options for the LLM call.\n * @param callbacks Callbacks for the LLM call.\n * @returns An LLMResult based on the prompts.\n */\n async generatePrompt(\n promptValues: BasePromptValueInterface[],\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n const prompts: string[] = promptValues.map((promptValue) =>\n promptValue.toString()\n );\n return this.generate(prompts, options, callbacks);\n }\n\n /**\n * Run the LLM on the given prompts and input.\n */\n abstract _generate(\n prompts: string[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<LLMResult>;\n\n /**\n * Get the parameters used to invoke the model\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n invocationParams(_options?: this[\"ParsedCallOptions\"]): any {\n return {};\n }\n\n _flattenLLMResult(llmResult: LLMResult): LLMResult[] {\n const llmResults: LLMResult[] = [];\n\n for (let i = 0; i < llmResult.generations.length; i += 1) {\n const genList = llmResult.generations[i];\n\n if (i === 0) {\n llmResults.push({\n generations: [genList],\n llmOutput: llmResult.llmOutput,\n });\n } else {\n const llmOutput = llmResult.llmOutput\n ? { ...llmResult.llmOutput, tokenUsage: {} }\n : undefined;\n\n llmResults.push({\n generations: [genList],\n llmOutput,\n });\n }\n }\n\n return llmResults;\n }\n\n /** @ignore */\n async _generateUncached(\n prompts: string[],\n parsedOptions: this[\"ParsedCallOptions\"],\n handledOptions: BaseCallbackConfig,\n startedRunManagers?: CallbackManagerForLLMRun[]\n ): Promise<LLMResult> {\n let runManagers: CallbackManagerForLLMRun[] | undefined;\n if (\n startedRunManagers !== undefined &&\n startedRunManagers.length === prompts.length\n ) {\n runManagers = startedRunManagers;\n } else {\n const callbackManager_ = await CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n handledOptions.metadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: this?.invocationParams(parsedOptions),\n batch_size: prompts.length,\n };\n runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n prompts,\n handledOptions.runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions?.runName\n );\n }\n // Even if stream is not explicitly called, check if model is implicitly\n // called from streamEvents() or streamLog() to get all streamed events.\n // Bail out if _streamResponseChunks not overridden\n const hasStreamingHandler = !!runManagers?.[0].handlers.find(\n callbackHandlerPrefersStreaming\n );\n let output: LLMResult;\n if (\n hasStreamingHandler &&\n prompts.length === 1 &&\n this._streamResponseChunks !== BaseLLM.prototype._streamResponseChunks\n ) {\n try {\n const stream = await this._streamResponseChunks(\n prompts[0],\n parsedOptions,\n runManagers?.[0]\n );\n let aggregated;\n for await (const chunk of stream) {\n if (aggregated === undefined) {\n aggregated = chunk;\n } else {\n aggregated = concat(aggregated, chunk);\n }\n }\n if (aggregated === undefined) {\n throw new Error(\"Received empty response from chat model call.\");\n }\n output = { generations: [[aggregated]], 
llmOutput: {} };\n await runManagers?.[0].handleLLMEnd(output);\n } catch (e) {\n await runManagers?.[0].handleLLMError(e);\n throw e;\n }\n } else {\n try {\n output = await this._generate(prompts, parsedOptions, runManagers?.[0]);\n } catch (err) {\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMError(err)\n )\n );\n throw err;\n }\n\n const flattenedOutputs: LLMResult[] = this._flattenLLMResult(output);\n await Promise.all(\n (runManagers ?? []).map((runManager, i) =>\n runManager?.handleLLMEnd(flattenedOutputs[i])\n )\n );\n }\n const runIds = runManagers?.map((manager) => manager.runId) || undefined;\n // This defines RUN_KEY as a non-enumerable property on the output object\n // so that it is not serialized when the output is stringified, and so that\n // it isnt included when listing the keys of the output object.\n Object.defineProperty(output, RUN_KEY, {\n value: runIds ? { runIds } : undefined,\n configurable: true,\n });\n return output;\n }\n\n async _generateCached({\n prompts,\n cache,\n llmStringKey,\n parsedOptions,\n handledOptions,\n runId,\n }: {\n prompts: string[];\n cache: BaseCache<Generation[]>;\n llmStringKey: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsedOptions: any;\n handledOptions: RunnableConfig;\n runId?: string;\n }): Promise<\n LLMResult & {\n missingPromptIndices: number[];\n startedRunManagers?: CallbackManagerForLLMRun[];\n }\n > {\n const callbackManager_ = await CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n handledOptions.metadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: this?.invocationParams(parsedOptions),\n batch_size: prompts.length,\n };\n const runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n prompts,\n runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions?.runName\n );\n\n // generate results\n const missingPromptIndices: number[] = [];\n const results = await Promise.allSettled(\n prompts.map(async (prompt, index) => {\n const result = await cache.lookup(prompt, llmStringKey);\n if (result == null) {\n missingPromptIndices.push(index);\n }\n return result;\n })\n );\n\n // Map run managers to the results before filtering out null results\n // Null results are just absent from the cache.\n const cachedResults = results\n .map((result, index) => ({ result, runManager: runManagers?.[index] }))\n .filter(\n ({ result }) =>\n (result.status === \"fulfilled\" && result.value != null) ||\n result.status === \"rejected\"\n );\n\n // Handle results and call run managers\n const generations: Generation[][] = [];\n await Promise.all(\n cachedResults.map(async ({ result: promiseResult, runManager }, i) => {\n if (promiseResult.status === \"fulfilled\") {\n const result = promiseResult.value as Generation[];\n generations[i] = result.map((result) => {\n result.generationInfo = {\n ...result.generationInfo,\n tokenUsage: {},\n };\n return result;\n });\n if (result.length) {\n await runManager?.handleLLMNewToken(result[0].text);\n }\n return runManager?.handleLLMEnd(\n {\n generations: [result],\n },\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n } else {\n // status === \"rejected\"\n await runManager?.handleLLMError(\n promiseResult.reason,\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n return Promise.reject(promiseResult.reason);\n }\n })\n );\n\n const 
output = {\n generations,\n missingPromptIndices,\n startedRunManagers: runManagers,\n };\n\n // This defines RUN_KEY as a non-enumerable property on the output object\n // so that it is not serialized when the output is stringified, and so that\n // it isnt included when listing the keys of the output object.\n Object.defineProperty(output, RUN_KEY, {\n value: runManagers\n ? { runIds: runManagers?.map((manager) => manager.runId) }\n : undefined,\n configurable: true,\n });\n\n return output;\n }\n\n /**\n * Run the LLM on the given prompts and input, handling caching.\n */\n async generate(\n prompts: string[],\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n if (!Array.isArray(prompts)) {\n throw new Error(\"Argument 'prompts' is expected to be a string[]\");\n }\n\n let parsedOptions: CallOptions | undefined;\n if (Array.isArray(options)) {\n parsedOptions = { stop: options } as CallOptions;\n } else {\n parsedOptions = options;\n }\n\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(parsedOptions);\n runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;\n\n if (!this.cache) {\n return this._generateUncached(prompts, callOptions, runnableConfig);\n }\n\n const { cache } = this;\n const llmStringKey = this._getSerializedCacheKeyParametersForCall(\n callOptions as CallOptions\n );\n const { generations, missingPromptIndices, startedRunManagers } =\n await this._generateCached({\n prompts,\n cache,\n llmStringKey,\n parsedOptions: callOptions,\n handledOptions: runnableConfig,\n runId: runnableConfig.runId,\n });\n\n let llmOutput = {};\n if (missingPromptIndices.length > 0) {\n const results = await this._generateUncached(\n missingPromptIndices.map((i) => prompts[i]),\n callOptions,\n runnableConfig,\n startedRunManagers !== undefined\n ? missingPromptIndices.map((i) => startedRunManagers?.[i])\n : undefined\n );\n await Promise.all(\n results.generations.map(async (generation, index) => {\n const promptIndex = missingPromptIndices[index];\n generations[promptIndex] = generation;\n return cache.update(prompts[promptIndex], llmStringKey, generation);\n })\n );\n llmOutput = results.llmOutput ?? {};\n }\n\n return { generations, llmOutput } as LLMResult;\n }\n\n /**\n * @deprecated Use .invoke() instead. Will be removed in 0.2.0.\n * Convenience wrapper for {@link generate} that takes in a single string prompt and returns a single string output.\n */\n async call(\n prompt: string,\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<string> {\n const { generations } = await this.generate([prompt], options, callbacks);\n return generations[0][0].text;\n }\n\n /**\n * @deprecated Use .invoke() instead. Will be removed in 0.2.0.\n *\n * This method is similar to `call`, but it's used for making predictions\n * based on the input text.\n * @param text Input text for the prediction.\n * @param options Options for the LLM call.\n * @param callbacks Callbacks for the LLM call.\n * @returns A prediction based on the input text.\n */\n async predict(\n text: string,\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<string> {\n return this.call(text, options, callbacks);\n }\n\n /**\n * @deprecated Use .invoke() instead. 
Will be removed in 0.2.0.\n *\n * This method takes a list of messages, options, and callbacks, and\n * returns a predicted message.\n * @param messages A list of messages for the prediction.\n * @param options Options for the LLM call.\n * @param callbacks Callbacks for the LLM call.\n * @returns A predicted message based on the list of messages.\n */\n async predictMessages(\n messages: BaseMessage[],\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<BaseMessage> {\n const text = getBufferString(messages);\n const prediction = await this.call(text, options, callbacks);\n return new AIMessage(prediction);\n }\n\n /**\n * Get the identifying parameters of the LLM.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _identifyingParams(): Record<string, any> {\n return {};\n }\n\n /**\n * Return the string type key uniquely identifying this class of LLM.\n */\n abstract _llmType(): string;\n\n /**\n * @deprecated\n * Return a json-like object representing this LLM.\n */\n serialize(): SerializedLLM {\n return {\n ...this._identifyingParams(),\n _type: this._llmType(),\n _model: this._modelType(),\n };\n }\n\n _modelType(): string {\n return \"base_llm\" as const;\n }\n}\n\n/**\n * LLM class that provides a simpler interface to subclass than {@link BaseLLM}.\n *\n * Requires only implementing a simpler {@link _call} method instead of {@link _generate}.\n *\n * @augments BaseLLM\n */\nexport abstract class LLM<\n CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions\n> extends BaseLLM<CallOptions> {\n /**\n * Run the LLM on the given prompt and input.\n */\n abstract _call(\n prompt: string,\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<string>;\n\n async _generate(\n prompts: string[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<LLMResult> {\n const generations: Generation[][] = await Promise.all(\n prompts.map((prompt, promptIndex) =>\n this._call(prompt, { ...options, promptIndex }, runManager).then(\n (text) => [{ text }]\n )\n )\n );\n return { generations };\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;AA+CA,IAAsB,UAAtB,MAAsB,gBAEZ,kBAAuC;CAQ/C,eAAe;EAAC;EAAa;EAAQ,KAAK,UAAU;CAAC;CAErD,YAAY,EAAE,YAAa,GAAG,MAAqB,EAAE;EACnD,MAAM,cAAc;GAAE,gBAAgB;GAAa,GAAG;EAAM,IAAG,KAAK;CACrE;;;;;;;;;CAUD,MAAM,OACJA,OACAC,SACiB;EACjB,MAAM,cAAc,QAAQ,2BAA2B,MAAM;EAC7D,MAAM,SAAS,MAAM,KAAK,eACxB,CAAC,WAAY,GACb,SACA,SAAS,UACV;AACD,SAAO,OAAO,YAAY,GAAG,GAAG;CACjC;CAGD,OAAO,sBACLC,QACAC,UACAC,aACiC;AACjC,QAAM,IAAI,MAAM;CACjB;CAED,AAAU,6CACRC,SAC6C;EAE7C,MAAM,CAAC,gBAAgB,YAAY,GACjC,MAAM,uCAAuC,QAAQ;EACtD,YAA0C,SAAS,eAAe;AACnE,SAAO,CAAC,gBAAgB,WAAyC;CAClE;CAED,OAAO,gBACLL,OACAC,SACwB;AAExB,MACE,KAAK,0BAA0B,QAAQ,UAAU,uBAEjD,MAAM,KAAK,OAAO,OAAO,QAAQ;OAC5B;GACL,MAAM,SAAS,QAAQ,2BAA2B,MAAM;GACxD,MAAM,CAAC,gBAAgB,YAAY,GACjC,KAAK,6CAA6C,QAAQ;GAC5D,MAAM,mBAAmB,MAAM,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf,KAAK,UACL,EAAE,SAAS,KAAK,QAAS,EAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,mBAAmB,MAAM,iBAAiB,YAAY;IACtD,YAAY;GACb;GACD,MAAM,cAAc,MAAM,kBAAkB,eAC1C,KAAK,QAAQ,EACb,CAAC,OAAO,UAAU,AAAC,GACnB,eAAe,OACf,QACA,OACA,QACA,QACA,eAAe,QAChB;GACD,IAAI,aAAa,IAAI,gBAAgB,EACnC,MAAM,GACP;AACD,OAAI;AACF,eAAW,MAAM,SAAS,KAAK,sBAC7B,OAAO,UAAU,EACjB,aACA,cAAc,GACf,EAAE;AACD,SAAI,CAAC,YACH,aAAa;UAEb,aAAa,WAAW,OAAO,MAAM;AAEvC,SAAI,OAAO,MAAM,SAAS,UACxB,MAAM,MAAM;IAEf;GACF,SAAQ,KAAK;IACZ,MAAM,QAAQ,KACX,eAAe,CAAE,GAAE,IAAI,CAAC,eACvB,YAAY,eAAe,IAAI,CAChC,CACF;AACD,UAAM;GACP;GACD,MAAM,QAAQ,KACX,eAAe,CAAE,GAAE,IAAI,CAAC,eACvB,YAAY,aAAa,EACvB,aAAa,CAAC,CAAC,UAAW,CAAC,EAC5B,EAAC,CACH,CACF;EACF;CACF;;;;;;;;;CAUD,MAAM,eACJK,cACAC,SACAC,WACoB;EACpB,MAAMC,UAAoB,aAAa,IAAI,CAAC,gBAC1C,YAAY,UAAU,CACvB;AACD,SAAO,KAAK,SAAS,SAAS,SAAS,UAAU;CAClD;;;;CAeD,iBAAiBC,UAA2C;AAC1D,SAAO,CAAE;CACV;CAED,kBAAkBC,WAAmC;EACnD,MAAMC,aAA0B,CAAE;AAElC,OAAK,IAAI,IAAI,GAAG,IAAI,UAAU,YAAY,QAAQ,KAAK,GAAG;GACxD,MAAM,UAAU,UAAU,YAAY;AAEtC,OAAI,MAAM,GACR,WAAW,KAAK;IACd,aAAa,CAAC,OAAQ;IACtB,WAAW,UAAU;GACtB,EAAC;QACG;IACL,MAAM,YAAY,UAAU,YACxB;KAAE,GAAG,UAAU;KAAW,YAAY,CAAE;IAAE,IAC1C;IAEJ,WAAW,KAAK;KACd,aAAa,CAAC,OAAQ;KACtB;IACD,EAAC;GACH;EACF;AAED,SAAO;CACR;;CAGD,MAAM,kBACJH,SACAI,eACAC,gBACAC,oBACoB;EACpB,IAAIC;AACJ,MACE,uBAAuB,UACvB,mBAAmB,WAAW,QAAQ,QAEtC,cAAc;OACT;GACL,MAAM,mBAAmB,MAAM,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf,KAAK,UACL,EAAE,SAAS,KAAK,QAAS,EAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,mBAAmB,MAAM,iBAAiB,cAAc;IACxD,YAAY,QAAQ;GACrB;GACD,cAAc,MAAM,kBAAkB,eACpC,KAAK,QAAQ,EACb,SACA,eAAe,OACf,QACA,OACA,QACA,QACA,gBAAgB,QACjB;EACF;EAID,MAAM,sBAAsB,CAAC,CAAC,cAAc,GAAG,SAAS,KACtD,gCACD;EACD,IAAIC;AACJ,MACE,uBACA,QAAQ,WAAW,KACnB,KAAK,0BAA0B,QAAQ,UAAU,sBAEjD,KAAI;GACF,MAAM,SAAS,MAAM,KAAK,sBACxB,QAAQ,IACR,eACA,cAAc,GACf;GACD,IAAI;AACJ,cAAW,MAAM,SAAS,OACxB,KAAI,eAAe,QACjB,aAAa;QAEb,aAAa,OAAO,YAAY,MAAM;AAG1C,OAAI,eAAe,OACjB,OAAM,IAAI,MAAM;GAElB,SAAS;IAAE,aAAa,CAAC,CAAC,UAAW,CAAC;IAAE,WAAW,CAAE;GAAE;GACvD,MAAM,cAAc,GAAG,aAAa,OAAO;EAC5C,SAAQ,GAAG;GACV,MAAM,cAAc,GAAG,eAAe,EAAE;AACxC,SAAM;EACP;OACI;AACL,OAAI;IACF,SAAS,MAAM,KAAK,UAAU,SAAS,eAAe,cAAc,GAAG;GACxE,SAAQ,KAAK;IACZ,MAAM,QAAQ,KACX,eAAe,CAAE,GAAE,IAAI,CAAC,eACvB,YAAY,eAAe,IAAI,CAChC,CACF;AACD,UAAM;GACP;GAED,MAAMC,mBAAgC,KAAK,kBAAkB,OAAO;GACpE,MAAM,QAAQ,KACX,eAAe,CAAE,GAAE,IAAI,CAAC,YAAY,MACnC,YAAY,aAAa,iBAAiB,GAAG,CAC9C,CACF;EACF;EACD,MAAM,SAAS,aAAa,IAAI,CAAC,YAAY,QAAQ,MAAM,IAAI;EAI/D,OAAO,eAAe,QAAQ,SAAS;GACrC,OAAO,SAAS,EAAE,OAAQ,IAAG;GAC7B,cAAc;EACf,EAAC;AACF,SAAO;CACR;CAED,MAAM,gBAAgB,EACpB,SACA,OACA,cACA,eACA,gBACA,OASD,EAKC;EACA,MAAM,mBAAmB,MAAM,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf,KAAK,UACL,EAAE,SAAS,KAAK,QAAS,EAC1B;EACD,MAAM,QAAQ;GACZ,SAAS
;GACT,mBAAmB,MAAM,iBAAiB,cAAc;GACxD,YAAY,QAAQ;EACrB;EACD,MAAM,cAAc,MAAM,kBAAkB,eAC1C,KAAK,QAAQ,EACb,SACA,OACA,QACA,OACA,QACA,QACA,gBAAgB,QACjB;EAGD,MAAMC,uBAAiC,CAAE;EACzC,MAAM,UAAU,MAAM,QAAQ,WAC5B,QAAQ,IAAI,OAAO,QAAQ,UAAU;GACnC,MAAM,SAAS,MAAM,MAAM,OAAO,QAAQ,aAAa;AACvD,OAAI,UAAU,MACZ,qBAAqB,KAAK,MAAM;AAElC,UAAO;EACR,EAAC,CACH;EAID,MAAM,gBAAgB,QACnB,IAAI,CAAC,QAAQ,WAAW;GAAE;GAAQ,YAAY,cAAc;EAAQ,GAAE,CACtE,OACC,CAAC,EAAE,QAAQ,KACR,OAAO,WAAW,eAAe,OAAO,SAAS,QAClD,OAAO,WAAW,WACrB;EAGH,MAAMC,cAA8B,CAAE;EACtC,MAAM,QAAQ,IACZ,cAAc,IAAI,OAAO,EAAE,QAAQ,eAAe,YAAY,EAAE,MAAM;AACpE,OAAI,cAAc,WAAW,aAAa;IACxC,MAAM,SAAS,cAAc;IAC7B,YAAY,KAAK,OAAO,IAAI,CAACC,aAAW;KACtCA,SAAO,iBAAiB;MACtB,GAAGA,SAAO;MACV,YAAY,CAAE;KACf;AACD,YAAOA;IACR,EAAC;AACF,QAAI,OAAO,QACT,MAAM,YAAY,kBAAkB,OAAO,GAAG,KAAK;AAErD,WAAO,YAAY,aACjB,EACE,aAAa,CAAC,MAAO,EACtB,GACD,QACA,QACA,QACA,EACE,QAAQ,KACT,EACF;GACF,OAAM;IAEL,MAAM,YAAY,eAChB,cAAc,QACd,QACA,QACA,QACA,EACE,QAAQ,KACT,EACF;AACD,WAAO,QAAQ,OAAO,cAAc,OAAO;GAC5C;EACF,EAAC,CACH;EAED,MAAM,SAAS;GACb;GACA;GACA,oBAAoB;EACrB;EAKD,OAAO,eAAe,QAAQ,SAAS;GACrC,OAAO,cACH,EAAE,QAAQ,aAAa,IAAI,CAAC,YAAY,QAAQ,MAAM,CAAE,IACxD;GACJ,cAAc;EACf,EAAC;AAEF,SAAO;CACR;;;;CAKD,MAAM,SACJZ,SACAF,SACAC,WACoB;AACpB,MAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM;EAGlB,IAAIc;AACJ,MAAI,MAAM,QAAQ,QAAQ,EACxB,gBAAgB,EAAE,MAAM,QAAS;OAEjC,gBAAgB;EAGlB,MAAM,CAAC,gBAAgB,YAAY,GACjC,KAAK,6CAA6C,cAAc;EAClE,eAAe,YAAY,eAAe,aAAa;AAEvD,MAAI,CAAC,KAAK,MACR,QAAO,KAAK,kBAAkB,SAAS,aAAa,eAAe;EAGrE,MAAM,EAAE,OAAO,GAAG;EAClB,MAAM,eAAe,KAAK,wCACxB,YACD;EACD,MAAM,EAAE,aAAa,sBAAsB,oBAAoB,GAC7D,MAAM,KAAK,gBAAgB;GACzB;GACA;GACA;GACA,eAAe;GACf,gBAAgB;GAChB,OAAO,eAAe;EACvB,EAAC;EAEJ,IAAI,YAAY,CAAE;AAClB,MAAI,qBAAqB,SAAS,GAAG;GACnC,MAAM,UAAU,MAAM,KAAK,kBACzB,qBAAqB,IAAI,CAAC,MAAM,QAAQ,GAAG,EAC3C,aACA,gBACA,uBAAuB,SACnB,qBAAqB,IAAI,CAAC,MAAM,qBAAqB,GAAG,GACxD,OACL;GACD,MAAM,QAAQ,IACZ,QAAQ,YAAY,IAAI,OAAO,YAAY,UAAU;IACnD,MAAM,cAAc,qBAAqB;IACzC,YAAY,eAAe;AAC3B,WAAO,MAAM,OAAO,QAAQ,cAAc,cAAc,WAAW;GACpE,EAAC,CACH;GACD,YAAY,QAAQ,aAAa,CAAE;EACpC;AAED,SAAO;GAAE;GAAa;EAAW;CAClC;;;;;CAMD,MAAM,KACJC,QACAhB,SACAC,WACiB;EACjB,MAAM,EAAE,aAAa,GAAG,MAAM,KAAK,SAAS,CAAC,MAAO,GAAE,SAAS,UAAU;AACzE,SAAO,YAAY,GAAG,GAAG;CAC1B;;;;;;;;;;;CAYD,MAAM,QACJgB,MACAjB,SACAC,WACiB;AACjB,SAAO,KAAK,KAAK,MAAM,SAAS,UAAU;CAC3C;;;;;;;;;;;CAYD,MAAM,gBACJiB,UACAlB,SACAC,WACsB;EACtB,MAAM,OAAO,gBAAgB,SAAS;EACtC,MAAM,aAAa,MAAM,KAAK,KAAK,MAAM,SAAS,UAAU;AAC5D,SAAO,IAAI,UAAU;CACtB;;;;CAMD,qBAA0C;AACxC,SAAO,CAAE;CACV;;;;;CAWD,YAA2B;AACzB,SAAO;GACL,GAAG,KAAK,oBAAoB;GAC5B,OAAO,KAAK,UAAU;GACtB,QAAQ,KAAK,YAAY;EAC1B;CACF;CAED,aAAqB;AACnB,SAAO;CACR;AACF;;;;;;;;AASD,IAAsB,MAAtB,cAEU,QAAqB;CAU7B,MAAM,UACJC,SACAiB,SACAC,YACoB;EACpB,MAAMP,cAA8B,MAAM,QAAQ,IAChD,QAAQ,IAAI,CAAC,QAAQ,gBACnB,KAAK,MAAM,QAAQ;GAAE,GAAG;GAAS;EAAa,GAAE,WAAW,CAAC,KAC1D,CAAC,SAAS,CAAC,EAAE,KAAM,CAAC,EACrB,CACF,CACF;AACD,SAAO,EAAE,YAAa;CACvB;AACF"}
+
{"version":3,"file":"llms.js","names":["input: BaseLanguageModelInput","options?: CallOptions","_input: string","_options: this[\"ParsedCallOptions\"]","_runManager?: CallbackManagerForLLMRun","options?: Partial<CallOptions>","promptValues: BasePromptValueInterface[]","options?: string[] | CallOptions","callbacks?: Callbacks","prompts: string[]","_options?: this[\"ParsedCallOptions\"]","llmResult: LLMResult","llmResults: LLMResult[]","parsedOptions: this[\"ParsedCallOptions\"]","handledOptions: BaseCallbackConfig","startedRunManagers?: CallbackManagerForLLMRun[]","runManagers: CallbackManagerForLLMRun[] | undefined","output: LLMResult","flattenedOutputs: LLMResult[]","missingPromptIndices: number[]","generations: Generation[][]","result","parsedOptions: CallOptions | undefined","options: this[\"ParsedCallOptions\"]","runManager?: CallbackManagerForLLMRun"],"sources":["../../src/language_models/llms.ts"],"sourcesContent":["import type { BasePromptValueInterface } from \"../prompt_values.js\";\nimport {\n type LLMResult,\n RUN_KEY,\n type Generation,\n GenerationChunk,\n} from \"../outputs.js\";\nimport {\n type BaseCallbackConfig,\n CallbackManager,\n type CallbackManagerForLLMRun,\n type Callbacks,\n} from \"../callbacks/manager.js\";\nimport {\n BaseLanguageModel,\n type BaseLanguageModelCallOptions,\n type BaseLanguageModelInput,\n type BaseLanguageModelParams,\n} from \"./base.js\";\nimport type { RunnableConfig } from \"../runnables/config.js\";\nimport type { BaseCache } from \"../caches/base.js\";\nimport { concat } from \"../utils/stream.js\";\nimport { callbackHandlerPrefersStreaming } from \"../callbacks/base.js\";\n\nexport type SerializedLLM = {\n _model: string;\n _type: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n} & Record<string, any>;\n\nexport interface BaseLLMParams extends BaseLanguageModelParams {}\n\nexport interface BaseLLMCallOptions extends BaseLanguageModelCallOptions {}\n\n/**\n * LLM Wrapper. Takes in a prompt (or prompts) and returns a string.\n */\nexport abstract class BaseLLM<\n CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions\n> extends BaseLanguageModel<string, CallOptions> {\n // Backwards compatibility since fields have been moved to RunnableConfig\n declare ParsedCallOptions: Omit<\n CallOptions,\n Exclude<keyof RunnableConfig, \"signal\" | \"timeout\" | \"maxConcurrency\">\n >;\n\n // Only ever instantiated in main LangChain\n lc_namespace = [\"langchain\", \"llms\", this._llmType()];\n\n /**\n * This method takes an input and options, and returns a string. 
It\n * converts the input to a prompt value and generates a result based on\n * the prompt.\n * @param input Input for the LLM.\n * @param options Options for the LLM call.\n * @returns A string result based on the prompt.\n */\n async invoke(\n input: BaseLanguageModelInput,\n options?: CallOptions\n ): Promise<string> {\n const promptValue = BaseLLM._convertInputToPromptValue(input);\n const result = await this.generatePrompt(\n [promptValue],\n options,\n options?.callbacks\n );\n return result.generations[0][0].text;\n }\n\n // eslint-disable-next-line require-yield\n async *_streamResponseChunks(\n _input: string,\n _options: this[\"ParsedCallOptions\"],\n _runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<GenerationChunk> {\n throw new Error(\"Not implemented.\");\n }\n\n protected _separateRunnableConfigFromCallOptionsCompat(\n options?: Partial<CallOptions>\n ): [RunnableConfig, this[\"ParsedCallOptions\"]] {\n // For backwards compat, keep `signal` in both runnableConfig and callOptions\n const [runnableConfig, callOptions] =\n super._separateRunnableConfigFromCallOptions(options);\n (callOptions as this[\"ParsedCallOptions\"]).signal = runnableConfig.signal;\n return [runnableConfig, callOptions as this[\"ParsedCallOptions\"]];\n }\n\n async *_streamIterator(\n input: BaseLanguageModelInput,\n options?: CallOptions\n ): AsyncGenerator<string> {\n // Subclass check required to avoid double callbacks with default implementation\n if (\n this._streamResponseChunks === BaseLLM.prototype._streamResponseChunks\n ) {\n yield this.invoke(input, options);\n } else {\n const prompt = BaseLLM._convertInputToPromptValue(input);\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(options);\n const callbackManager_ = await CallbackManager.configure(\n runnableConfig.callbacks,\n this.callbacks,\n runnableConfig.tags,\n this.tags,\n runnableConfig.metadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: callOptions,\n invocation_params: this?.invocationParams(callOptions),\n batch_size: 1,\n };\n const runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n [prompt.toString()],\n runnableConfig.runId,\n undefined,\n extra,\n undefined,\n undefined,\n runnableConfig.runName\n );\n let generation = new GenerationChunk({\n text: \"\",\n });\n try {\n for await (const chunk of this._streamResponseChunks(\n prompt.toString(),\n callOptions,\n runManagers?.[0]\n )) {\n if (!generation) {\n generation = chunk;\n } else {\n generation = generation.concat(chunk);\n }\n if (typeof chunk.text === \"string\") {\n yield chunk.text;\n }\n }\n } catch (err) {\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMError(err)\n )\n );\n throw err;\n }\n await Promise.all(\n (runManagers ?? 
[]).map((runManager) =>\n runManager?.handleLLMEnd({\n generations: [[generation]],\n })\n )\n );\n }\n }\n\n /**\n * This method takes prompt values, options, and callbacks, and generates\n * a result based on the prompts.\n * @param promptValues Prompt values for the LLM.\n * @param options Options for the LLM call.\n * @param callbacks Callbacks for the LLM call.\n * @returns An LLMResult based on the prompts.\n */\n async generatePrompt(\n promptValues: BasePromptValueInterface[],\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n const prompts: string[] = promptValues.map((promptValue) =>\n promptValue.toString()\n );\n return this.generate(prompts, options, callbacks);\n }\n\n /**\n * Run the LLM on the given prompts and input.\n */\n abstract _generate(\n prompts: string[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<LLMResult>;\n\n /**\n * Get the parameters used to invoke the model\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n invocationParams(_options?: this[\"ParsedCallOptions\"]): any {\n return {};\n }\n\n _flattenLLMResult(llmResult: LLMResult): LLMResult[] {\n const llmResults: LLMResult[] = [];\n\n for (let i = 0; i < llmResult.generations.length; i += 1) {\n const genList = llmResult.generations[i];\n\n if (i === 0) {\n llmResults.push({\n generations: [genList],\n llmOutput: llmResult.llmOutput,\n });\n } else {\n const llmOutput = llmResult.llmOutput\n ? { ...llmResult.llmOutput, tokenUsage: {} }\n : undefined;\n\n llmResults.push({\n generations: [genList],\n llmOutput,\n });\n }\n }\n\n return llmResults;\n }\n\n /** @ignore */\n async _generateUncached(\n prompts: string[],\n parsedOptions: this[\"ParsedCallOptions\"],\n handledOptions: BaseCallbackConfig,\n startedRunManagers?: CallbackManagerForLLMRun[]\n ): Promise<LLMResult> {\n let runManagers: CallbackManagerForLLMRun[] | undefined;\n if (\n startedRunManagers !== undefined &&\n startedRunManagers.length === prompts.length\n ) {\n runManagers = startedRunManagers;\n } else {\n const callbackManager_ = await CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n handledOptions.metadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: this?.invocationParams(parsedOptions),\n batch_size: prompts.length,\n };\n runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n prompts,\n handledOptions.runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions?.runName\n );\n }\n // Even if stream is not explicitly called, check if model is implicitly\n // called from streamEvents() or streamLog() to get all streamed events.\n // Bail out if _streamResponseChunks not overridden\n const hasStreamingHandler = !!runManagers?.[0].handlers.find(\n callbackHandlerPrefersStreaming\n );\n let output: LLMResult;\n if (\n hasStreamingHandler &&\n prompts.length === 1 &&\n this._streamResponseChunks !== BaseLLM.prototype._streamResponseChunks\n ) {\n try {\n const stream = await this._streamResponseChunks(\n prompts[0],\n parsedOptions,\n runManagers?.[0]\n );\n let aggregated;\n for await (const chunk of stream) {\n if (aggregated === undefined) {\n aggregated = chunk;\n } else {\n aggregated = concat(aggregated, chunk);\n }\n }\n if (aggregated === undefined) {\n throw new Error(\"Received empty response from chat model call.\");\n }\n output = { generations: [[aggregated]], 
llmOutput: {} };\n await runManagers?.[0].handleLLMEnd(output);\n } catch (e) {\n await runManagers?.[0].handleLLMError(e);\n throw e;\n }\n } else {\n try {\n output = await this._generate(prompts, parsedOptions, runManagers?.[0]);\n } catch (err) {\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMError(err)\n )\n );\n throw err;\n }\n\n const flattenedOutputs: LLMResult[] = this._flattenLLMResult(output);\n await Promise.all(\n (runManagers ?? []).map((runManager, i) =>\n runManager?.handleLLMEnd(flattenedOutputs[i])\n )\n );\n }\n const runIds = runManagers?.map((manager) => manager.runId) || undefined;\n // This defines RUN_KEY as a non-enumerable property on the output object\n // so that it is not serialized when the output is stringified, and so that\n // it isnt included when listing the keys of the output object.\n Object.defineProperty(output, RUN_KEY, {\n value: runIds ? { runIds } : undefined,\n configurable: true,\n });\n return output;\n }\n\n async _generateCached({\n prompts,\n cache,\n llmStringKey,\n parsedOptions,\n handledOptions,\n runId,\n }: {\n prompts: string[];\n cache: BaseCache<Generation[]>;\n llmStringKey: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsedOptions: any;\n handledOptions: RunnableConfig;\n runId?: string;\n }): Promise<\n LLMResult & {\n missingPromptIndices: number[];\n startedRunManagers?: CallbackManagerForLLMRun[];\n }\n > {\n const callbackManager_ = await CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n handledOptions.metadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: this?.invocationParams(parsedOptions),\n batch_size: prompts.length,\n };\n const runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n prompts,\n runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions?.runName\n );\n\n // generate results\n const missingPromptIndices: number[] = [];\n const results = await Promise.allSettled(\n prompts.map(async (prompt, index) => {\n const result = await cache.lookup(prompt, llmStringKey);\n if (result == null) {\n missingPromptIndices.push(index);\n }\n return result;\n })\n );\n\n // Map run managers to the results before filtering out null results\n // Null results are just absent from the cache.\n const cachedResults = results\n .map((result, index) => ({ result, runManager: runManagers?.[index] }))\n .filter(\n ({ result }) =>\n (result.status === \"fulfilled\" && result.value != null) ||\n result.status === \"rejected\"\n );\n\n // Handle results and call run managers\n const generations: Generation[][] = [];\n await Promise.all(\n cachedResults.map(async ({ result: promiseResult, runManager }, i) => {\n if (promiseResult.status === \"fulfilled\") {\n const result = promiseResult.value as Generation[];\n generations[i] = result.map((result) => {\n result.generationInfo = {\n ...result.generationInfo,\n tokenUsage: {},\n };\n return result;\n });\n if (result.length) {\n await runManager?.handleLLMNewToken(result[0].text);\n }\n return runManager?.handleLLMEnd(\n {\n generations: [result],\n },\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n } else {\n // status === \"rejected\"\n await runManager?.handleLLMError(\n promiseResult.reason,\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n return Promise.reject(promiseResult.reason);\n }\n })\n );\n\n const 
output = {\n generations,\n missingPromptIndices,\n startedRunManagers: runManagers,\n };\n\n // This defines RUN_KEY as a non-enumerable property on the output object\n // so that it is not serialized when the output is stringified, and so that\n // it isnt included when listing the keys of the output object.\n Object.defineProperty(output, RUN_KEY, {\n value: runManagers\n ? { runIds: runManagers?.map((manager) => manager.runId) }\n : undefined,\n configurable: true,\n });\n\n return output;\n }\n\n /**\n * Run the LLM on the given prompts and input, handling caching.\n */\n async generate(\n prompts: string[],\n options?: string[] | CallOptions,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n if (!Array.isArray(prompts)) {\n throw new Error(\"Argument 'prompts' is expected to be a string[]\");\n }\n\n let parsedOptions: CallOptions | undefined;\n if (Array.isArray(options)) {\n parsedOptions = { stop: options } as CallOptions;\n } else {\n parsedOptions = options;\n }\n\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(parsedOptions);\n runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;\n\n if (!this.cache) {\n return this._generateUncached(prompts, callOptions, runnableConfig);\n }\n\n const { cache } = this;\n const llmStringKey = this._getSerializedCacheKeyParametersForCall(\n callOptions as CallOptions\n );\n const { generations, missingPromptIndices, startedRunManagers } =\n await this._generateCached({\n prompts,\n cache,\n llmStringKey,\n parsedOptions: callOptions,\n handledOptions: runnableConfig,\n runId: runnableConfig.runId,\n });\n\n let llmOutput = {};\n if (missingPromptIndices.length > 0) {\n const results = await this._generateUncached(\n missingPromptIndices.map((i) => prompts[i]),\n callOptions,\n runnableConfig,\n startedRunManagers !== undefined\n ? missingPromptIndices.map((i) => startedRunManagers?.[i])\n : undefined\n );\n await Promise.all(\n results.generations.map(async (generation, index) => {\n const promptIndex = missingPromptIndices[index];\n generations[promptIndex] = generation;\n return cache.update(prompts[promptIndex], llmStringKey, generation);\n })\n );\n llmOutput = results.llmOutput ?? 
{};\n }\n\n return { generations, llmOutput } as LLMResult;\n }\n\n /**\n * Get the identifying parameters of the LLM.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _identifyingParams(): Record<string, any> {\n return {};\n }\n\n /**\n * Return the string type key uniquely identifying this class of LLM.\n */\n abstract _llmType(): string;\n\n _modelType(): string {\n return \"base_llm\" as const;\n }\n}\n\n/**\n * LLM class that provides a simpler interface to subclass than {@link BaseLLM}.\n *\n * Requires only implementing a simpler {@link _call} method instead of {@link _generate}.\n *\n * @augments BaseLLM\n */\nexport abstract class LLM<\n CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions\n> extends BaseLLM<CallOptions> {\n /**\n * Run the LLM on the given prompt and input.\n */\n abstract _call(\n prompt: string,\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<string>;\n\n async _generate(\n prompts: string[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<LLMResult> {\n const generations: Generation[][] = await Promise.all(\n prompts.map((prompt, promptIndex) =>\n this._call(prompt, { ...options, promptIndex }, runManager).then(\n (text) => [{ text }]\n )\n )\n );\n return { generations };\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAqCA,IAAsB,UAAtB,MAAsB,gBAEZ,kBAAuC;CAQ/C,eAAe;EAAC;EAAa;EAAQ,KAAK,UAAU;CAAC;;;;;;;;;CAUrD,MAAM,OACJA,OACAC,SACiB;EACjB,MAAM,cAAc,QAAQ,2BAA2B,MAAM;EAC7D,MAAM,SAAS,MAAM,KAAK,eACxB,CAAC,WAAY,GACb,SACA,SAAS,UACV;AACD,SAAO,OAAO,YAAY,GAAG,GAAG;CACjC;CAGD,OAAO,sBACLC,QACAC,UACAC,aACiC;AACjC,QAAM,IAAI,MAAM;CACjB;CAED,AAAU,6CACRC,SAC6C;EAE7C,MAAM,CAAC,gBAAgB,YAAY,GACjC,MAAM,uCAAuC,QAAQ;EACtD,YAA0C,SAAS,eAAe;AACnE,SAAO,CAAC,gBAAgB,WAAyC;CAClE;CAED,OAAO,gBACLL,OACAC,SACwB;AAExB,MACE,KAAK,0BAA0B,QAAQ,UAAU,uBAEjD,MAAM,KAAK,OAAO,OAAO,QAAQ;OAC5B;GACL,MAAM,SAAS,QAAQ,2BAA2B,MAAM;GACxD,MAAM,CAAC,gBAAgB,YAAY,GACjC,KAAK,6CAA6C,QAAQ;GAC5D,MAAM,mBAAmB,MAAM,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf,KAAK,UACL,EAAE,SAAS,KAAK,QAAS,EAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,mBAAmB,MAAM,iBAAiB,YAAY;IACtD,YAAY;GACb;GACD,MAAM,cAAc,MAAM,kBAAkB,eAC1C,KAAK,QAAQ,EACb,CAAC,OAAO,UAAU,AAAC,GACnB,eAAe,OACf,QACA,OACA,QACA,QACA,eAAe,QAChB;GACD,IAAI,aAAa,IAAI,gBAAgB,EACnC,MAAM,GACP;AACD,OAAI;AACF,eAAW,MAAM,SAAS,KAAK,sBAC7B,OAAO,UAAU,EACjB,aACA,cAAc,GACf,EAAE;AACD,SAAI,CAAC,YACH,aAAa;UAEb,aAAa,WAAW,OAAO,MAAM;AAEvC,SAAI,OAAO,MAAM,SAAS,UACxB,MAAM,MAAM;IAEf;GACF,SAAQ,KAAK;IACZ,MAAM,QAAQ,KACX,eAAe,CAAE,GAAE,IAAI,CAAC,eACvB,YAAY,eAAe,IAAI,CAChC,CACF;AACD,UAAM;GACP;GACD,MAAM,QAAQ,KACX,eAAe,CAAE,GAAE,IAAI,CAAC,eACvB,YAAY,aAAa,EACvB,aAAa,CAAC,CAAC,UAAW,CAAC,EAC5B,EAAC,CACH,CACF;EACF;CACF;;;;;;;;;CAUD,MAAM,eACJK,cACAC,SACAC,WACoB;EACpB,MAAMC,UAAoB,aAAa,IAAI,CAAC,gBAC1C,YAAY,UAAU,CACvB;AACD,SAAO,KAAK,SAAS,SAAS,SAAS,UAAU;CAClD;;;;CAeD,iBAAiBC,UAA2C;AAC1D,SAAO,CAAE;CACV;CAED,kBAAkBC,WAAmC;EACnD,MAAMC,aAA0B,CAAE;AAElC,OAAK,IAAI,IAAI,GAAG,IAAI,UAAU,YAAY,QAAQ,KAAK,GAAG;GACxD,MAAM,UAAU,UAAU,YAAY;AAEtC,OAAI,MAAM,GACR,WAAW,KAAK;IACd,aAAa,CAAC,OAAQ;IACtB,WAAW,UAAU;GACtB,EAAC;QACG;IACL,MAAM,YAAY,UAAU,YACxB;KAAE,GAAG,UAAU;KAAW,YAAY,CAAE;IAAE,IAC1C;IAEJ,WAAW,KAAK;KACd,aAAa,CAAC,OAAQ;KACtB;IACD,EAAC;GACH;EACF;AAED,SAAO;CACR;;CAGD,MAAM,kBACJH,SACAI,eACAC,gBACAC,oBACoB;EACpB,IAAIC;AACJ,MACE,uBAAuB,UACvB,mBAAmB,WAAW,QAAQ,QAEtC,cAAc;OACT;GACL,MAAM,mBAAmB,MAAM,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf,KAAK,UACL,EAAE,SAAS,KAAK,QAAS,EAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,m
BAAmB,MAAM,iBAAiB,cAAc;IACxD,YAAY,QAAQ;GACrB;GACD,cAAc,MAAM,kBAAkB,eACpC,KAAK,QAAQ,EACb,SACA,eAAe,OACf,QACA,OACA,QACA,QACA,gBAAgB,QACjB;EACF;EAID,MAAM,sBAAsB,CAAC,CAAC,cAAc,GAAG,SAAS,KACtD,gCACD;EACD,IAAIC;AACJ,MACE,uBACA,QAAQ,WAAW,KACnB,KAAK,0BAA0B,QAAQ,UAAU,sBAEjD,KAAI;GACF,MAAM,SAAS,MAAM,KAAK,sBACxB,QAAQ,IACR,eACA,cAAc,GACf;GACD,IAAI;AACJ,cAAW,MAAM,SAAS,OACxB,KAAI,eAAe,QACjB,aAAa;QAEb,aAAa,OAAO,YAAY,MAAM;AAG1C,OAAI,eAAe,OACjB,OAAM,IAAI,MAAM;GAElB,SAAS;IAAE,aAAa,CAAC,CAAC,UAAW,CAAC;IAAE,WAAW,CAAE;GAAE;GACvD,MAAM,cAAc,GAAG,aAAa,OAAO;EAC5C,SAAQ,GAAG;GACV,MAAM,cAAc,GAAG,eAAe,EAAE;AACxC,SAAM;EACP;OACI;AACL,OAAI;IACF,SAAS,MAAM,KAAK,UAAU,SAAS,eAAe,cAAc,GAAG;GACxE,SAAQ,KAAK;IACZ,MAAM,QAAQ,KACX,eAAe,CAAE,GAAE,IAAI,CAAC,eACvB,YAAY,eAAe,IAAI,CAChC,CACF;AACD,UAAM;GACP;GAED,MAAMC,mBAAgC,KAAK,kBAAkB,OAAO;GACpE,MAAM,QAAQ,KACX,eAAe,CAAE,GAAE,IAAI,CAAC,YAAY,MACnC,YAAY,aAAa,iBAAiB,GAAG,CAC9C,CACF;EACF;EACD,MAAM,SAAS,aAAa,IAAI,CAAC,YAAY,QAAQ,MAAM,IAAI;EAI/D,OAAO,eAAe,QAAQ,SAAS;GACrC,OAAO,SAAS,EAAE,OAAQ,IAAG;GAC7B,cAAc;EACf,EAAC;AACF,SAAO;CACR;CAED,MAAM,gBAAgB,EACpB,SACA,OACA,cACA,eACA,gBACA,OASD,EAKC;EACA,MAAM,mBAAmB,MAAM,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf,KAAK,UACL,EAAE,SAAS,KAAK,QAAS,EAC1B;EACD,MAAM,QAAQ;GACZ,SAAS;GACT,mBAAmB,MAAM,iBAAiB,cAAc;GACxD,YAAY,QAAQ;EACrB;EACD,MAAM,cAAc,MAAM,kBAAkB,eAC1C,KAAK,QAAQ,EACb,SACA,OACA,QACA,OACA,QACA,QACA,gBAAgB,QACjB;EAGD,MAAMC,uBAAiC,CAAE;EACzC,MAAM,UAAU,MAAM,QAAQ,WAC5B,QAAQ,IAAI,OAAO,QAAQ,UAAU;GACnC,MAAM,SAAS,MAAM,MAAM,OAAO,QAAQ,aAAa;AACvD,OAAI,UAAU,MACZ,qBAAqB,KAAK,MAAM;AAElC,UAAO;EACR,EAAC,CACH;EAID,MAAM,gBAAgB,QACnB,IAAI,CAAC,QAAQ,WAAW;GAAE;GAAQ,YAAY,cAAc;EAAQ,GAAE,CACtE,OACC,CAAC,EAAE,QAAQ,KACR,OAAO,WAAW,eAAe,OAAO,SAAS,QAClD,OAAO,WAAW,WACrB;EAGH,MAAMC,cAA8B,CAAE;EACtC,MAAM,QAAQ,IACZ,cAAc,IAAI,OAAO,EAAE,QAAQ,eAAe,YAAY,EAAE,MAAM;AACpE,OAAI,cAAc,WAAW,aAAa;IACxC,MAAM,SAAS,cAAc;IAC7B,YAAY,KAAK,OAAO,IAAI,CAACC,aAAW;KACtCA,SAAO,iBAAiB;MACtB,GAAGA,SAAO;MACV,YAAY,CAAE;KACf;AACD,YAAOA;IACR,EAAC;AACF,QAAI,OAAO,QACT,MAAM,YAAY,kBAAkB,OAAO,GAAG,KAAK;AAErD,WAAO,YAAY,aACjB,EACE,aAAa,CAAC,MAAO,EACtB,GACD,QACA,QACA,QACA,EACE,QAAQ,KACT,EACF;GACF,OAAM;IAEL,MAAM,YAAY,eAChB,cAAc,QACd,QACA,QACA,QACA,EACE,QAAQ,KACT,EACF;AACD,WAAO,QAAQ,OAAO,cAAc,OAAO;GAC5C;EACF,EAAC,CACH;EAED,MAAM,SAAS;GACb;GACA;GACA,oBAAoB;EACrB;EAKD,OAAO,eAAe,QAAQ,SAAS;GACrC,OAAO,cACH,EAAE,QAAQ,aAAa,IAAI,CAAC,YAAY,QAAQ,MAAM,CAAE,IACxD;GACJ,cAAc;EACf,EAAC;AAEF,SAAO;CACR;;;;CAKD,MAAM,SACJZ,SACAF,SACAC,WACoB;AACpB,MAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM;EAGlB,IAAIc;AACJ,MAAI,MAAM,QAAQ,QAAQ,EACxB,gBAAgB,EAAE,MAAM,QAAS;OAEjC,gBAAgB;EAGlB,MAAM,CAAC,gBAAgB,YAAY,GACjC,KAAK,6CAA6C,cAAc;EAClE,eAAe,YAAY,eAAe,aAAa;AAEvD,MAAI,CAAC,KAAK,MACR,QAAO,KAAK,kBAAkB,SAAS,aAAa,eAAe;EAGrE,MAAM,EAAE,OAAO,GAAG;EAClB,MAAM,eAAe,KAAK,wCACxB,YACD;EACD,MAAM,EAAE,aAAa,sBAAsB,oBAAoB,GAC7D,MAAM,KAAK,gBAAgB;GACzB;GACA;GACA;GACA,eAAe;GACf,gBAAgB;GAChB,OAAO,eAAe;EACvB,EAAC;EAEJ,IAAI,YAAY,CAAE;AAClB,MAAI,qBAAqB,SAAS,GAAG;GACnC,MAAM,UAAU,MAAM,KAAK,kBACzB,qBAAqB,IAAI,CAAC,MAAM,QAAQ,GAAG,EAC3C,aACA,gBACA,uBAAuB,SACnB,qBAAqB,IAAI,CAAC,MAAM,qBAAqB,GAAG,GACxD,OACL;GACD,MAAM,QAAQ,IACZ,QAAQ,YAAY,IAAI,OAAO,YAAY,UAAU;IACnD,MAAM,cAAc,qBAAqB;IACzC,YAAY,eAAe;AAC3B,WAAO,MAAM,OAAO,QAAQ,cAAc,cAAc,WAAW;GACpE,EAAC,CACH;GACD,YAAY,QAAQ,aAAa,CAAE;EACpC;AAED,SAAO;GAAE;GAAa;EAAW;CAClC;;;;CAMD,qBAA0C;AACxC,SAAO,CAAE;CACV;CAOD,aAAqB;AACnB,SAAO;CACR;AACF;;;;;;;;AASD,IAAsB,MAAtB,cAEU,QAAqB;CAU7B,MAAM,UACJb,SACAc,SACAC,YACoB;EACpB,MAAMJ,cAA8B,MAAM,QAAQ,IAChD,QAAQ,IAAI,CAAC,QAAQ,gBACnB,KAAK,MAAM,QAAQ;GAAE,GAAG;GAAS;EAAa,GAAE,WAAW,CAAC,KAC1D,CAAC,SAAS
,CAAC,EAAE,KAAM,CAAC,EACrB,CACF,CACF;AACD,SAAO,EAAE,YAAa;CACvB;AACF"}
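The map above embeds the full source of the rewritten `language_models/llms.js`, including the `BaseLLM`/`LLM` base classes. For orientation only, here is a minimal sketch of how that API is consumed; `EchoLLM` is a hypothetical subclass and not part of the package. Per the embedded source, the `LLM` helper only requires `_call()` and `_llmType()`, and `invoke()` routes through `generatePrompt()`/`generate()`.

import { LLM, type BaseLLMCallOptions } from "@langchain/core/language_models/llms";

// Hypothetical toy model, used purely to illustrate the subclassing contract.
class EchoLLM extends LLM<BaseLLMCallOptions> {
  _llmType(): string {
    return "echo";
  }

  // LLM._generate() calls _call() once per prompt and wraps the returned
  // strings into { generations } for BaseLLM.generate().
  async _call(prompt: string): Promise<string> {
    return `echo: ${prompt}`;
  }
}

const model = new EchoLLM({});
// invoke() converts the input to a prompt value and returns
// result.generations[0][0].text — here "echo: hello".
const text = await model.invoke("hello");
console.log(text);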
package/dist/load/import_map.cjs
CHANGED
@@ -45,16 +45,13 @@ const require_output_parsers_index = require('../output_parsers/index.cjs');
 const require_output_parsers_openai_tools_index = require('../output_parsers/openai_tools/index.cjs');
 const require_output_parsers_openai_functions_index = require('../output_parsers/openai_functions/index.cjs');
 const require_prompts_index = require('../prompts/index.cjs');
-const require_utils_event_source_parse = require('../utils/event_source_parse.cjs');
-const require_runnables_remote = require('../runnables/remote.cjs');
 const require_retrievers_document_compressors_base = require('../retrievers/document_compressors/base.cjs');
 const require_structured_query_index = require('../structured_query/index.cjs');
 const require_tools_index = require('../tools/index.cjs');
-const require_tracers_tracer_langchain_v1 = require('../tracers/tracer_langchain_v1.cjs');
-const require_tracers_initialize = require('../tracers/initialize.cjs');
 const require_tracers_run_collector = require('../tracers/run_collector.cjs');
 const require_types_stream = require('../types/stream.cjs');
 const require_utils_chunk_array = require('../utils/chunk_array.cjs');
+const require_utils_event_source_parse = require('../utils/event_source_parse.cjs');
 const require_utils_function_calling = require('../utils/function_calling.cjs');
 const require_utils_math = require('../utils/math.cjs');
 const require_utils_testing_index = require('../utils/testing/index.cjs');
@@ -96,18 +93,15 @@ require_rolldown_runtime.__export(import_map_exports, {
 retrievers__document_compressors: () => require_retrievers_document_compressors_base.base_exports,
 runnables: () => require_runnables_index.runnables_exports,
 runnables__graph: () => require_runnables_graph.graph_exports,
-runnables__remote: () => require_runnables_remote.remote_exports,
 singletons: () => require_singletons_index.singletons_exports,
 stores: () => require_stores.stores_exports,
 structured_query: () => require_structured_query_index.structured_query_exports,
 tools: () => require_tools_index.tools_exports,
 tracers__base: () => require_tracers_base.base_exports,
 tracers__console: () => require_tracers_console.console_exports,
-tracers__initialize: () => require_tracers_initialize.initialize_exports,
 tracers__log_stream: () => require_tracers_log_stream.log_stream_exports,
 tracers__run_collector: () => require_tracers_run_collector.run_collector_exports,
 tracers__tracer_langchain: () => require_tracers_tracer_langchain.tracer_langchain_exports,
-tracers__tracer_langchain_v1: () => require_tracers_tracer_langchain_v1.tracer_langchain_v1_exports,
 types__stream: () => require_types_stream.stream_exports,
 utils__async_caller: () => require_utils_async_caller.async_caller_exports,
 utils__chunk_array: () => require_utils_chunk_array.chunk_array_exports,
@@ -1 +1 @@
-
{"version":3,"file":"import_map.cjs","names":[],"sources":["../../src/load/import_map.ts"],"sourcesContent":["// Auto-generated by import-map plugin. Do not edit manually.\n\nexport * as index from \"../index.js\";\nexport * as agents from \"../agents.js\";\nexport * as caches from \"../caches/base.js\";\nexport * as callbacks__base from \"../callbacks/base.js\";\nexport * as callbacks__dispatch from \"../callbacks/dispatch/index.js\";\nexport * as callbacks__dispatch__web from \"../callbacks/dispatch/web.js\";\nexport * as callbacks__manager from \"../callbacks/manager.js\";\nexport * as callbacks__promises from \"../callbacks/promises.js\";\nexport * as chat_history from \"../chat_history.js\";\nexport * as context from \"../context.js\";\nexport * as documents from \"../documents/index.js\";\nexport * as document_loaders__base from \"../document_loaders/base.js\";\nexport * as document_loaders__langsmith from \"../document_loaders/langsmith.js\";\nexport * as embeddings from \"../embeddings.js\";\nexport * as example_selectors from \"../example_selectors/index.js\";\nexport * as indexing from \"../indexing/index.js\";\nexport * as language_models__base from \"../language_models/base.js\";\nexport * as language_models__chat_models from \"../language_models/chat_models.js\";\nexport * as language_models__llms from \"../language_models/llms.js\";\nexport * as load__serializable from \"../load/serializable.js\";\nexport * as memory from \"../memory.js\";\nexport * as messages from \"../messages/index.js\";\nexport * as messages__tool from \"../messages/tool.js\";\nexport * as output_parsers from \"../output_parsers/index.js\";\nexport * as output_parsers__openai_tools from \"../output_parsers/openai_tools/index.js\";\nexport * as output_parsers__openai_functions from \"../output_parsers/openai_functions/index.js\";\nexport * as outputs from \"../outputs.js\";\nexport * as prompts from \"../prompts/index.js\";\nexport * as prompt_values from \"../prompt_values.js\";\nexport * as runnables from \"../runnables/index.js\";\nexport * as runnables__graph from \"../runnables/graph.js\";\nexport * as
+
{"version":3,"file":"import_map.cjs","names":[],"sources":["../../src/load/import_map.ts"],"sourcesContent":["// Auto-generated by import-map plugin. Do not edit manually.\n\nexport * as index from \"../index.js\";\nexport * as agents from \"../agents.js\";\nexport * as caches from \"../caches/base.js\";\nexport * as callbacks__base from \"../callbacks/base.js\";\nexport * as callbacks__dispatch from \"../callbacks/dispatch/index.js\";\nexport * as callbacks__dispatch__web from \"../callbacks/dispatch/web.js\";\nexport * as callbacks__manager from \"../callbacks/manager.js\";\nexport * as callbacks__promises from \"../callbacks/promises.js\";\nexport * as chat_history from \"../chat_history.js\";\nexport * as context from \"../context.js\";\nexport * as documents from \"../documents/index.js\";\nexport * as document_loaders__base from \"../document_loaders/base.js\";\nexport * as document_loaders__langsmith from \"../document_loaders/langsmith.js\";\nexport * as embeddings from \"../embeddings.js\";\nexport * as example_selectors from \"../example_selectors/index.js\";\nexport * as indexing from \"../indexing/index.js\";\nexport * as language_models__base from \"../language_models/base.js\";\nexport * as language_models__chat_models from \"../language_models/chat_models.js\";\nexport * as language_models__llms from \"../language_models/llms.js\";\nexport * as load__serializable from \"../load/serializable.js\";\nexport * as memory from \"../memory.js\";\nexport * as messages from \"../messages/index.js\";\nexport * as messages__tool from \"../messages/tool.js\";\nexport * as output_parsers from \"../output_parsers/index.js\";\nexport * as output_parsers__openai_tools from \"../output_parsers/openai_tools/index.js\";\nexport * as output_parsers__openai_functions from \"../output_parsers/openai_functions/index.js\";\nexport * as outputs from \"../outputs.js\";\nexport * as prompts from \"../prompts/index.js\";\nexport * as prompt_values from \"../prompt_values.js\";\nexport * as runnables from \"../runnables/index.js\";\nexport * as runnables__graph from \"../runnables/graph.js\";\nexport * as retrievers from \"../retrievers/index.js\";\nexport * as retrievers__document_compressors from \"../retrievers/document_compressors/base.js\";\nexport * as singletons from \"../singletons/index.js\";\nexport * as stores from \"../stores.js\";\nexport * as structured_query from \"../structured_query/index.js\";\nexport * as tools from \"../tools/index.js\";\nexport * as tracers__base from \"../tracers/base.js\";\nexport * as tracers__console from \"../tracers/console.js\";\nexport * as tracers__log_stream from \"../tracers/log_stream.js\";\nexport * as tracers__run_collector from \"../tracers/run_collector.js\";\nexport * as tracers__tracer_langchain from \"../tracers/tracer_langchain.js\";\nexport * as types__stream from \"../types/stream.js\";\nexport * as utils__async_caller from \"../utils/async_caller.js\";\nexport * as utils__chunk_array from \"../utils/chunk_array.js\";\nexport * as utils__env from \"../utils/env.js\";\nexport * as utils__event_source_parse from \"../utils/event_source_parse.js\";\nexport * as utils__function_calling from \"../utils/function_calling.js\";\nexport * as utils__hash from \"../utils/hash.js\";\nexport * as utils__json_patch from \"../utils/json_patch.js\";\nexport * as utils__json_schema from \"../utils/json_schema.js\";\nexport * as utils__math from \"../utils/math.js\";\nexport * as utils__stream from \"../utils/stream.js\";\nexport * as utils__testing from 
\"../utils/testing/index.js\";\nexport * as utils__tiktoken from \"../utils/tiktoken.js\";\nexport * as utils__types from \"../utils/types/index.js\";\nexport * as vectorstores from \"../vectorstores.js\";\n"],"mappings":""}
package/dist/load/import_map.js
CHANGED
@@ -45,16 +45,13 @@ import { output_parsers_exports } from "../output_parsers/index.js";
 import { openai_tools_exports } from "../output_parsers/openai_tools/index.js";
 import { openai_functions_exports } from "../output_parsers/openai_functions/index.js";
 import { prompts_exports } from "../prompts/index.js";
-import { event_source_parse_exports } from "../utils/event_source_parse.js";
-import { remote_exports } from "../runnables/remote.js";
 import { base_exports as base_exports$5 } from "../retrievers/document_compressors/base.js";
 import { structured_query_exports } from "../structured_query/index.js";
 import { tools_exports } from "../tools/index.js";
-import { tracer_langchain_v1_exports } from "../tracers/tracer_langchain_v1.js";
-import { initialize_exports } from "../tracers/initialize.js";
 import { run_collector_exports } from "../tracers/run_collector.js";
 import { stream_exports as stream_exports$1 } from "../types/stream.js";
 import { chunk_array_exports } from "../utils/chunk_array.js";
+import { event_source_parse_exports } from "../utils/event_source_parse.js";
 import { function_calling_exports } from "../utils/function_calling.js";
 import { math_exports } from "../utils/math.js";
 import { testing_exports } from "../utils/testing/index.js";
@@ -96,18 +93,15 @@ __export(import_map_exports, {
 retrievers__document_compressors: () => base_exports$5,
 runnables: () => runnables_exports,
 runnables__graph: () => graph_exports,
-runnables__remote: () => remote_exports,
 singletons: () => singletons_exports,
 stores: () => stores_exports,
 structured_query: () => structured_query_exports,
 tools: () => tools_exports,
 tracers__base: () => base_exports$1,
 tracers__console: () => console_exports,
-tracers__initialize: () => initialize_exports,
 tracers__log_stream: () => log_stream_exports,
 tracers__run_collector: () => run_collector_exports,
 tracers__tracer_langchain: () => tracer_langchain_exports,
-tracers__tracer_langchain_v1: () => tracer_langchain_v1_exports,
 types__stream: () => stream_exports$1,
 utils__async_caller: () => async_caller_exports,
 utils__chunk_array: () => chunk_array_exports,
@@ -1 +1 @@
-
{"version":3,"file":"import_map.js","names":[],"sources":["../../src/load/import_map.ts"],"sourcesContent":["// Auto-generated by import-map plugin. Do not edit manually.\n\nexport * as index from \"../index.js\";\nexport * as agents from \"../agents.js\";\nexport * as caches from \"../caches/base.js\";\nexport * as callbacks__base from \"../callbacks/base.js\";\nexport * as callbacks__dispatch from \"../callbacks/dispatch/index.js\";\nexport * as callbacks__dispatch__web from \"../callbacks/dispatch/web.js\";\nexport * as callbacks__manager from \"../callbacks/manager.js\";\nexport * as callbacks__promises from \"../callbacks/promises.js\";\nexport * as chat_history from \"../chat_history.js\";\nexport * as context from \"../context.js\";\nexport * as documents from \"../documents/index.js\";\nexport * as document_loaders__base from \"../document_loaders/base.js\";\nexport * as document_loaders__langsmith from \"../document_loaders/langsmith.js\";\nexport * as embeddings from \"../embeddings.js\";\nexport * as example_selectors from \"../example_selectors/index.js\";\nexport * as indexing from \"../indexing/index.js\";\nexport * as language_models__base from \"../language_models/base.js\";\nexport * as language_models__chat_models from \"../language_models/chat_models.js\";\nexport * as language_models__llms from \"../language_models/llms.js\";\nexport * as load__serializable from \"../load/serializable.js\";\nexport * as memory from \"../memory.js\";\nexport * as messages from \"../messages/index.js\";\nexport * as messages__tool from \"../messages/tool.js\";\nexport * as output_parsers from \"../output_parsers/index.js\";\nexport * as output_parsers__openai_tools from \"../output_parsers/openai_tools/index.js\";\nexport * as output_parsers__openai_functions from \"../output_parsers/openai_functions/index.js\";\nexport * as outputs from \"../outputs.js\";\nexport * as prompts from \"../prompts/index.js\";\nexport * as prompt_values from \"../prompt_values.js\";\nexport * as runnables from \"../runnables/index.js\";\nexport * as runnables__graph from \"../runnables/graph.js\";\nexport * as
+
{"version":3,"file":"import_map.js","names":[],"sources":["../../src/load/import_map.ts"],"sourcesContent":["// Auto-generated by import-map plugin. Do not edit manually.\n\nexport * as index from \"../index.js\";\nexport * as agents from \"../agents.js\";\nexport * as caches from \"../caches/base.js\";\nexport * as callbacks__base from \"../callbacks/base.js\";\nexport * as callbacks__dispatch from \"../callbacks/dispatch/index.js\";\nexport * as callbacks__dispatch__web from \"../callbacks/dispatch/web.js\";\nexport * as callbacks__manager from \"../callbacks/manager.js\";\nexport * as callbacks__promises from \"../callbacks/promises.js\";\nexport * as chat_history from \"../chat_history.js\";\nexport * as context from \"../context.js\";\nexport * as documents from \"../documents/index.js\";\nexport * as document_loaders__base from \"../document_loaders/base.js\";\nexport * as document_loaders__langsmith from \"../document_loaders/langsmith.js\";\nexport * as embeddings from \"../embeddings.js\";\nexport * as example_selectors from \"../example_selectors/index.js\";\nexport * as indexing from \"../indexing/index.js\";\nexport * as language_models__base from \"../language_models/base.js\";\nexport * as language_models__chat_models from \"../language_models/chat_models.js\";\nexport * as language_models__llms from \"../language_models/llms.js\";\nexport * as load__serializable from \"../load/serializable.js\";\nexport * as memory from \"../memory.js\";\nexport * as messages from \"../messages/index.js\";\nexport * as messages__tool from \"../messages/tool.js\";\nexport * as output_parsers from \"../output_parsers/index.js\";\nexport * as output_parsers__openai_tools from \"../output_parsers/openai_tools/index.js\";\nexport * as output_parsers__openai_functions from \"../output_parsers/openai_functions/index.js\";\nexport * as outputs from \"../outputs.js\";\nexport * as prompts from \"../prompts/index.js\";\nexport * as prompt_values from \"../prompt_values.js\";\nexport * as runnables from \"../runnables/index.js\";\nexport * as runnables__graph from \"../runnables/graph.js\";\nexport * as retrievers from \"../retrievers/index.js\";\nexport * as retrievers__document_compressors from \"../retrievers/document_compressors/base.js\";\nexport * as singletons from \"../singletons/index.js\";\nexport * as stores from \"../stores.js\";\nexport * as structured_query from \"../structured_query/index.js\";\nexport * as tools from \"../tools/index.js\";\nexport * as tracers__base from \"../tracers/base.js\";\nexport * as tracers__console from \"../tracers/console.js\";\nexport * as tracers__log_stream from \"../tracers/log_stream.js\";\nexport * as tracers__run_collector from \"../tracers/run_collector.js\";\nexport * as tracers__tracer_langchain from \"../tracers/tracer_langchain.js\";\nexport * as types__stream from \"../types/stream.js\";\nexport * as utils__async_caller from \"../utils/async_caller.js\";\nexport * as utils__chunk_array from \"../utils/chunk_array.js\";\nexport * as utils__env from \"../utils/env.js\";\nexport * as utils__event_source_parse from \"../utils/event_source_parse.js\";\nexport * as utils__function_calling from \"../utils/function_calling.js\";\nexport * as utils__hash from \"../utils/hash.js\";\nexport * as utils__json_patch from \"../utils/json_patch.js\";\nexport * as utils__json_schema from \"../utils/json_schema.js\";\nexport * as utils__math from \"../utils/math.js\";\nexport * as utils__stream from \"../utils/stream.js\";\nexport * as utils__testing from 
\"../utils/testing/index.js\";\nexport * as utils__tiktoken from \"../utils/tiktoken.js\";\nexport * as utils__types from \"../utils/types/index.js\";\nexport * as vectorstores from \"../vectorstores.js\";\n"],"mappings":""}
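Both the CJS and ESM import maps above drop the `runnables__remote`, `tracers__initialize`, and `tracers__tracer_langchain_v1` entries (and move `utils__event_source_parse` later in the list). As a rough sketch only — not the real module, just the shape the generated map takes — the package's serialization/`load` machinery resolves namespace keys through a plain record, so lookups for the removed keys now come back empty.

// Illustrative shape only; keys and export names mirror the hunks above.
const importMap: Record<string, () => Record<string, unknown>> = {
  runnables: () => ({ /* runnables_exports */ }),
  runnables__graph: () => ({ /* graph_exports */ }),
  tracers__run_collector: () => ({ /* run_collector_exports */ }),
  // "runnables__remote", "tracers__initialize", and
  // "tracers__tracer_langchain_v1" are no longer registered in 1.0.0.
};

// A lookup for a removed entrypoint now yields undefined.
console.log(importMap["runnables__remote"]?.() ?? "not registered");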
package/dist/messages/ai.cjs
CHANGED
@@ -48,6 +48,10 @@ var AIMessage = class extends require_base.BaseMessage {
 initParams.tool_calls = [];
 initParams.invalid_tool_calls = [];
 }
+if (initParams.response_metadata !== void 0 && "output_version" in initParams.response_metadata && initParams.response_metadata.output_version === "v1") {
+initParams.contentBlocks = initParams.content;
+initParams.content = void 0;
+}
 if (initParams.contentBlocks !== void 0) {
 initParams.contentBlocks.push(...initParams.tool_calls.map((toolCall) => ({
 type: "tool_call",
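The added branch above relocates `content` into `contentBlocks` (and clears `content`) whenever `response_metadata.output_version` is `"v1"`, before the existing tool-call handling runs. A minimal sketch of a message that would take this new path; the content-block shape here is simplified and shown only for illustration, and exact typing may vary by message structure.

import { AIMessage } from "@langchain/core/messages";

// With output_version "v1", the constructor treats `content` as an array of
// standard content blocks and moves it to `contentBlocks`.
const message = new AIMessage({
  content: [{ type: "text", text: "Hello!" }],
  response_metadata: { output_version: "v1" },
});

// Per the unchanged lines that follow the added branch, any tool calls passed
// to the constructor are then also appended to contentBlocks as tool_call blocks.
console.log(message.contentBlocks);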
package/dist/messages/ai.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"ai.cjs","names":["BaseMessage","fields: $InferMessageContent<TStructure, \"ai\"> | AIMessageFields<TStructure>","initParams: AIMessageFields<TStructure>","toolCalls","defaultToolCallParser","getTranslator","obj: unknown","x: BaseMessage","x: BaseMessageChunk","BaseMessageChunk","fields:\n | $InferMessageContent<TStructure, \"ai\">\n | AIMessageChunkFields<TStructure>","initParams: AIMessageChunkFields<TStructure>","toolCalls: ToolCall[]","invalidToolCalls: InvalidToolCall[]","parsedArgs: Record<string, unknown> | null","parsePartialJson","chunk: AIMessageChunk<TStructure>","combinedFields: AIMessageChunkFields","mergeContent","_mergeDicts","mergeResponseMetadata","_mergeLists","mergeUsageMetadata"],"sources":["../../src/messages/ai.ts"],"sourcesContent":["import { parsePartialJson } from \"../utils/json.js\";\nimport {\n BaseMessage,\n BaseMessageChunk,\n mergeContent,\n _mergeDicts,\n _mergeLists,\n BaseMessageFields,\n} from \"./base.js\";\nimport { getTranslator } from \"./block_translators/index.js\";\nimport { ContentBlock } from \"./content/index.js\";\nimport {\n $InferMessageContent,\n $InferMessageProperty,\n MessageStructure,\n} from \"./message.js\";\nimport { mergeResponseMetadata, mergeUsageMetadata } from \"./metadata.js\";\nimport {\n InvalidToolCall,\n ToolCall,\n ToolCallChunk,\n defaultToolCallParser,\n} from \"./tool.js\";\nimport { Constructor } from \"./utils.js\";\n\nexport interface AIMessageFields<\n TStructure extends MessageStructure = MessageStructure\n> extends BaseMessageFields<TStructure, \"ai\"> {\n tool_calls?: ToolCall[];\n invalid_tool_calls?: InvalidToolCall[];\n usage_metadata?: $InferMessageProperty<TStructure, \"ai\", \"usage_metadata\">;\n}\n\nexport class AIMessage<TStructure extends MessageStructure = MessageStructure>\n extends BaseMessage<TStructure, \"ai\">\n implements AIMessageFields<TStructure>\n{\n readonly type = \"ai\" as const;\n\n tool_calls?: ToolCall[] = [];\n\n invalid_tool_calls?: InvalidToolCall[] = [];\n\n usage_metadata?: AIMessageFields<TStructure>[\"usage_metadata\"];\n\n get lc_aliases(): Record<string, string> {\n // exclude snake case conversion to pascal case\n return {\n ...super.lc_aliases,\n tool_calls: \"tool_calls\",\n invalid_tool_calls: \"invalid_tool_calls\",\n };\n }\n\n constructor(\n fields: $InferMessageContent<TStructure, \"ai\"> | AIMessageFields<TStructure>\n ) {\n let initParams: AIMessageFields<TStructure>;\n if (typeof fields === \"string\" || Array.isArray(fields)) {\n initParams = {\n content: fields,\n tool_calls: [],\n invalid_tool_calls: [],\n additional_kwargs: {},\n };\n } else {\n initParams = fields;\n const rawToolCalls = initParams.additional_kwargs?.tool_calls;\n const toolCalls = initParams.tool_calls;\n if (\n !(rawToolCalls == null) &&\n rawToolCalls.length > 0 &&\n (toolCalls === undefined || toolCalls.length === 0)\n ) {\n console.warn(\n [\n \"New LangChain packages are available that more efficiently handle\",\n \"tool calling.\\n\\nPlease upgrade your packages to versions that set\",\n \"message tool calls. e.g., `pnpm install @langchain/anthropic`,\",\n \"pnpm install @langchain/openai`, etc.\",\n ].join(\" \")\n );\n }\n try {\n if (!(rawToolCalls == null) && toolCalls === undefined) {\n const [toolCalls, invalidToolCalls] =\n defaultToolCallParser(rawToolCalls);\n initParams.tool_calls = toolCalls ?? [];\n initParams.invalid_tool_calls = invalidToolCalls ?? [];\n } else {\n initParams.tool_calls = initParams.tool_calls ?? 
[];\n initParams.invalid_tool_calls = initParams.invalid_tool_calls ?? [];\n }\n } catch {\n // Do nothing if parsing fails\n initParams.tool_calls = [];\n initParams.invalid_tool_calls = [];\n }\n if (initParams.contentBlocks !== undefined) {\n // Add constructor tool calls as content blocks\n initParams.contentBlocks.push(\n ...initParams.tool_calls.map((toolCall) => ({\n type: \"tool_call\" as const,\n id: toolCall.id,\n name: toolCall.name,\n args: toolCall.args,\n }))\n );\n // Add content block tool calls that aren't in the constructor tool calls\n const missingToolCalls = initParams.contentBlocks\n .filter<ContentBlock.Tools.ToolCall>(\n (block): block is ContentBlock.Tools.ToolCall =>\n block.type === \"tool_call\"\n )\n .filter(\n (block) =>\n !initParams.tool_calls?.some(\n (toolCall) =>\n toolCall.id === block.id && toolCall.name === block.name\n )\n );\n if (missingToolCalls.length > 0) {\n initParams.tool_calls = missingToolCalls.map((block) => ({\n type: \"tool_call\" as const,\n id: block.id!,\n name: block.name,\n args: block.args as Record<string, unknown>,\n }));\n }\n }\n }\n // Sadly, TypeScript only allows super() calls at root if the class has\n // properties with initializers, so we have to check types twice.\n super(initParams);\n if (typeof initParams !== \"string\") {\n this.tool_calls = initParams.tool_calls ?? this.tool_calls;\n this.invalid_tool_calls =\n initParams.invalid_tool_calls ?? this.invalid_tool_calls;\n }\n this.usage_metadata = initParams.usage_metadata;\n }\n\n static lc_name() {\n return \"AIMessage\";\n }\n\n get contentBlocks(): Array<ContentBlock.Standard> {\n if (\n this.response_metadata &&\n \"output_version\" in this.response_metadata &&\n this.response_metadata.output_version === \"v1\"\n ) {\n return this.content as Array<ContentBlock.Standard>;\n }\n\n if (\n this.response_metadata &&\n \"model_provider\" in this.response_metadata &&\n typeof this.response_metadata.model_provider === \"string\"\n ) {\n const translator = getTranslator(this.response_metadata.model_provider);\n if (translator) {\n return translator.translateContent(this);\n }\n }\n\n const blocks = super.contentBlocks;\n\n if (this.tool_calls) {\n const missingToolCalls = this.tool_calls.filter(\n (block) =>\n !blocks.some((b) => b.id === block.id && b.name === block.name)\n );\n blocks.push(\n ...missingToolCalls.map((block) => ({\n ...block,\n type: \"tool_call\" as const,\n id: block.id,\n name: block.name,\n args: block.args,\n }))\n );\n }\n\n return blocks;\n }\n\n override get _printableFields(): Record<string, unknown> {\n return {\n ...super._printableFields,\n tool_calls: this.tool_calls,\n invalid_tool_calls: this.invalid_tool_calls,\n usage_metadata: this.usage_metadata,\n };\n }\n\n static isInstance(obj: unknown): obj is AIMessage {\n return super.isInstance(obj) && obj.type === \"ai\";\n }\n}\n\n/**\n * @deprecated Use {@link AIMessage.isInstance} instead\n */\nexport function isAIMessage<TStructure extends MessageStructure>(\n x: BaseMessage\n): x is AIMessage<TStructure> {\n return x._getType() === \"ai\";\n}\n\n/**\n * @deprecated Use {@link AIMessageChunk.isInstance} instead\n */\nexport function isAIMessageChunk<TStructure extends MessageStructure>(\n x: BaseMessageChunk\n): x is AIMessageChunk<TStructure> {\n return x._getType() === \"ai\";\n}\n\nexport type AIMessageChunkFields<\n TStructure extends MessageStructure = MessageStructure\n> = AIMessageFields<TStructure> & {\n tool_call_chunks?: ToolCallChunk[];\n};\n\n/**\n * Represents a chunk of an AI 
message, which can be concatenated with\n * other AI message chunks.\n */\nexport class AIMessageChunk<\n TStructure extends MessageStructure = MessageStructure\n >\n extends BaseMessageChunk<TStructure, \"ai\">\n implements AIMessage<TStructure>, AIMessageChunkFields<TStructure>\n{\n readonly type = \"ai\" as const;\n\n tool_calls?: ToolCall[] = [];\n\n invalid_tool_calls?: InvalidToolCall[] = [];\n\n tool_call_chunks?: ToolCallChunk[] = [];\n\n usage_metadata?: AIMessageChunkFields<TStructure>[\"usage_metadata\"];\n\n constructor(\n fields:\n | $InferMessageContent<TStructure, \"ai\">\n | AIMessageChunkFields<TStructure>\n ) {\n let initParams: AIMessageChunkFields<TStructure>;\n if (typeof fields === \"string\" || Array.isArray(fields)) {\n initParams = {\n content: fields,\n tool_calls: [],\n invalid_tool_calls: [],\n tool_call_chunks: [],\n };\n } else if (fields.tool_call_chunks === undefined) {\n initParams = {\n ...fields,\n tool_calls: fields.tool_calls ?? [],\n invalid_tool_calls: [],\n tool_call_chunks: [],\n usage_metadata:\n fields.usage_metadata !== undefined\n ? fields.usage_metadata\n : undefined,\n };\n } else {\n const groupedToolCallChunks = fields.tool_call_chunks.reduce(\n (acc, chunk) => {\n const matchedChunkIndex = acc.findIndex(([match]) => {\n // If chunk has an id and index, match if both are present\n if (\n \"id\" in chunk &&\n chunk.id &&\n \"index\" in chunk &&\n chunk.index !== undefined\n ) {\n return chunk.id === match.id && chunk.index === match.index;\n }\n // If chunk has an id, we match on id\n if (\"id\" in chunk && chunk.id) {\n return chunk.id === match.id;\n }\n // If chunk has an index, we match on index\n if (\"index\" in chunk && chunk.index !== undefined) {\n return chunk.index === match.index;\n }\n return false;\n });\n if (matchedChunkIndex !== -1) {\n acc[matchedChunkIndex].push(chunk);\n } else {\n acc.push([chunk]);\n }\n return acc;\n },\n [] as ToolCallChunk[][]\n );\n\n const toolCalls: ToolCall[] = [];\n const invalidToolCalls: InvalidToolCall[] = [];\n for (const chunks of groupedToolCallChunks) {\n let parsedArgs: Record<string, unknown> | null = null;\n const name = chunks[0]?.name ?? \"\";\n const joinedArgs = chunks.map((c) => c.args || \"\").join(\"\");\n const argsStr = joinedArgs.length ? joinedArgs : \"{}\";\n const id = chunks[0]?.id;\n try {\n parsedArgs = parsePartialJson(argsStr);\n if (\n !id ||\n parsedArgs === null ||\n typeof parsedArgs !== \"object\" ||\n Array.isArray(parsedArgs)\n ) {\n throw new Error(\"Malformed tool call chunk args.\");\n }\n toolCalls.push({\n name,\n args: parsedArgs,\n id,\n type: \"tool_call\",\n });\n } catch {\n invalidToolCalls.push({\n name,\n args: argsStr,\n id,\n error: \"Malformed args.\",\n type: \"invalid_tool_call\",\n });\n }\n }\n initParams = {\n ...fields,\n tool_calls: toolCalls,\n invalid_tool_calls: invalidToolCalls,\n usage_metadata:\n fields.usage_metadata !== undefined\n ? fields.usage_metadata\n : undefined,\n };\n }\n // Sadly, TypeScript only allows super() calls at root if the class has\n // properties with initializers, so we have to check types twice.\n super(initParams);\n this.tool_call_chunks =\n initParams.tool_call_chunks ?? this.tool_call_chunks;\n this.tool_calls = initParams.tool_calls ?? this.tool_calls;\n this.invalid_tool_calls =\n initParams.invalid_tool_calls ?? 
this.invalid_tool_calls;\n this.usage_metadata = initParams.usage_metadata;\n }\n\n get lc_aliases(): Record<string, string> {\n // exclude snake case conversion to pascal case\n return {\n ...super.lc_aliases,\n tool_calls: \"tool_calls\",\n invalid_tool_calls: \"invalid_tool_calls\",\n tool_call_chunks: \"tool_call_chunks\",\n };\n }\n\n static lc_name() {\n return \"AIMessageChunk\";\n }\n\n get contentBlocks(): Array<ContentBlock.Standard> {\n if (\n this.response_metadata &&\n \"output_version\" in this.response_metadata &&\n this.response_metadata.output_version === \"v1\"\n ) {\n return this.content as Array<ContentBlock.Standard>;\n }\n\n if (\n this.response_metadata &&\n \"model_provider\" in this.response_metadata &&\n typeof this.response_metadata.model_provider === \"string\"\n ) {\n const translator = getTranslator(this.response_metadata.model_provider);\n if (translator) {\n return translator.translateContent(this);\n }\n }\n\n const blocks = super.contentBlocks;\n\n if (this.tool_calls) {\n if (typeof this.content !== \"string\") {\n const contentToolCalls = this.content\n .filter((block) => block.type === \"tool_call\")\n .map((block) => block.id);\n for (const toolCall of this.tool_calls) {\n if (toolCall.id && !contentToolCalls.includes(toolCall.id)) {\n blocks.push({\n ...toolCall,\n type: \"tool_call\",\n id: toolCall.id,\n name: toolCall.name,\n args: toolCall.args,\n });\n }\n }\n }\n }\n\n return blocks;\n }\n\n override get _printableFields(): Record<string, unknown> {\n return {\n ...super._printableFields,\n tool_calls: this.tool_calls,\n tool_call_chunks: this.tool_call_chunks,\n invalid_tool_calls: this.invalid_tool_calls,\n usage_metadata: this.usage_metadata,\n };\n }\n\n concat(chunk: AIMessageChunk<TStructure>) {\n const combinedFields: AIMessageChunkFields = {\n content: mergeContent(this.content, chunk.content),\n additional_kwargs: _mergeDicts(\n this.additional_kwargs,\n chunk.additional_kwargs\n ),\n response_metadata: mergeResponseMetadata(\n this.response_metadata,\n chunk.response_metadata\n ),\n tool_call_chunks: [],\n id: this.id ?? 
chunk.id,\n };\n if (\n this.tool_call_chunks !== undefined ||\n chunk.tool_call_chunks !== undefined\n ) {\n const rawToolCalls = _mergeLists(\n this.tool_call_chunks as ContentBlock.Tools.ToolCallChunk[],\n chunk.tool_call_chunks as ContentBlock.Tools.ToolCallChunk[]\n );\n if (rawToolCalls !== undefined && rawToolCalls.length > 0) {\n combinedFields.tool_call_chunks = rawToolCalls;\n }\n }\n if (\n this.usage_metadata !== undefined ||\n chunk.usage_metadata !== undefined\n ) {\n combinedFields.usage_metadata = mergeUsageMetadata(\n this.usage_metadata,\n chunk.usage_metadata\n );\n }\n const Cls = this.constructor as Constructor<this>;\n return new Cls(combinedFields);\n }\n\n static isInstance(obj: unknown): obj is AIMessageChunk {\n return super.isInstance(obj) && obj.type === \"ai\";\n }\n}\n"],"mappings":";;;;;;;AAiCA,IAAa,YAAb,cACUA,yBAEV;CACE,AAAS,OAAO;CAEhB,aAA0B,CAAE;CAE5B,qBAAyC,CAAE;CAE3C;CAEA,IAAI,aAAqC;AAEvC,SAAO;GACL,GAAG,MAAM;GACT,YAAY;GACZ,oBAAoB;EACrB;CACF;CAED,YACEC,QACA;EACA,IAAIC;AACJ,MAAI,OAAO,WAAW,YAAY,MAAM,QAAQ,OAAO,EACrD,aAAa;GACX,SAAS;GACT,YAAY,CAAE;GACd,oBAAoB,CAAE;GACtB,mBAAmB,CAAE;EACtB;OACI;GACL,aAAa;GACb,MAAM,eAAe,WAAW,mBAAmB;GACnD,MAAM,YAAY,WAAW;AAC7B,OACE,EAAE,gBAAgB,SAClB,aAAa,SAAS,MACrB,cAAc,UAAa,UAAU,WAAW,IAEjD,QAAQ,KACN;IACE;IACA;IACA;IACA;GACD,EAAC,KAAK,IAAI,CACZ;AAEH,OAAI;AACF,QAAI,EAAE,gBAAgB,SAAS,cAAc,QAAW;KACtD,MAAM,CAACC,aAAW,iBAAiB,GACjCC,4CAAsB,aAAa;KACrC,WAAW,aAAaD,eAAa,CAAE;KACvC,WAAW,qBAAqB,oBAAoB,CAAE;IACvD,OAAM;KACL,WAAW,aAAa,WAAW,cAAc,CAAE;KACnD,WAAW,qBAAqB,WAAW,sBAAsB,CAAE;IACpE;GACF,QAAO;IAEN,WAAW,aAAa,CAAE;IAC1B,WAAW,qBAAqB,CAAE;GACnC;AACD,OAAI,WAAW,kBAAkB,QAAW;IAE1C,WAAW,cAAc,KACvB,GAAG,WAAW,WAAW,IAAI,CAAC,cAAc;KAC1C,MAAM;KACN,IAAI,SAAS;KACb,MAAM,SAAS;KACf,MAAM,SAAS;IAChB,GAAE,CACJ;IAED,MAAM,mBAAmB,WAAW,cACjC,OACC,CAAC,UACC,MAAM,SAAS,YAClB,CACA,OACC,CAAC,UACC,CAAC,WAAW,YAAY,KACtB,CAAC,aACC,SAAS,OAAO,MAAM,MAAM,SAAS,SAAS,MAAM,KACvD,CACJ;AACH,QAAI,iBAAiB,SAAS,GAC5B,WAAW,aAAa,iBAAiB,IAAI,CAAC,WAAW;KACvD,MAAM;KACN,IAAI,MAAM;KACV,MAAM,MAAM;KACZ,MAAM,MAAM;IACb,GAAE;GAEN;EACF;EAGD,MAAM,WAAW;AACjB,MAAI,OAAO,eAAe,UAAU;GAClC,KAAK,aAAa,WAAW,cAAc,KAAK;GAChD,KAAK,qBACH,WAAW,sBAAsB,KAAK;EACzC;EACD,KAAK,iBAAiB,WAAW;CAClC;CAED,OAAO,UAAU;AACf,SAAO;CACR;CAED,IAAI,gBAA8C;AAChD,MACE,KAAK,qBACL,oBAAoB,KAAK,qBACzB,KAAK,kBAAkB,mBAAmB,KAE1C,QAAO,KAAK;AAGd,MACE,KAAK,qBACL,oBAAoB,KAAK,qBACzB,OAAO,KAAK,kBAAkB,mBAAmB,UACjD;GACA,MAAM,aAAaE,4BAAc,KAAK,kBAAkB,eAAe;AACvE,OAAI,WACF,QAAO,WAAW,iBAAiB,KAAK;EAE3C;EAED,MAAM,SAAS,MAAM;AAErB,MAAI,KAAK,YAAY;GACnB,MAAM,mBAAmB,KAAK,WAAW,OACvC,CAAC,UACC,CAAC,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,MAAM,MAAM,EAAE,SAAS,MAAM,KAAK,CAClE;GACD,OAAO,KACL,GAAG,iBAAiB,IAAI,CAAC,WAAW;IAClC,GAAG;IACH,MAAM;IACN,IAAI,MAAM;IACV,MAAM,MAAM;IACZ,MAAM,MAAM;GACb,GAAE,CACJ;EACF;AAED,SAAO;CACR;CAED,IAAa,mBAA4C;AACvD,SAAO;GACL,GAAG,MAAM;GACT,YAAY,KAAK;GACjB,oBAAoB,KAAK;GACzB,gBAAgB,KAAK;EACtB;CACF;CAED,OAAO,WAAWC,KAAgC;AAChD,SAAO,MAAM,WAAW,IAAI,IAAI,IAAI,SAAS;CAC9C;AACF;;;;AAKD,SAAgB,YACdC,GAC4B;AAC5B,QAAO,EAAE,UAAU,KAAK;AACzB;;;;AAKD,SAAgB,iBACdC,GACiC;AACjC,QAAO,EAAE,UAAU,KAAK;AACzB;;;;;AAYD,IAAa,iBAAb,cAGUC,8BAEV;CACE,AAAS,OAAO;CAEhB,aAA0B,CAAE;CAE5B,qBAAyC,CAAE;CAE3C,mBAAqC,CAAE;CAEvC;CAEA,YACEC,QAGA;EACA,IAAIC;AACJ,MAAI,OAAO,WAAW,YAAY,MAAM,QAAQ,OAAO,EACrD,aAAa;GACX,SAAS;GACT,YAAY,CAAE;GACd,oBAAoB,CAAE;GACtB,kBAAkB,CAAE;EACrB;WACQ,OAAO,qBAAqB,QACrC,aAAa;GACX,GAAG;GACH,YAAY,OAAO,cAAc,CAAE;GACnC,oBAAoB,CAAE;GACtB,kBAAkB,CAAE;GACpB,gBACE,OAAO,mBAAmB,SACtB,OAAO,iBACP;EACP;OACI;GACL,MAAM,wBAAwB,OAAO,iBAAiB,OACpD,CAAC,KAAK,UAAU;IACd,MAAM,oBAAoB,IAAI,UAAU,CAAC,CAA
C,MAAM,KAAK;AAEnD,SACE,QAAQ,SACR,MAAM,MACN,WAAW,SACX,MAAM,UAAU,OAEhB,QAAO,MAAM,OAAO,MAAM,MAAM,MAAM,UAAU,MAAM;AAGxD,SAAI,QAAQ,SAAS,MAAM,GACzB,QAAO,MAAM,OAAO,MAAM;AAG5B,SAAI,WAAW,SAAS,MAAM,UAAU,OACtC,QAAO,MAAM,UAAU,MAAM;AAE/B,YAAO;IACR,EAAC;AACF,QAAI,sBAAsB,IACxB,IAAI,mBAAmB,KAAK,MAAM;SAElC,IAAI,KAAK,CAAC,KAAM,EAAC;AAEnB,WAAO;GACR,GACD,CAAE,EACH;GAED,MAAMC,YAAwB,CAAE;GAChC,MAAMC,mBAAsC,CAAE;AAC9C,QAAK,MAAM,UAAU,uBAAuB;IAC1C,IAAIC,aAA6C;IACjD,MAAM,OAAO,OAAO,IAAI,QAAQ;IAChC,MAAM,aAAa,OAAO,IAAI,CAAC,MAAM,EAAE,QAAQ,GAAG,CAAC,KAAK,GAAG;IAC3D,MAAM,UAAU,WAAW,SAAS,aAAa;IACjD,MAAM,KAAK,OAAO,IAAI;AACtB,QAAI;KACF,aAAaC,8BAAiB,QAAQ;AACtC,SACE,CAAC,MACD,eAAe,QACf,OAAO,eAAe,YACtB,MAAM,QAAQ,WAAW,CAEzB,OAAM,IAAI,MAAM;KAElB,UAAU,KAAK;MACb;MACA,MAAM;MACN;MACA,MAAM;KACP,EAAC;IACH,QAAO;KACN,iBAAiB,KAAK;MACpB;MACA,MAAM;MACN;MACA,OAAO;MACP,MAAM;KACP,EAAC;IACH;GACF;GACD,aAAa;IACX,GAAG;IACH,YAAY;IACZ,oBAAoB;IACpB,gBACE,OAAO,mBAAmB,SACtB,OAAO,iBACP;GACP;EACF;EAGD,MAAM,WAAW;EACjB,KAAK,mBACH,WAAW,oBAAoB,KAAK;EACtC,KAAK,aAAa,WAAW,cAAc,KAAK;EAChD,KAAK,qBACH,WAAW,sBAAsB,KAAK;EACxC,KAAK,iBAAiB,WAAW;CAClC;CAED,IAAI,aAAqC;AAEvC,SAAO;GACL,GAAG,MAAM;GACT,YAAY;GACZ,oBAAoB;GACpB,kBAAkB;EACnB;CACF;CAED,OAAO,UAAU;AACf,SAAO;CACR;CAED,IAAI,gBAA8C;AAChD,MACE,KAAK,qBACL,oBAAoB,KAAK,qBACzB,KAAK,kBAAkB,mBAAmB,KAE1C,QAAO,KAAK;AAGd,MACE,KAAK,qBACL,oBAAoB,KAAK,qBACzB,OAAO,KAAK,kBAAkB,mBAAmB,UACjD;GACA,MAAM,aAAaV,4BAAc,KAAK,kBAAkB,eAAe;AACvE,OAAI,WACF,QAAO,WAAW,iBAAiB,KAAK;EAE3C;EAED,MAAM,SAAS,MAAM;AAErB,MAAI,KAAK,YACP;OAAI,OAAO,KAAK,YAAY,UAAU;IACpC,MAAM,mBAAmB,KAAK,QAC3B,OAAO,CAAC,UAAU,MAAM,SAAS,YAAY,CAC7C,IAAI,CAAC,UAAU,MAAM,GAAG;AAC3B,SAAK,MAAM,YAAY,KAAK,WAC1B,KAAI,SAAS,MAAM,CAAC,iBAAiB,SAAS,SAAS,GAAG,EACxD,OAAO,KAAK;KACV,GAAG;KACH,MAAM;KACN,IAAI,SAAS;KACb,MAAM,SAAS;KACf,MAAM,SAAS;IAChB,EAAC;GAGP;;AAGH,SAAO;CACR;CAED,IAAa,mBAA4C;AACvD,SAAO;GACL,GAAG,MAAM;GACT,YAAY,KAAK;GACjB,kBAAkB,KAAK;GACvB,oBAAoB,KAAK;GACzB,gBAAgB,KAAK;EACtB;CACF;CAED,OAAOW,OAAmC;EACxC,MAAMC,iBAAuC;GAC3C,SAASC,0BAAa,KAAK,SAAS,MAAM,QAAQ;GAClD,mBAAmBC,yBACjB,KAAK,mBACL,MAAM,kBACP;GACD,mBAAmBC,uCACjB,KAAK,mBACL,MAAM,kBACP;GACD,kBAAkB,CAAE;GACpB,IAAI,KAAK,MAAM,MAAM;EACtB;AACD,MACE,KAAK,qBAAqB,UAC1B,MAAM,qBAAqB,QAC3B;GACA,MAAM,eAAeC,yBACnB,KAAK,kBACL,MAAM,iBACP;AACD,OAAI,iBAAiB,UAAa,aAAa,SAAS,GACtD,eAAe,mBAAmB;EAErC;AACD,MACE,KAAK,mBAAmB,UACxB,MAAM,mBAAmB,QAEzB,eAAe,iBAAiBC,oCAC9B,KAAK,gBACL,MAAM,eACP;EAEH,MAAM,MAAM,KAAK;AACjB,SAAO,IAAI,IAAI;CAChB;CAED,OAAO,WAAWhB,KAAqC;AACrD,SAAO,MAAM,WAAW,IAAI,IAAI,IAAI,SAAS;CAC9C;AACF"}
+
{"version":3,"file":"ai.cjs","names":["BaseMessage","fields: $InferMessageContent<TStructure, \"ai\"> | AIMessageFields<TStructure>","initParams: AIMessageFields<TStructure>","toolCalls","defaultToolCallParser","getTranslator","obj: unknown","x: BaseMessage","x: BaseMessageChunk","BaseMessageChunk","fields:\n | $InferMessageContent<TStructure, \"ai\">\n | AIMessageChunkFields<TStructure>","initParams: AIMessageChunkFields<TStructure>","toolCalls: ToolCall[]","invalidToolCalls: InvalidToolCall[]","parsedArgs: Record<string, unknown> | null","parsePartialJson","chunk: AIMessageChunk<TStructure>","combinedFields: AIMessageChunkFields","mergeContent","_mergeDicts","mergeResponseMetadata","_mergeLists","mergeUsageMetadata"],"sources":["../../src/messages/ai.ts"],"sourcesContent":["import { parsePartialJson } from \"../utils/json.js\";\nimport {\n BaseMessage,\n BaseMessageChunk,\n mergeContent,\n _mergeDicts,\n _mergeLists,\n BaseMessageFields,\n} from \"./base.js\";\nimport { getTranslator } from \"./block_translators/index.js\";\nimport { ContentBlock } from \"./content/index.js\";\nimport {\n $InferMessageContent,\n $InferMessageProperty,\n MessageStructure,\n} from \"./message.js\";\nimport { mergeResponseMetadata, mergeUsageMetadata } from \"./metadata.js\";\nimport {\n InvalidToolCall,\n ToolCall,\n ToolCallChunk,\n defaultToolCallParser,\n} from \"./tool.js\";\nimport { Constructor } from \"./utils.js\";\n\nexport interface AIMessageFields<\n TStructure extends MessageStructure = MessageStructure\n> extends BaseMessageFields<TStructure, \"ai\"> {\n tool_calls?: ToolCall[];\n invalid_tool_calls?: InvalidToolCall[];\n usage_metadata?: $InferMessageProperty<TStructure, \"ai\", \"usage_metadata\">;\n}\n\nexport class AIMessage<TStructure extends MessageStructure = MessageStructure>\n extends BaseMessage<TStructure, \"ai\">\n implements AIMessageFields<TStructure>\n{\n readonly type = \"ai\" as const;\n\n tool_calls?: ToolCall[] = [];\n\n invalid_tool_calls?: InvalidToolCall[] = [];\n\n usage_metadata?: AIMessageFields<TStructure>[\"usage_metadata\"];\n\n get lc_aliases(): Record<string, string> {\n // exclude snake case conversion to pascal case\n return {\n ...super.lc_aliases,\n tool_calls: \"tool_calls\",\n invalid_tool_calls: \"invalid_tool_calls\",\n };\n }\n\n constructor(\n fields: $InferMessageContent<TStructure, \"ai\"> | AIMessageFields<TStructure>\n ) {\n let initParams: AIMessageFields<TStructure>;\n if (typeof fields === \"string\" || Array.isArray(fields)) {\n initParams = {\n content: fields,\n tool_calls: [],\n invalid_tool_calls: [],\n additional_kwargs: {},\n };\n } else {\n initParams = fields;\n const rawToolCalls = initParams.additional_kwargs?.tool_calls;\n const toolCalls = initParams.tool_calls;\n if (\n !(rawToolCalls == null) &&\n rawToolCalls.length > 0 &&\n (toolCalls === undefined || toolCalls.length === 0)\n ) {\n console.warn(\n [\n \"New LangChain packages are available that more efficiently handle\",\n \"tool calling.\\n\\nPlease upgrade your packages to versions that set\",\n \"message tool calls. e.g., `pnpm install @langchain/anthropic`,\",\n \"pnpm install @langchain/openai`, etc.\",\n ].join(\" \")\n );\n }\n try {\n if (!(rawToolCalls == null) && toolCalls === undefined) {\n const [toolCalls, invalidToolCalls] =\n defaultToolCallParser(rawToolCalls);\n initParams.tool_calls = toolCalls ?? [];\n initParams.invalid_tool_calls = invalidToolCalls ?? [];\n } else {\n initParams.tool_calls = initParams.tool_calls ?? 
[];\n initParams.invalid_tool_calls = initParams.invalid_tool_calls ?? [];\n }\n } catch {\n // Do nothing if parsing fails\n initParams.tool_calls = [];\n initParams.invalid_tool_calls = [];\n }\n\n // Convert content to content blocks if output version is v1\n if (\n initParams.response_metadata !== undefined &&\n \"output_version\" in initParams.response_metadata &&\n initParams.response_metadata.output_version === \"v1\"\n ) {\n initParams.contentBlocks =\n initParams.content as Array<ContentBlock.Standard>;\n initParams.content = undefined;\n }\n\n if (initParams.contentBlocks !== undefined) {\n // Add constructor tool calls as content blocks\n initParams.contentBlocks.push(\n ...initParams.tool_calls.map((toolCall) => ({\n type: \"tool_call\" as const,\n id: toolCall.id,\n name: toolCall.name,\n args: toolCall.args,\n }))\n );\n // Add content block tool calls that aren't in the constructor tool calls\n const missingToolCalls = initParams.contentBlocks\n .filter<ContentBlock.Tools.ToolCall>(\n (block): block is ContentBlock.Tools.ToolCall =>\n block.type === \"tool_call\"\n )\n .filter(\n (block) =>\n !initParams.tool_calls?.some(\n (toolCall) =>\n toolCall.id === block.id && toolCall.name === block.name\n )\n );\n if (missingToolCalls.length > 0) {\n initParams.tool_calls = missingToolCalls.map((block) => ({\n type: \"tool_call\" as const,\n id: block.id!,\n name: block.name,\n args: block.args as Record<string, unknown>,\n }));\n }\n }\n }\n // Sadly, TypeScript only allows super() calls at root if the class has\n // properties with initializers, so we have to check types twice.\n super(initParams);\n if (typeof initParams !== \"string\") {\n this.tool_calls = initParams.tool_calls ?? this.tool_calls;\n this.invalid_tool_calls =\n initParams.invalid_tool_calls ?? 
this.invalid_tool_calls;\n }\n this.usage_metadata = initParams.usage_metadata;\n }\n\n static lc_name() {\n return \"AIMessage\";\n }\n\n get contentBlocks(): Array<ContentBlock.Standard> {\n if (\n this.response_metadata &&\n \"output_version\" in this.response_metadata &&\n this.response_metadata.output_version === \"v1\"\n ) {\n return this.content as Array<ContentBlock.Standard>;\n }\n\n if (\n this.response_metadata &&\n \"model_provider\" in this.response_metadata &&\n typeof this.response_metadata.model_provider === \"string\"\n ) {\n const translator = getTranslator(this.response_metadata.model_provider);\n if (translator) {\n return translator.translateContent(this);\n }\n }\n\n const blocks = super.contentBlocks;\n\n if (this.tool_calls) {\n const missingToolCalls = this.tool_calls.filter(\n (block) =>\n !blocks.some((b) => b.id === block.id && b.name === block.name)\n );\n blocks.push(\n ...missingToolCalls.map((block) => ({\n ...block,\n type: \"tool_call\" as const,\n id: block.id,\n name: block.name,\n args: block.args,\n }))\n );\n }\n\n return blocks;\n }\n\n override get _printableFields(): Record<string, unknown> {\n return {\n ...super._printableFields,\n tool_calls: this.tool_calls,\n invalid_tool_calls: this.invalid_tool_calls,\n usage_metadata: this.usage_metadata,\n };\n }\n\n static isInstance(obj: unknown): obj is AIMessage {\n return super.isInstance(obj) && obj.type === \"ai\";\n }\n}\n\n/**\n * @deprecated Use {@link AIMessage.isInstance} instead\n */\nexport function isAIMessage<TStructure extends MessageStructure>(\n x: BaseMessage\n): x is AIMessage<TStructure> {\n return x._getType() === \"ai\";\n}\n\n/**\n * @deprecated Use {@link AIMessageChunk.isInstance} instead\n */\nexport function isAIMessageChunk<TStructure extends MessageStructure>(\n x: BaseMessageChunk\n): x is AIMessageChunk<TStructure> {\n return x._getType() === \"ai\";\n}\n\nexport type AIMessageChunkFields<\n TStructure extends MessageStructure = MessageStructure\n> = AIMessageFields<TStructure> & {\n tool_call_chunks?: ToolCallChunk[];\n};\n\n/**\n * Represents a chunk of an AI message, which can be concatenated with\n * other AI message chunks.\n */\nexport class AIMessageChunk<\n TStructure extends MessageStructure = MessageStructure\n >\n extends BaseMessageChunk<TStructure, \"ai\">\n implements AIMessage<TStructure>, AIMessageChunkFields<TStructure>\n{\n readonly type = \"ai\" as const;\n\n tool_calls?: ToolCall[] = [];\n\n invalid_tool_calls?: InvalidToolCall[] = [];\n\n tool_call_chunks?: ToolCallChunk[] = [];\n\n usage_metadata?: AIMessageChunkFields<TStructure>[\"usage_metadata\"];\n\n constructor(\n fields:\n | $InferMessageContent<TStructure, \"ai\">\n | AIMessageChunkFields<TStructure>\n ) {\n let initParams: AIMessageChunkFields<TStructure>;\n if (typeof fields === \"string\" || Array.isArray(fields)) {\n initParams = {\n content: fields,\n tool_calls: [],\n invalid_tool_calls: [],\n tool_call_chunks: [],\n };\n } else if (fields.tool_call_chunks === undefined) {\n initParams = {\n ...fields,\n tool_calls: fields.tool_calls ?? [],\n invalid_tool_calls: [],\n tool_call_chunks: [],\n usage_metadata:\n fields.usage_metadata !== undefined\n ? 
fields.usage_metadata\n : undefined,\n };\n } else {\n const groupedToolCallChunks = fields.tool_call_chunks.reduce(\n (acc, chunk) => {\n const matchedChunkIndex = acc.findIndex(([match]) => {\n // If chunk has an id and index, match if both are present\n if (\n \"id\" in chunk &&\n chunk.id &&\n \"index\" in chunk &&\n chunk.index !== undefined\n ) {\n return chunk.id === match.id && chunk.index === match.index;\n }\n // If chunk has an id, we match on id\n if (\"id\" in chunk && chunk.id) {\n return chunk.id === match.id;\n }\n // If chunk has an index, we match on index\n if (\"index\" in chunk && chunk.index !== undefined) {\n return chunk.index === match.index;\n }\n return false;\n });\n if (matchedChunkIndex !== -1) {\n acc[matchedChunkIndex].push(chunk);\n } else {\n acc.push([chunk]);\n }\n return acc;\n },\n [] as ToolCallChunk[][]\n );\n\n const toolCalls: ToolCall[] = [];\n const invalidToolCalls: InvalidToolCall[] = [];\n for (const chunks of groupedToolCallChunks) {\n let parsedArgs: Record<string, unknown> | null = null;\n const name = chunks[0]?.name ?? \"\";\n const joinedArgs = chunks.map((c) => c.args || \"\").join(\"\");\n const argsStr = joinedArgs.length ? joinedArgs : \"{}\";\n const id = chunks[0]?.id;\n try {\n parsedArgs = parsePartialJson(argsStr);\n if (\n !id ||\n parsedArgs === null ||\n typeof parsedArgs !== \"object\" ||\n Array.isArray(parsedArgs)\n ) {\n throw new Error(\"Malformed tool call chunk args.\");\n }\n toolCalls.push({\n name,\n args: parsedArgs,\n id,\n type: \"tool_call\",\n });\n } catch {\n invalidToolCalls.push({\n name,\n args: argsStr,\n id,\n error: \"Malformed args.\",\n type: \"invalid_tool_call\",\n });\n }\n }\n initParams = {\n ...fields,\n tool_calls: toolCalls,\n invalid_tool_calls: invalidToolCalls,\n usage_metadata:\n fields.usage_metadata !== undefined\n ? fields.usage_metadata\n : undefined,\n };\n }\n // Sadly, TypeScript only allows super() calls at root if the class has\n // properties with initializers, so we have to check types twice.\n super(initParams);\n this.tool_call_chunks =\n initParams.tool_call_chunks ?? this.tool_call_chunks;\n this.tool_calls = initParams.tool_calls ?? this.tool_calls;\n this.invalid_tool_calls =\n initParams.invalid_tool_calls ?? 
this.invalid_tool_calls;\n this.usage_metadata = initParams.usage_metadata;\n }\n\n get lc_aliases(): Record<string, string> {\n // exclude snake case conversion to pascal case\n return {\n ...super.lc_aliases,\n tool_calls: \"tool_calls\",\n invalid_tool_calls: \"invalid_tool_calls\",\n tool_call_chunks: \"tool_call_chunks\",\n };\n }\n\n static lc_name() {\n return \"AIMessageChunk\";\n }\n\n get contentBlocks(): Array<ContentBlock.Standard> {\n if (\n this.response_metadata &&\n \"output_version\" in this.response_metadata &&\n this.response_metadata.output_version === \"v1\"\n ) {\n return this.content as Array<ContentBlock.Standard>;\n }\n\n if (\n this.response_metadata &&\n \"model_provider\" in this.response_metadata &&\n typeof this.response_metadata.model_provider === \"string\"\n ) {\n const translator = getTranslator(this.response_metadata.model_provider);\n if (translator) {\n return translator.translateContent(this);\n }\n }\n\n const blocks = super.contentBlocks;\n\n if (this.tool_calls) {\n if (typeof this.content !== \"string\") {\n const contentToolCalls = this.content\n .filter((block) => block.type === \"tool_call\")\n .map((block) => block.id);\n for (const toolCall of this.tool_calls) {\n if (toolCall.id && !contentToolCalls.includes(toolCall.id)) {\n blocks.push({\n ...toolCall,\n type: \"tool_call\",\n id: toolCall.id,\n name: toolCall.name,\n args: toolCall.args,\n });\n }\n }\n }\n }\n\n return blocks;\n }\n\n override get _printableFields(): Record<string, unknown> {\n return {\n ...super._printableFields,\n tool_calls: this.tool_calls,\n tool_call_chunks: this.tool_call_chunks,\n invalid_tool_calls: this.invalid_tool_calls,\n usage_metadata: this.usage_metadata,\n };\n }\n\n concat(chunk: AIMessageChunk<TStructure>) {\n const combinedFields: AIMessageChunkFields = {\n content: mergeContent(this.content, chunk.content),\n additional_kwargs: _mergeDicts(\n this.additional_kwargs,\n chunk.additional_kwargs\n ),\n response_metadata: mergeResponseMetadata(\n this.response_metadata,\n chunk.response_metadata\n ),\n tool_call_chunks: [],\n id: this.id ?? 
chunk.id,\n };\n if (\n this.tool_call_chunks !== undefined ||\n chunk.tool_call_chunks !== undefined\n ) {\n const rawToolCalls = _mergeLists(\n this.tool_call_chunks as ContentBlock.Tools.ToolCallChunk[],\n chunk.tool_call_chunks as ContentBlock.Tools.ToolCallChunk[]\n );\n if (rawToolCalls !== undefined && rawToolCalls.length > 0) {\n combinedFields.tool_call_chunks = rawToolCalls;\n }\n }\n if (\n this.usage_metadata !== undefined ||\n chunk.usage_metadata !== undefined\n ) {\n combinedFields.usage_metadata = mergeUsageMetadata(\n this.usage_metadata,\n chunk.usage_metadata\n );\n }\n const Cls = this.constructor as Constructor<this>;\n return new Cls(combinedFields);\n }\n\n static isInstance(obj: unknown): obj is AIMessageChunk {\n return super.isInstance(obj) && obj.type === \"ai\";\n }\n}\n"],"mappings":";;;;;;;AAiCA,IAAa,YAAb,cACUA,yBAEV;CACE,AAAS,OAAO;CAEhB,aAA0B,CAAE;CAE5B,qBAAyC,CAAE;CAE3C;CAEA,IAAI,aAAqC;AAEvC,SAAO;GACL,GAAG,MAAM;GACT,YAAY;GACZ,oBAAoB;EACrB;CACF;CAED,YACEC,QACA;EACA,IAAIC;AACJ,MAAI,OAAO,WAAW,YAAY,MAAM,QAAQ,OAAO,EACrD,aAAa;GACX,SAAS;GACT,YAAY,CAAE;GACd,oBAAoB,CAAE;GACtB,mBAAmB,CAAE;EACtB;OACI;GACL,aAAa;GACb,MAAM,eAAe,WAAW,mBAAmB;GACnD,MAAM,YAAY,WAAW;AAC7B,OACE,EAAE,gBAAgB,SAClB,aAAa,SAAS,MACrB,cAAc,UAAa,UAAU,WAAW,IAEjD,QAAQ,KACN;IACE;IACA;IACA;IACA;GACD,EAAC,KAAK,IAAI,CACZ;AAEH,OAAI;AACF,QAAI,EAAE,gBAAgB,SAAS,cAAc,QAAW;KACtD,MAAM,CAACC,aAAW,iBAAiB,GACjCC,4CAAsB,aAAa;KACrC,WAAW,aAAaD,eAAa,CAAE;KACvC,WAAW,qBAAqB,oBAAoB,CAAE;IACvD,OAAM;KACL,WAAW,aAAa,WAAW,cAAc,CAAE;KACnD,WAAW,qBAAqB,WAAW,sBAAsB,CAAE;IACpE;GACF,QAAO;IAEN,WAAW,aAAa,CAAE;IAC1B,WAAW,qBAAqB,CAAE;GACnC;AAGD,OACE,WAAW,sBAAsB,UACjC,oBAAoB,WAAW,qBAC/B,WAAW,kBAAkB,mBAAmB,MAChD;IACA,WAAW,gBACT,WAAW;IACb,WAAW,UAAU;GACtB;AAED,OAAI,WAAW,kBAAkB,QAAW;IAE1C,WAAW,cAAc,KACvB,GAAG,WAAW,WAAW,IAAI,CAAC,cAAc;KAC1C,MAAM;KACN,IAAI,SAAS;KACb,MAAM,SAAS;KACf,MAAM,SAAS;IAChB,GAAE,CACJ;IAED,MAAM,mBAAmB,WAAW,cACjC,OACC,CAAC,UACC,MAAM,SAAS,YAClB,CACA,OACC,CAAC,UACC,CAAC,WAAW,YAAY,KACtB,CAAC,aACC,SAAS,OAAO,MAAM,MAAM,SAAS,SAAS,MAAM,KACvD,CACJ;AACH,QAAI,iBAAiB,SAAS,GAC5B,WAAW,aAAa,iBAAiB,IAAI,CAAC,WAAW;KACvD,MAAM;KACN,IAAI,MAAM;KACV,MAAM,MAAM;KACZ,MAAM,MAAM;IACb,GAAE;GAEN;EACF;EAGD,MAAM,WAAW;AACjB,MAAI,OAAO,eAAe,UAAU;GAClC,KAAK,aAAa,WAAW,cAAc,KAAK;GAChD,KAAK,qBACH,WAAW,sBAAsB,KAAK;EACzC;EACD,KAAK,iBAAiB,WAAW;CAClC;CAED,OAAO,UAAU;AACf,SAAO;CACR;CAED,IAAI,gBAA8C;AAChD,MACE,KAAK,qBACL,oBAAoB,KAAK,qBACzB,KAAK,kBAAkB,mBAAmB,KAE1C,QAAO,KAAK;AAGd,MACE,KAAK,qBACL,oBAAoB,KAAK,qBACzB,OAAO,KAAK,kBAAkB,mBAAmB,UACjD;GACA,MAAM,aAAaE,4BAAc,KAAK,kBAAkB,eAAe;AACvE,OAAI,WACF,QAAO,WAAW,iBAAiB,KAAK;EAE3C;EAED,MAAM,SAAS,MAAM;AAErB,MAAI,KAAK,YAAY;GACnB,MAAM,mBAAmB,KAAK,WAAW,OACvC,CAAC,UACC,CAAC,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,MAAM,MAAM,EAAE,SAAS,MAAM,KAAK,CAClE;GACD,OAAO,KACL,GAAG,iBAAiB,IAAI,CAAC,WAAW;IAClC,GAAG;IACH,MAAM;IACN,IAAI,MAAM;IACV,MAAM,MAAM;IACZ,MAAM,MAAM;GACb,GAAE,CACJ;EACF;AAED,SAAO;CACR;CAED,IAAa,mBAA4C;AACvD,SAAO;GACL,GAAG,MAAM;GACT,YAAY,KAAK;GACjB,oBAAoB,KAAK;GACzB,gBAAgB,KAAK;EACtB;CACF;CAED,OAAO,WAAWC,KAAgC;AAChD,SAAO,MAAM,WAAW,IAAI,IAAI,IAAI,SAAS;CAC9C;AACF;;;;AAKD,SAAgB,YACdC,GAC4B;AAC5B,QAAO,EAAE,UAAU,KAAK;AACzB;;;;AAKD,SAAgB,iBACdC,GACiC;AACjC,QAAO,EAAE,UAAU,KAAK;AACzB;;;;;AAYD,IAAa,iBAAb,cAGUC,8BAEV;CACE,AAAS,OAAO;CAEhB,aAA0B,CAAE;CAE5B,qBAAyC,CAAE;CAE3C,mBAAqC,CAAE;CAEvC;CAEA,YACEC,QAGA;EACA,IAAIC;AACJ,MAAI,OAAO,WAAW,YAAY,MAAM,QAAQ,OAAO,EACrD,aAAa;GACX,SAAS;GACT,YAAY,CAAE;GACd,oBAAoB,CAAE;GACtB,kBAAkB,CAAE;EACrB;WACQ,OAAO,qBAAqB,QACrC,aAAa;GACX,GAAG;GACH,YAAY,OAAO,cAAc,CAAE;GACnC,oBAAoB,CAAE;GACtB,kBAAkB,CAAE;GACpB,gBACE,OAAO,mBAAm
B,SACtB,OAAO,iBACP;EACP;OACI;GACL,MAAM,wBAAwB,OAAO,iBAAiB,OACpD,CAAC,KAAK,UAAU;IACd,MAAM,oBAAoB,IAAI,UAAU,CAAC,CAAC,MAAM,KAAK;AAEnD,SACE,QAAQ,SACR,MAAM,MACN,WAAW,SACX,MAAM,UAAU,OAEhB,QAAO,MAAM,OAAO,MAAM,MAAM,MAAM,UAAU,MAAM;AAGxD,SAAI,QAAQ,SAAS,MAAM,GACzB,QAAO,MAAM,OAAO,MAAM;AAG5B,SAAI,WAAW,SAAS,MAAM,UAAU,OACtC,QAAO,MAAM,UAAU,MAAM;AAE/B,YAAO;IACR,EAAC;AACF,QAAI,sBAAsB,IACxB,IAAI,mBAAmB,KAAK,MAAM;SAElC,IAAI,KAAK,CAAC,KAAM,EAAC;AAEnB,WAAO;GACR,GACD,CAAE,EACH;GAED,MAAMC,YAAwB,CAAE;GAChC,MAAMC,mBAAsC,CAAE;AAC9C,QAAK,MAAM,UAAU,uBAAuB;IAC1C,IAAIC,aAA6C;IACjD,MAAM,OAAO,OAAO,IAAI,QAAQ;IAChC,MAAM,aAAa,OAAO,IAAI,CAAC,MAAM,EAAE,QAAQ,GAAG,CAAC,KAAK,GAAG;IAC3D,MAAM,UAAU,WAAW,SAAS,aAAa;IACjD,MAAM,KAAK,OAAO,IAAI;AACtB,QAAI;KACF,aAAaC,8BAAiB,QAAQ;AACtC,SACE,CAAC,MACD,eAAe,QACf,OAAO,eAAe,YACtB,MAAM,QAAQ,WAAW,CAEzB,OAAM,IAAI,MAAM;KAElB,UAAU,KAAK;MACb;MACA,MAAM;MACN;MACA,MAAM;KACP,EAAC;IACH,QAAO;KACN,iBAAiB,KAAK;MACpB;MACA,MAAM;MACN;MACA,OAAO;MACP,MAAM;KACP,EAAC;IACH;GACF;GACD,aAAa;IACX,GAAG;IACH,YAAY;IACZ,oBAAoB;IACpB,gBACE,OAAO,mBAAmB,SACtB,OAAO,iBACP;GACP;EACF;EAGD,MAAM,WAAW;EACjB,KAAK,mBACH,WAAW,oBAAoB,KAAK;EACtC,KAAK,aAAa,WAAW,cAAc,KAAK;EAChD,KAAK,qBACH,WAAW,sBAAsB,KAAK;EACxC,KAAK,iBAAiB,WAAW;CAClC;CAED,IAAI,aAAqC;AAEvC,SAAO;GACL,GAAG,MAAM;GACT,YAAY;GACZ,oBAAoB;GACpB,kBAAkB;EACnB;CACF;CAED,OAAO,UAAU;AACf,SAAO;CACR;CAED,IAAI,gBAA8C;AAChD,MACE,KAAK,qBACL,oBAAoB,KAAK,qBACzB,KAAK,kBAAkB,mBAAmB,KAE1C,QAAO,KAAK;AAGd,MACE,KAAK,qBACL,oBAAoB,KAAK,qBACzB,OAAO,KAAK,kBAAkB,mBAAmB,UACjD;GACA,MAAM,aAAaV,4BAAc,KAAK,kBAAkB,eAAe;AACvE,OAAI,WACF,QAAO,WAAW,iBAAiB,KAAK;EAE3C;EAED,MAAM,SAAS,MAAM;AAErB,MAAI,KAAK,YACP;OAAI,OAAO,KAAK,YAAY,UAAU;IACpC,MAAM,mBAAmB,KAAK,QAC3B,OAAO,CAAC,UAAU,MAAM,SAAS,YAAY,CAC7C,IAAI,CAAC,UAAU,MAAM,GAAG;AAC3B,SAAK,MAAM,YAAY,KAAK,WAC1B,KAAI,SAAS,MAAM,CAAC,iBAAiB,SAAS,SAAS,GAAG,EACxD,OAAO,KAAK;KACV,GAAG;KACH,MAAM;KACN,IAAI,SAAS;KACb,MAAM,SAAS;KACf,MAAM,SAAS;IAChB,EAAC;GAGP;;AAGH,SAAO;CACR;CAED,IAAa,mBAA4C;AACvD,SAAO;GACL,GAAG,MAAM;GACT,YAAY,KAAK;GACjB,kBAAkB,KAAK;GACvB,oBAAoB,KAAK;GACzB,gBAAgB,KAAK;EACtB;CACF;CAED,OAAOW,OAAmC;EACxC,MAAMC,iBAAuC;GAC3C,SAASC,0BAAa,KAAK,SAAS,MAAM,QAAQ;GAClD,mBAAmBC,yBACjB,KAAK,mBACL,MAAM,kBACP;GACD,mBAAmBC,uCACjB,KAAK,mBACL,MAAM,kBACP;GACD,kBAAkB,CAAE;GACpB,IAAI,KAAK,MAAM,MAAM;EACtB;AACD,MACE,KAAK,qBAAqB,UAC1B,MAAM,qBAAqB,QAC3B;GACA,MAAM,eAAeC,yBACnB,KAAK,kBACL,MAAM,iBACP;AACD,OAAI,iBAAiB,UAAa,aAAa,SAAS,GACtD,eAAe,mBAAmB;EAErC;AACD,MACE,KAAK,mBAAmB,UACxB,MAAM,mBAAmB,QAEzB,eAAe,iBAAiBC,oCAC9B,KAAK,gBACL,MAAM,eACP;EAEH,MAAM,MAAM,KAAK;AACjB,SAAO,IAAI,IAAI;CAChB;CAED,OAAO,WAAWhB,KAAqC;AACrD,SAAO,MAAM,WAAW,IAAI,IAAI,IAAI,SAAS;CAC9C;AACF"}
package/dist/messages/ai.js
CHANGED

@@ -48,6 +48,10 @@ var AIMessage = class extends BaseMessage {
       initParams.tool_calls = [];
       initParams.invalid_tool_calls = [];
     }
+    if (initParams.response_metadata !== void 0 && "output_version" in initParams.response_metadata && initParams.response_metadata.output_version === "v1") {
+      initParams.contentBlocks = initParams.content;
+      initParams.content = void 0;
+    }
     if (initParams.contentBlocks !== void 0) {
       initParams.contentBlocks.push(...initParams.tool_calls.map((toolCall) => ({
         type: "tool_call",
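For context on the four added lines above: when an AIMessage is constructed with response_metadata.output_version === "v1", the constructor now treats the incoming content array as standard content blocks (it moves it onto contentBlocks, clears content, and appends tool calls as tool_call blocks). A minimal, hypothetical sketch of how that branch is exercised follows; the import path, the text-block shape, and the expectation that the blocks come back out of the contentBlocks getter are inferred from the bundled ai.ts source embedded in the map above, not from separately verified runtime behavior:

    import { AIMessage } from "@langchain/core/messages";

    // With output_version "v1", the new branch moves `content` onto
    // `contentBlocks` and sets `content` to undefined before calling super().
    const msg = new AIMessage({
      content: [{ type: "text", text: "Hello from the model" }],
      response_metadata: { output_version: "v1" },
    });

    // Per the getter in ai.ts, v1 metadata bypasses the provider translators,
    // so these blocks should round-trip (plus any tool_call blocks appended by
    // the constructor), assuming the base class stores the constructor's
    // contentBlocks, which is not visible in this diff.
    console.log(msg.contentBlocks);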