@langchain/core 1.1.33-dev-1773786580575 → 1.1.34
This diff shows the contents of publicly released package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in the public registry.
- package/CHANGELOG.md +14 -0
- package/agents.cjs +1 -0
- package/agents.d.cts +1 -0
- package/agents.d.ts +1 -0
- package/agents.js +1 -0
- package/caches.cjs +1 -0
- package/caches.d.cts +1 -0
- package/caches.d.ts +1 -0
- package/caches.js +1 -0
- package/callbacks/base.cjs +1 -0
- package/callbacks/base.d.cts +1 -0
- package/callbacks/base.d.ts +1 -0
- package/callbacks/base.js +1 -0
- package/callbacks/dispatch/web.cjs +1 -0
- package/callbacks/dispatch/web.d.cts +1 -0
- package/callbacks/dispatch/web.d.ts +1 -0
- package/callbacks/dispatch/web.js +1 -0
- package/callbacks/dispatch.cjs +1 -0
- package/callbacks/dispatch.d.cts +1 -0
- package/callbacks/dispatch.d.ts +1 -0
- package/callbacks/dispatch.js +1 -0
- package/callbacks/manager.cjs +1 -0
- package/callbacks/manager.d.cts +1 -0
- package/callbacks/manager.d.ts +1 -0
- package/callbacks/manager.js +1 -0
- package/callbacks/promises.cjs +1 -0
- package/callbacks/promises.d.cts +1 -0
- package/callbacks/promises.d.ts +1 -0
- package/callbacks/promises.js +1 -0
- package/chat_history.cjs +1 -0
- package/chat_history.d.cts +1 -0
- package/chat_history.d.ts +1 -0
- package/chat_history.js +1 -0
- package/context.cjs +1 -0
- package/context.d.cts +1 -0
- package/context.d.ts +1 -0
- package/context.js +1 -0
- package/dist/language_models/base.cjs +1 -1
- package/dist/language_models/base.cjs.map +1 -1
- package/dist/language_models/base.js +1 -1
- package/dist/language_models/base.js.map +1 -1
- package/dist/language_models/chat_models.cjs +21 -6
- package/dist/language_models/chat_models.cjs.map +1 -1
- package/dist/language_models/chat_models.d.cts.map +1 -1
- package/dist/language_models/chat_models.d.ts.map +1 -1
- package/dist/language_models/chat_models.js +22 -7
- package/dist/language_models/chat_models.js.map +1 -1
- package/dist/language_models/llms.cjs +22 -6
- package/dist/language_models/llms.cjs.map +1 -1
- package/dist/language_models/llms.d.cts.map +1 -1
- package/dist/language_models/llms.d.ts.map +1 -1
- package/dist/language_models/llms.js +22 -6
- package/dist/language_models/llms.js.map +1 -1
- package/dist/language_models/utils.cjs +4 -0
- package/dist/language_models/utils.cjs.map +1 -1
- package/dist/language_models/utils.js +4 -1
- package/dist/language_models/utils.js.map +1 -1
- package/dist/messages/utils.cjs +29 -1
- package/dist/messages/utils.cjs.map +1 -1
- package/dist/messages/utils.d.cts.map +1 -1
- package/dist/messages/utils.d.ts.map +1 -1
- package/dist/messages/utils.js +29 -1
- package/dist/messages/utils.js.map +1 -1
- package/dist/tools/index.cjs.map +1 -1
- package/dist/tools/index.d.cts +1 -1
- package/dist/tools/index.d.cts.map +1 -1
- package/dist/tools/index.d.ts +1 -1
- package/dist/tools/index.d.ts.map +1 -1
- package/dist/tools/index.js.map +1 -1
- package/dist/tools/types.cjs.map +1 -1
- package/dist/tools/types.d.cts +1 -1
- package/dist/tools/types.d.cts.map +1 -1
- package/dist/tools/types.d.ts +1 -1
- package/dist/tools/types.d.ts.map +1 -1
- package/dist/tools/types.js.map +1 -1
- package/document_loaders/base.cjs +1 -0
- package/document_loaders/base.d.cts +1 -0
- package/document_loaders/base.d.ts +1 -0
- package/document_loaders/base.js +1 -0
- package/document_loaders/langsmith.cjs +1 -0
- package/document_loaders/langsmith.d.cts +1 -0
- package/document_loaders/langsmith.d.ts +1 -0
- package/document_loaders/langsmith.js +1 -0
- package/documents.cjs +1 -0
- package/documents.d.cts +1 -0
- package/documents.d.ts +1 -0
- package/documents.js +1 -0
- package/embeddings.cjs +1 -0
- package/embeddings.d.cts +1 -0
- package/embeddings.d.ts +1 -0
- package/embeddings.js +1 -0
- package/errors.cjs +1 -0
- package/errors.d.cts +1 -0
- package/errors.d.ts +1 -0
- package/errors.js +1 -0
- package/example_selectors.cjs +1 -0
- package/example_selectors.d.cts +1 -0
- package/example_selectors.d.ts +1 -0
- package/example_selectors.js +1 -0
- package/indexing.cjs +1 -0
- package/indexing.d.cts +1 -0
- package/indexing.d.ts +1 -0
- package/indexing.js +1 -0
- package/language_models/base.cjs +1 -0
- package/language_models/base.d.cts +1 -0
- package/language_models/base.d.ts +1 -0
- package/language_models/base.js +1 -0
- package/language_models/chat_models.cjs +1 -0
- package/language_models/chat_models.d.cts +1 -0
- package/language_models/chat_models.d.ts +1 -0
- package/language_models/chat_models.js +1 -0
- package/language_models/llms.cjs +1 -0
- package/language_models/llms.d.cts +1 -0
- package/language_models/llms.d.ts +1 -0
- package/language_models/llms.js +1 -0
- package/language_models/profile.cjs +1 -0
- package/language_models/profile.d.cts +1 -0
- package/language_models/profile.d.ts +1 -0
- package/language_models/profile.js +1 -0
- package/language_models/structured_output.cjs +1 -0
- package/language_models/structured_output.d.cts +1 -0
- package/language_models/structured_output.d.ts +1 -0
- package/language_models/structured_output.js +1 -0
- package/load/serializable.cjs +1 -0
- package/load/serializable.d.cts +1 -0
- package/load/serializable.d.ts +1 -0
- package/load/serializable.js +1 -0
- package/load.cjs +1 -0
- package/load.d.cts +1 -0
- package/load.d.ts +1 -0
- package/load.js +1 -0
- package/memory.cjs +1 -0
- package/memory.d.cts +1 -0
- package/memory.d.ts +1 -0
- package/memory.js +1 -0
- package/messages/tool.cjs +1 -0
- package/messages/tool.d.cts +1 -0
- package/messages/tool.d.ts +1 -0
- package/messages/tool.js +1 -0
- package/messages.cjs +1 -0
- package/messages.d.cts +1 -0
- package/messages.d.ts +1 -0
- package/messages.js +1 -0
- package/output_parsers/openai_functions.cjs +1 -0
- package/output_parsers/openai_functions.d.cts +1 -0
- package/output_parsers/openai_functions.d.ts +1 -0
- package/output_parsers/openai_functions.js +1 -0
- package/output_parsers/openai_tools.cjs +1 -0
- package/output_parsers/openai_tools.d.cts +1 -0
- package/output_parsers/openai_tools.d.ts +1 -0
- package/output_parsers/openai_tools.js +1 -0
- package/output_parsers.cjs +1 -0
- package/output_parsers.d.cts +1 -0
- package/output_parsers.d.ts +1 -0
- package/output_parsers.js +1 -0
- package/outputs.cjs +1 -0
- package/outputs.d.cts +1 -0
- package/outputs.d.ts +1 -0
- package/outputs.js +1 -0
- package/package.json +3 -3
- package/prompt_values.cjs +1 -0
- package/prompt_values.d.cts +1 -0
- package/prompt_values.d.ts +1 -0
- package/prompt_values.js +1 -0
- package/prompts.cjs +1 -0
- package/prompts.d.cts +1 -0
- package/prompts.d.ts +1 -0
- package/prompts.js +1 -0
- package/retrievers/document_compressors.cjs +1 -0
- package/retrievers/document_compressors.d.cts +1 -0
- package/retrievers/document_compressors.d.ts +1 -0
- package/retrievers/document_compressors.js +1 -0
- package/retrievers.cjs +1 -0
- package/retrievers.d.cts +1 -0
- package/retrievers.d.ts +1 -0
- package/retrievers.js +1 -0
- package/runnables/graph.cjs +1 -0
- package/runnables/graph.d.cts +1 -0
- package/runnables/graph.d.ts +1 -0
- package/runnables/graph.js +1 -0
- package/runnables.cjs +1 -0
- package/runnables.d.cts +1 -0
- package/runnables.d.ts +1 -0
- package/runnables.js +1 -0
- package/singletons.cjs +1 -0
- package/singletons.d.cts +1 -0
- package/singletons.d.ts +1 -0
- package/singletons.js +1 -0
- package/stores.cjs +1 -0
- package/stores.d.cts +1 -0
- package/stores.d.ts +1 -0
- package/stores.js +1 -0
- package/structured_query.cjs +1 -0
- package/structured_query.d.cts +1 -0
- package/structured_query.d.ts +1 -0
- package/structured_query.js +1 -0
- package/tools.cjs +1 -0
- package/tools.d.cts +1 -0
- package/tools.d.ts +1 -0
- package/tools.js +1 -0
- package/tracers/base.cjs +1 -0
- package/tracers/base.d.cts +1 -0
- package/tracers/base.d.ts +1 -0
- package/tracers/base.js +1 -0
- package/tracers/console.cjs +1 -0
- package/tracers/console.d.cts +1 -0
- package/tracers/console.d.ts +1 -0
- package/tracers/console.js +1 -0
- package/tracers/log_stream.cjs +1 -0
- package/tracers/log_stream.d.cts +1 -0
- package/tracers/log_stream.d.ts +1 -0
- package/tracers/log_stream.js +1 -0
- package/tracers/run_collector.cjs +1 -0
- package/tracers/run_collector.d.cts +1 -0
- package/tracers/run_collector.d.ts +1 -0
- package/tracers/run_collector.js +1 -0
- package/tracers/tracer_langchain.cjs +1 -0
- package/tracers/tracer_langchain.d.cts +1 -0
- package/tracers/tracer_langchain.d.ts +1 -0
- package/tracers/tracer_langchain.js +1 -0
- package/types/stream.cjs +1 -0
- package/types/stream.d.cts +1 -0
- package/types/stream.d.ts +1 -0
- package/types/stream.js +1 -0
- package/utils/async_caller.cjs +1 -0
- package/utils/async_caller.d.cts +1 -0
- package/utils/async_caller.d.ts +1 -0
- package/utils/async_caller.js +1 -0
- package/utils/chunk_array.cjs +1 -0
- package/utils/chunk_array.d.cts +1 -0
- package/utils/chunk_array.d.ts +1 -0
- package/utils/chunk_array.js +1 -0
- package/utils/context.cjs +1 -0
- package/utils/context.d.cts +1 -0
- package/utils/context.d.ts +1 -0
- package/utils/context.js +1 -0
- package/utils/env.cjs +1 -0
- package/utils/env.d.cts +1 -0
- package/utils/env.d.ts +1 -0
- package/utils/env.js +1 -0
- package/utils/event_source_parse.cjs +1 -0
- package/utils/event_source_parse.d.cts +1 -0
- package/utils/event_source_parse.d.ts +1 -0
- package/utils/event_source_parse.js +1 -0
- package/utils/format.cjs +1 -0
- package/utils/format.d.cts +1 -0
- package/utils/format.d.ts +1 -0
- package/utils/format.js +1 -0
- package/utils/function_calling.cjs +1 -0
- package/utils/function_calling.d.cts +1 -0
- package/utils/function_calling.d.ts +1 -0
- package/utils/function_calling.js +1 -0
- package/utils/hash.cjs +1 -0
- package/utils/hash.d.cts +1 -0
- package/utils/hash.d.ts +1 -0
- package/utils/hash.js +1 -0
- package/utils/json_patch.cjs +1 -0
- package/utils/json_patch.d.cts +1 -0
- package/utils/json_patch.d.ts +1 -0
- package/utils/json_patch.js +1 -0
- package/utils/json_schema.cjs +1 -0
- package/utils/json_schema.d.cts +1 -0
- package/utils/json_schema.d.ts +1 -0
- package/utils/json_schema.js +1 -0
- package/utils/math.cjs +1 -0
- package/utils/math.d.cts +1 -0
- package/utils/math.d.ts +1 -0
- package/utils/math.js +1 -0
- package/utils/ssrf.cjs +1 -0
- package/utils/ssrf.d.cts +1 -0
- package/utils/ssrf.d.ts +1 -0
- package/utils/ssrf.js +1 -0
- package/utils/standard_schema.cjs +1 -0
- package/utils/standard_schema.d.cts +1 -0
- package/utils/standard_schema.d.ts +1 -0
- package/utils/standard_schema.js +1 -0
- package/utils/stream.cjs +1 -0
- package/utils/stream.d.cts +1 -0
- package/utils/stream.d.ts +1 -0
- package/utils/stream.js +1 -0
- package/utils/testing.cjs +1 -0
- package/utils/testing.d.cts +1 -0
- package/utils/testing.d.ts +1 -0
- package/utils/testing.js +1 -0
- package/utils/tiktoken.cjs +1 -0
- package/utils/tiktoken.d.cts +1 -0
- package/utils/tiktoken.d.ts +1 -0
- package/utils/tiktoken.js +1 -0
- package/utils/types.cjs +1 -0
- package/utils/types.d.cts +1 -0
- package/utils/types.d.ts +1 -0
- package/utils/types.js +1 -0
- package/vectorstores.cjs +1 -0
- package/vectorstores.d.cts +1 -0
- package/vectorstores.d.ts +1 -0
- package/vectorstores.js +1 -0
package/dist/language_models/llms.cjs
@@ -5,6 +5,7 @@ const require_callbacks_manager = require("../callbacks/manager.cjs");
 const require_utils_stream = require("../utils/stream.cjs");
 const require_outputs = require("../outputs.cjs");
 const require_language_models_base = require("./base.cjs");
+const require_utils = require("./utils.cjs");
 //#region src/language_models/llms.ts
 var llms_exports = /* @__PURE__ */ require_runtime.__exportAll({
 	BaseLLM: () => BaseLLM,
@@ -44,10 +45,15 @@ var BaseLLM = class BaseLLM extends require_language_models_base.BaseLanguageMod
 	else {
 		const prompt = BaseLLM._convertInputToPromptValue(input);
 		const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptionsCompat(options);
-		const
+		const invocationParams = this?.invocationParams(callOptions);
+		const metadataInvocationParams = require_utils.parseMetadataInvocationParams(invocationParams);
+		const callbackManager_ = require_callbacks_manager.CallbackManager.configure(runnableConfig.callbacks, this.callbacks, runnableConfig.tags, this.tags, runnableConfig.metadata, {
+			...metadataInvocationParams,
+			...this.metadata
+		}, { verbose: this.verbose });
 		const extra = {
 			options: callOptions,
-			invocation_params:
+			invocation_params: invocationParams,
 			batch_size: 1
 		};
 		const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), [prompt.toString()], runnableConfig.runId, void 0, extra, void 0, void 0, runnableConfig.runName);
@@ -109,10 +115,15 @@ var BaseLLM = class BaseLLM extends require_language_models_base.BaseLanguageMod
 	let runManagers;
 	if (startedRunManagers !== void 0 && startedRunManagers.length === prompts.length) runManagers = startedRunManagers;
 	else {
-		const
+		const invocationParams = this?.invocationParams(parsedOptions);
+		const metadataInvocationParams = require_utils.parseMetadataInvocationParams(invocationParams);
+		const callbackManager_ = require_callbacks_manager.CallbackManager.configure(handledOptions.callbacks, this.callbacks, handledOptions.tags, this.tags, handledOptions.metadata, {
+			...metadataInvocationParams,
+			...this.metadata
+		}, { verbose: this.verbose });
 		const extra = {
 			options: parsedOptions,
-			invocation_params:
+			invocation_params: invocationParams,
 			batch_size: prompts.length
 		};
 		runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, handledOptions.runId, void 0, extra, void 0, void 0, handledOptions?.runName);
@@ -152,10 +163,15 @@ var BaseLLM = class BaseLLM extends require_language_models_base.BaseLanguageMod
 		return output;
 	}
 	async _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, runId }) {
-		const
+		const invocationParams = this?.invocationParams(parsedOptions);
+		const metadataInvocationParams = require_utils.parseMetadataInvocationParams(invocationParams);
+		const callbackManager_ = require_callbacks_manager.CallbackManager.configure(handledOptions.callbacks, this.callbacks, handledOptions.tags, this.tags, handledOptions.metadata, {
+			...metadataInvocationParams,
+			...this.metadata
+		}, { verbose: this.verbose });
 		const extra = {
 			options: parsedOptions,
-			invocation_params:
+			invocation_params: invocationParams,
 			batch_size: prompts.length
 		};
 		const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, runId, void 0, extra, void 0, void 0, handledOptions?.runName);
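At the TypeScript level the same change can be read from the sourcesContent embedded in the regenerated llms.cjs.map. The excerpt below is the updated wiring in BaseLLM._generateCached (the _streamIterator and _generateUncached call sites follow the same pattern): invocation params are computed once, turned into callback metadata with the new parseMetadataInvocationParams helper from "./utils.js", spread ahead of this.metadata so explicitly configured metadata still wins, and reused for invocation_params instead of calling invocationParams() a second time; CallbackManager.configure is also no longer awaited.

```ts
// Excerpt from the updated src/language_models/llms.ts (BaseLLM._generateCached),
// as embedded in the regenerated source map.
const invocationParams = this?.invocationParams(parsedOptions);
const metadataInvocationParams =
  parseMetadataInvocationParams(invocationParams);
const callbackManager_ = CallbackManager.configure(
  handledOptions.callbacks,
  this.callbacks,
  handledOptions.tags,
  this.tags,
  handledOptions.metadata,
  // invocation params become run metadata; this.metadata overrides on key conflicts
  { ...metadataInvocationParams, ...this.metadata },
  { verbose: this.verbose }
);
const extra = {
  options: parsedOptions,
  invocation_params: invocationParams, // reused rather than recomputed
  batch_size: prompts.length,
};
```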
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"llms.cjs","names":["BaseLanguageModel","CallbackManager","GenerationChunk","callbackHandlerPrefersStreaming","concat","RUN_KEY"],"sources":["../../src/language_models/llms.ts"],"sourcesContent":["import type { BasePromptValueInterface } from \"../prompt_values.js\";\nimport {\n type LLMResult,\n RUN_KEY,\n type Generation,\n GenerationChunk,\n} from \"../outputs.js\";\nimport {\n type BaseCallbackConfig,\n CallbackManager,\n type CallbackManagerForLLMRun,\n type Callbacks,\n} from \"../callbacks/manager.js\";\nimport {\n BaseLanguageModel,\n type BaseLanguageModelCallOptions,\n type BaseLanguageModelInput,\n type BaseLanguageModelParams,\n} from \"./base.js\";\nimport type { RunnableConfig } from \"../runnables/config.js\";\nimport type { BaseCache } from \"../caches/index.js\";\nimport { concat } from \"../utils/stream.js\";\nimport { callbackHandlerPrefersStreaming } from \"../callbacks/base.js\";\n\nexport type SerializedLLM = {\n _model: string;\n _type: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n} & Record<string, any>;\n\nexport interface BaseLLMParams extends BaseLanguageModelParams {}\n\nexport interface BaseLLMCallOptions extends BaseLanguageModelCallOptions {}\n\n/**\n * LLM Wrapper. Takes in a prompt (or prompts) and returns a string.\n */\nexport abstract class BaseLLM<\n CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions,\n> extends BaseLanguageModel<string, CallOptions> {\n // Backwards compatibility since fields have been moved to RunnableConfig\n declare ParsedCallOptions: Omit<\n CallOptions,\n Exclude<keyof RunnableConfig, \"signal\" | \"timeout\" | \"maxConcurrency\">\n >;\n\n // Only ever instantiated in main LangChain\n lc_namespace = [\"langchain\", \"llms\", this._llmType()];\n\n /**\n * This method takes an input and options, and returns a string. 
It\n * converts the input to a prompt value and generates a result based on\n * the prompt.\n * @param input Input for the LLM.\n * @param options Options for the LLM call.\n * @returns A string result based on the prompt.\n */\n async invoke(\n input: BaseLanguageModelInput,\n options?: Partial<CallOptions>\n ): Promise<string> {\n const promptValue = BaseLLM._convertInputToPromptValue(input);\n const result = await this.generatePrompt(\n [promptValue],\n options,\n options?.callbacks\n );\n return result.generations[0][0].text;\n }\n\n // eslint-disable-next-line require-yield\n async *_streamResponseChunks(\n _input: string,\n _options: this[\"ParsedCallOptions\"],\n _runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<GenerationChunk> {\n throw new Error(\"Not implemented.\");\n }\n\n protected _separateRunnableConfigFromCallOptionsCompat(\n options?: Partial<CallOptions>\n ): [RunnableConfig, this[\"ParsedCallOptions\"]] {\n // For backwards compat, keep `signal` in both runnableConfig and callOptions\n const [runnableConfig, callOptions] =\n super._separateRunnableConfigFromCallOptions(options);\n (callOptions as this[\"ParsedCallOptions\"]).signal = runnableConfig.signal;\n return [runnableConfig, callOptions as this[\"ParsedCallOptions\"]];\n }\n\n async *_streamIterator(\n input: BaseLanguageModelInput,\n options?: Partial<CallOptions>\n ): AsyncGenerator<string> {\n // Subclass check required to avoid double callbacks with default implementation\n if (\n this._streamResponseChunks === BaseLLM.prototype._streamResponseChunks\n ) {\n yield this.invoke(input, options);\n } else {\n const prompt = BaseLLM._convertInputToPromptValue(input);\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(options);\n const callbackManager_ = await CallbackManager.configure(\n runnableConfig.callbacks,\n this.callbacks,\n runnableConfig.tags,\n this.tags,\n runnableConfig.metadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: callOptions,\n invocation_params: this?.invocationParams(callOptions),\n batch_size: 1,\n };\n const runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n [prompt.toString()],\n runnableConfig.runId,\n undefined,\n extra,\n undefined,\n undefined,\n runnableConfig.runName\n );\n let generation = new GenerationChunk({\n text: \"\",\n });\n try {\n for await (const chunk of this._streamResponseChunks(\n prompt.toString(),\n callOptions,\n runManagers?.[0]\n )) {\n if (!generation) {\n generation = chunk;\n } else {\n generation = generation.concat(chunk);\n }\n if (typeof chunk.text === \"string\") {\n yield chunk.text;\n }\n }\n } catch (err) {\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMError(err)\n )\n );\n throw err;\n }\n await Promise.all(\n (runManagers ?? 
[]).map((runManager) =>\n runManager?.handleLLMEnd({\n generations: [[generation]],\n })\n )\n );\n }\n }\n\n /**\n * This method takes prompt values, options, and callbacks, and generates\n * a result based on the prompts.\n * @param promptValues Prompt values for the LLM.\n * @param options Options for the LLM call.\n * @param callbacks Callbacks for the LLM call.\n * @returns An LLMResult based on the prompts.\n */\n async generatePrompt(\n promptValues: BasePromptValueInterface[],\n options?: string[] | Partial<CallOptions>,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n const prompts: string[] = promptValues.map((promptValue) =>\n promptValue.toString()\n );\n return this.generate(prompts, options, callbacks);\n }\n\n /**\n * Run the LLM on the given prompts and input.\n */\n abstract _generate(\n prompts: string[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<LLMResult>;\n\n /**\n * Get the parameters used to invoke the model\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n invocationParams(_options?: this[\"ParsedCallOptions\"]): any {\n return {};\n }\n\n _flattenLLMResult(llmResult: LLMResult): LLMResult[] {\n const llmResults: LLMResult[] = [];\n\n for (let i = 0; i < llmResult.generations.length; i += 1) {\n const genList = llmResult.generations[i];\n\n if (i === 0) {\n llmResults.push({\n generations: [genList],\n llmOutput: llmResult.llmOutput,\n });\n } else {\n const llmOutput = llmResult.llmOutput\n ? { ...llmResult.llmOutput, tokenUsage: {} }\n : undefined;\n\n llmResults.push({\n generations: [genList],\n llmOutput,\n });\n }\n }\n\n return llmResults;\n }\n\n /** @ignore */\n async _generateUncached(\n prompts: string[],\n parsedOptions: this[\"ParsedCallOptions\"],\n handledOptions: BaseCallbackConfig,\n startedRunManagers?: CallbackManagerForLLMRun[]\n ): Promise<LLMResult> {\n let runManagers: CallbackManagerForLLMRun[] | undefined;\n if (\n startedRunManagers !== undefined &&\n startedRunManagers.length === prompts.length\n ) {\n runManagers = startedRunManagers;\n } else {\n const callbackManager_ = await CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n handledOptions.metadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: this?.invocationParams(parsedOptions),\n batch_size: prompts.length,\n };\n runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n prompts,\n handledOptions.runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions?.runName\n );\n }\n // Even if stream is not explicitly called, check if model is implicitly\n // called from streamEvents() or streamLog() to get all streamed events.\n // Bail out if _streamResponseChunks not overridden\n const hasStreamingHandler = !!runManagers?.[0].handlers.find(\n callbackHandlerPrefersStreaming\n );\n let output: LLMResult;\n if (\n hasStreamingHandler &&\n prompts.length === 1 &&\n this._streamResponseChunks !== BaseLLM.prototype._streamResponseChunks\n ) {\n try {\n const stream = await this._streamResponseChunks(\n prompts[0],\n parsedOptions,\n runManagers?.[0]\n );\n let aggregated;\n for await (const chunk of stream) {\n if (aggregated === undefined) {\n aggregated = chunk;\n } else {\n aggregated = concat(aggregated, chunk);\n }\n }\n if (aggregated === undefined) {\n throw new Error(\"Received empty response from chat model call.\");\n }\n output = { generations: 
[[aggregated]], llmOutput: {} };\n await runManagers?.[0].handleLLMEnd(output);\n } catch (e) {\n await runManagers?.[0].handleLLMError(e);\n throw e;\n }\n } else {\n try {\n output = await this._generate(prompts, parsedOptions, runManagers?.[0]);\n } catch (err) {\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMError(err)\n )\n );\n throw err;\n }\n\n const flattenedOutputs: LLMResult[] = this._flattenLLMResult(output);\n await Promise.all(\n (runManagers ?? []).map((runManager, i) =>\n runManager?.handleLLMEnd(flattenedOutputs[i])\n )\n );\n }\n const runIds = runManagers?.map((manager) => manager.runId) || undefined;\n // This defines RUN_KEY as a non-enumerable property on the output object\n // so that it is not serialized when the output is stringified, and so that\n // it isnt included when listing the keys of the output object.\n Object.defineProperty(output, RUN_KEY, {\n value: runIds ? { runIds } : undefined,\n configurable: true,\n });\n return output;\n }\n\n async _generateCached({\n prompts,\n cache,\n llmStringKey,\n parsedOptions,\n handledOptions,\n runId,\n }: {\n prompts: string[];\n cache: BaseCache<Generation[]>;\n llmStringKey: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsedOptions: any;\n handledOptions: RunnableConfig;\n runId?: string;\n }): Promise<\n LLMResult & {\n missingPromptIndices: number[];\n startedRunManagers?: CallbackManagerForLLMRun[];\n }\n > {\n const callbackManager_ = await CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n handledOptions.metadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: this?.invocationParams(parsedOptions),\n batch_size: prompts.length,\n };\n const runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n prompts,\n runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions?.runName\n );\n\n // generate results\n const missingPromptIndices: number[] = [];\n const results = await Promise.allSettled(\n prompts.map(async (prompt, index) => {\n const result = await cache.lookup(prompt, llmStringKey);\n if (result == null) {\n missingPromptIndices.push(index);\n }\n return result;\n })\n );\n\n // Map run managers to the results before filtering out null results\n // Null results are just absent from the cache.\n const cachedResults = results\n .map((result, index) => ({ result, runManager: runManagers?.[index] }))\n .filter(\n ({ result }) =>\n (result.status === \"fulfilled\" && result.value != null) ||\n result.status === \"rejected\"\n );\n\n // Handle results and call run managers\n const generations: Generation[][] = [];\n await Promise.all(\n cachedResults.map(async ({ result: promiseResult, runManager }, i) => {\n if (promiseResult.status === \"fulfilled\") {\n const result = promiseResult.value as Generation[];\n generations[i] = result.map((result) => {\n result.generationInfo = {\n ...result.generationInfo,\n tokenUsage: {},\n };\n return result;\n });\n if (result.length) {\n await runManager?.handleLLMNewToken(result[0].text);\n }\n return runManager?.handleLLMEnd(\n {\n generations: [result],\n },\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n } else {\n // status === \"rejected\"\n await runManager?.handleLLMError(\n promiseResult.reason,\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n return Promise.reject(promiseResult.reason);\n }\n })\n 
);\n\n const output = {\n generations,\n missingPromptIndices,\n startedRunManagers: runManagers,\n };\n\n // This defines RUN_KEY as a non-enumerable property on the output object\n // so that it is not serialized when the output is stringified, and so that\n // it isnt included when listing the keys of the output object.\n Object.defineProperty(output, RUN_KEY, {\n value: runManagers\n ? { runIds: runManagers?.map((manager) => manager.runId) }\n : undefined,\n configurable: true,\n });\n\n return output;\n }\n\n /**\n * Run the LLM on the given prompts and input, handling caching.\n */\n async generate(\n prompts: string[],\n options?: string[] | Partial<CallOptions>,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n if (!Array.isArray(prompts)) {\n throw new Error(\"Argument 'prompts' is expected to be a string[]\");\n }\n\n let parsedOptions: Partial<CallOptions> | undefined;\n if (Array.isArray(options)) {\n parsedOptions = { stop: options } as Partial<CallOptions>;\n } else {\n parsedOptions = options;\n }\n\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(parsedOptions);\n runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;\n\n if (!this.cache) {\n return this._generateUncached(prompts, callOptions, runnableConfig);\n }\n\n const { cache } = this;\n const llmStringKey = this._getSerializedCacheKeyParametersForCall(\n callOptions as CallOptions\n );\n const { generations, missingPromptIndices, startedRunManagers } =\n await this._generateCached({\n prompts,\n cache,\n llmStringKey,\n parsedOptions: callOptions,\n handledOptions: runnableConfig,\n runId: runnableConfig.runId,\n });\n\n let llmOutput = {};\n if (missingPromptIndices.length > 0) {\n const results = await this._generateUncached(\n missingPromptIndices.map((i) => prompts[i]),\n callOptions,\n runnableConfig,\n startedRunManagers !== undefined\n ? missingPromptIndices.map((i) => startedRunManagers?.[i])\n : undefined\n );\n await Promise.all(\n results.generations.map(async (generation, index) => {\n const promptIndex = missingPromptIndices[index];\n generations[promptIndex] = generation;\n return cache.update(prompts[promptIndex], llmStringKey, generation);\n })\n );\n llmOutput = results.llmOutput ?? 
{};\n }\n\n return { generations, llmOutput } as LLMResult;\n }\n\n /**\n * Get the identifying parameters of the LLM.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _identifyingParams(): Record<string, any> {\n return {};\n }\n\n /**\n * Return the string type key uniquely identifying this class of LLM.\n */\n abstract _llmType(): string;\n\n _modelType(): string {\n return \"base_llm\" as const;\n }\n}\n\n/**\n * LLM class that provides a simpler interface to subclass than {@link BaseLLM}.\n *\n * Requires only implementing a simpler {@link _call} method instead of {@link _generate}.\n *\n * @augments BaseLLM\n */\nexport abstract class LLM<\n CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions,\n> extends BaseLLM<CallOptions> {\n /**\n * Run the LLM on the given prompt and input.\n */\n abstract _call(\n prompt: string,\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<string>;\n\n async _generate(\n prompts: string[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<LLMResult> {\n const generations: Generation[][] = await Promise.all(\n prompts.map((prompt, promptIndex) =>\n this._call(prompt, { ...options, promptIndex }, runManager).then(\n (text) => [{ text }]\n )\n )\n );\n return { generations };\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;AAqCA,IAAsB,UAAtB,MAAsB,gBAEZA,6BAAAA,kBAAuC;CAQ/C,eAAe;EAAC;EAAa;EAAQ,KAAK,UAAU;EAAC;;;;;;;;;CAUrD,MAAM,OACJ,OACA,SACiB;EACjB,MAAM,cAAc,QAAQ,2BAA2B,MAAM;AAM7D,UALe,MAAM,KAAK,eACxB,CAAC,YAAY,EACb,SACA,SAAS,UACV,EACa,YAAY,GAAG,GAAG;;CAIlC,OAAO,sBACL,QACA,UACA,aACiC;AACjC,QAAM,IAAI,MAAM,mBAAmB;;CAGrC,6CACE,SAC6C;EAE7C,MAAM,CAAC,gBAAgB,eACrB,MAAM,uCAAuC,QAAQ;AACtD,cAA0C,SAAS,eAAe;AACnE,SAAO,CAAC,gBAAgB,YAAyC;;CAGnE,OAAO,gBACL,OACA,SACwB;AAExB,MACE,KAAK,0BAA0B,QAAQ,UAAU,sBAEjD,OAAM,KAAK,OAAO,OAAO,QAAQ;OAC5B;GACL,MAAM,SAAS,QAAQ,2BAA2B,MAAM;GACxD,MAAM,CAAC,gBAAgB,eACrB,KAAK,6CAA6C,QAAQ;GAC5D,MAAM,mBAAmB,MAAMC,0BAAAA,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf,KAAK,UACL,EAAE,SAAS,KAAK,SAAS,CAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,mBAAmB,MAAM,iBAAiB,YAAY;IACtD,YAAY;IACb;GACD,MAAM,cAAc,MAAM,kBAAkB,eAC1C,KAAK,QAAQ,EACb,CAAC,OAAO,UAAU,CAAC,EACnB,eAAe,OACf,KAAA,GACA,OACA,KAAA,GACA,KAAA,GACA,eAAe,QAChB;GACD,IAAI,aAAa,IAAIC,gBAAAA,gBAAgB,EACnC,MAAM,IACP,CAAC;AACF,OAAI;AACF,eAAW,MAAM,SAAS,KAAK,sBAC7B,OAAO,UAAU,EACjB,aACA,cAAc,GACf,EAAE;AACD,SAAI,CAAC,WACH,cAAa;SAEb,cAAa,WAAW,OAAO,MAAM;AAEvC,SAAI,OAAO,MAAM,SAAS,SACxB,OAAM,MAAM;;YAGT,KAAK;AACZ,UAAM,QAAQ,KACX,eAAe,EAAE,EAAE,KAAK,eACvB,YAAY,eAAe,IAAI,CAChC,CACF;AACD,UAAM;;AAER,SAAM,QAAQ,KACX,eAAe,EAAE,EAAE,KAAK,eACvB,YAAY,aAAa,EACvB,aAAa,CAAC,CAAC,WAAW,CAAC,EAC5B,CAAC,CACH,CACF;;;;;;;;;;;CAYL,MAAM,eACJ,cACA,SACA,WACoB;EACpB,MAAM,UAAoB,aAAa,KAAK,gBAC1C,YAAY,UAAU,CACvB;AACD,SAAO,KAAK,SAAS,SAAS,SAAS,UAAU;;;;;CAgBnD,iBAAiB,UAA2C;AAC1D,SAAO,EAAE;;CAGX,kBAAkB,WAAmC;EACnD,MAAM,aAA0B,EAAE;AAElC,OAAK,IAAI,IAAI,GAAG,IAAI,UAAU,YAAY,QAAQ,KAAK,GAAG;GACxD,MAAM,UAAU,UAAU,YAAY;AAEtC,OAAI,MAAM,EACR,YAAW,KAAK;IACd,aAAa,CAAC,QAAQ;IACtB,WAAW,UAAU;IACtB,CAAC;QACG;IACL,MAAM,YAAY,UAAU,YACxB;KAAE,GAAG,UAAU;KAAW,YAAY,EAAE;KAAE,GAC1C,KAAA;AAEJ,eAAW,KAAK;KACd,aAAa,CAAC,QAAQ;KACtB;KACD,CAAC;;;AAIN,SAAO;;;CAIT,MAAM,kBACJ,SACA,eACA,gBACA,oBACoB;EACpB,IAAI;AACJ,MACE,uBAAuB,KAAA,KACvB,mBAAmB,WAAW,QAAQ,OAEtC,eAAc;OACT;GACL,MAAM,mBAAmB,MAAMD,0BAAAA,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf,KAAK,UACL,EAAE,SAAS,KAAK,SAAS,CAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,mBAAmB,MAAM,iBAAiB,cAAc;IACxD,YAAY,QAAQ;IACrB;AAC
D,iBAAc,MAAM,kBAAkB,eACpC,KAAK,QAAQ,EACb,SACA,eAAe,OACf,KAAA,GACA,OACA,KAAA,GACA,KAAA,GACA,gBAAgB,QACjB;;EAKH,MAAM,sBAAsB,CAAC,CAAC,cAAc,GAAG,SAAS,KACtDE,uBAAAA,gCACD;EACD,IAAI;AACJ,MACE,uBACA,QAAQ,WAAW,KACnB,KAAK,0BAA0B,QAAQ,UAAU,sBAEjD,KAAI;GACF,MAAM,SAAS,MAAM,KAAK,sBACxB,QAAQ,IACR,eACA,cAAc,GACf;GACD,IAAI;AACJ,cAAW,MAAM,SAAS,OACxB,KAAI,eAAe,KAAA,EACjB,cAAa;OAEb,cAAaC,qBAAAA,OAAO,YAAY,MAAM;AAG1C,OAAI,eAAe,KAAA,EACjB,OAAM,IAAI,MAAM,gDAAgD;AAElE,YAAS;IAAE,aAAa,CAAC,CAAC,WAAW,CAAC;IAAE,WAAW,EAAE;IAAE;AACvD,SAAM,cAAc,GAAG,aAAa,OAAO;WACpC,GAAG;AACV,SAAM,cAAc,GAAG,eAAe,EAAE;AACxC,SAAM;;OAEH;AACL,OAAI;AACF,aAAS,MAAM,KAAK,UAAU,SAAS,eAAe,cAAc,GAAG;YAChE,KAAK;AACZ,UAAM,QAAQ,KACX,eAAe,EAAE,EAAE,KAAK,eACvB,YAAY,eAAe,IAAI,CAChC,CACF;AACD,UAAM;;GAGR,MAAM,mBAAgC,KAAK,kBAAkB,OAAO;AACpE,SAAM,QAAQ,KACX,eAAe,EAAE,EAAE,KAAK,YAAY,MACnC,YAAY,aAAa,iBAAiB,GAAG,CAC9C,CACF;;EAEH,MAAM,SAAS,aAAa,KAAK,YAAY,QAAQ,MAAM,IAAI,KAAA;AAI/D,SAAO,eAAe,QAAQC,gBAAAA,SAAS;GACrC,OAAO,SAAS,EAAE,QAAQ,GAAG,KAAA;GAC7B,cAAc;GACf,CAAC;AACF,SAAO;;CAGT,MAAM,gBAAgB,EACpB,SACA,OACA,cACA,eACA,gBACA,SAcA;EACA,MAAM,mBAAmB,MAAMJ,0BAAAA,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf,KAAK,UACL,EAAE,SAAS,KAAK,SAAS,CAC1B;EACD,MAAM,QAAQ;GACZ,SAAS;GACT,mBAAmB,MAAM,iBAAiB,cAAc;GACxD,YAAY,QAAQ;GACrB;EACD,MAAM,cAAc,MAAM,kBAAkB,eAC1C,KAAK,QAAQ,EACb,SACA,OACA,KAAA,GACA,OACA,KAAA,GACA,KAAA,GACA,gBAAgB,QACjB;EAGD,MAAM,uBAAiC,EAAE;EAazC,MAAM,iBAZU,MAAM,QAAQ,WAC5B,QAAQ,IAAI,OAAO,QAAQ,UAAU;GACnC,MAAM,SAAS,MAAM,MAAM,OAAO,QAAQ,aAAa;AACvD,OAAI,UAAU,KACZ,sBAAqB,KAAK,MAAM;AAElC,UAAO;IACP,CACH,EAKE,KAAK,QAAQ,WAAW;GAAE;GAAQ,YAAY,cAAc;GAAQ,EAAE,CACtE,QACE,EAAE,aACA,OAAO,WAAW,eAAe,OAAO,SAAS,QAClD,OAAO,WAAW,WACrB;EAGH,MAAM,cAA8B,EAAE;AACtC,QAAM,QAAQ,IACZ,cAAc,IAAI,OAAO,EAAE,QAAQ,eAAe,cAAc,MAAM;AACpE,OAAI,cAAc,WAAW,aAAa;IACxC,MAAM,SAAS,cAAc;AAC7B,gBAAY,KAAK,OAAO,KAAK,WAAW;AACtC,YAAO,iBAAiB;MACtB,GAAG,OAAO;MACV,YAAY,EAAE;MACf;AACD,YAAO;MACP;AACF,QAAI,OAAO,OACT,OAAM,YAAY,kBAAkB,OAAO,GAAG,KAAK;AAErD,WAAO,YAAY,aACjB,EACE,aAAa,CAAC,OAAO,EACtB,EACD,KAAA,GACA,KAAA,GACA,KAAA,GACA,EACE,QAAQ,MACT,CACF;UACI;AAEL,UAAM,YAAY,eAChB,cAAc,QACd,KAAA,GACA,KAAA,GACA,KAAA,GACA,EACE,QAAQ,MACT,CACF;AACD,WAAO,QAAQ,OAAO,cAAc,OAAO;;IAE7C,CACH;EAED,MAAM,SAAS;GACb;GACA;GACA,oBAAoB;GACrB;AAKD,SAAO,eAAe,QAAQI,gBAAAA,SAAS;GACrC,OAAO,cACH,EAAE,QAAQ,aAAa,KAAK,YAAY,QAAQ,MAAM,EAAE,GACxD,KAAA;GACJ,cAAc;GACf,CAAC;AAEF,SAAO;;;;;CAMT,MAAM,SACJ,SACA,SACA,WACoB;AACpB,MAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM,kDAAkD;EAGpE,IAAI;AACJ,MAAI,MAAM,QAAQ,QAAQ,CACxB,iBAAgB,EAAE,MAAM,SAAS;MAEjC,iBAAgB;EAGlB,MAAM,CAAC,gBAAgB,eACrB,KAAK,6CAA6C,cAAc;AAClE,iBAAe,YAAY,eAAe,aAAa;AAEvD,MAAI,CAAC,KAAK,MACR,QAAO,KAAK,kBAAkB,SAAS,aAAa,eAAe;EAGrE,MAAM,EAAE,UAAU;EAClB,MAAM,eAAe,KAAK,wCACxB,YACD;EACD,MAAM,EAAE,aAAa,sBAAsB,uBACzC,MAAM,KAAK,gBAAgB;GACzB;GACA;GACA;GACA,eAAe;GACf,gBAAgB;GAChB,OAAO,eAAe;GACvB,CAAC;EAEJ,IAAI,YAAY,EAAE;AAClB,MAAI,qBAAqB,SAAS,GAAG;GACnC,MAAM,UAAU,MAAM,KAAK,kBACzB,qBAAqB,KAAK,MAAM,QAAQ,GAAG,EAC3C,aACA,gBACA,uBAAuB,KAAA,IACnB,qBAAqB,KAAK,MAAM,qBAAqB,GAAG,GACxD,KAAA,EACL;AACD,SAAM,QAAQ,IACZ,QAAQ,YAAY,IAAI,OAAO,YAAY,UAAU;IACnD,MAAM,cAAc,qBAAqB;AACzC,gBAAY,eAAe;AAC3B,WAAO,MAAM,OAAO,QAAQ,cAAc,cAAc,WAAW;KACnE,CACH;AACD,eAAY,QAAQ,aAAa,EAAE;;AAGrC,SAAO;GAAE;GAAa;GAAW;;;;;CAOnC,qBAA0C;AACxC,SAAO,EAAE;;CAQX,aAAqB;AACnB,SAAO;;;;;;;;;;AAWX,IAAsB,MAAtB,cAEU,QAAqB;CAU7B,MAAM,UACJ,SACA,SACA,YACoB;AAQpB,SAAO,EAAE,aAP2B,MAAM,QAAQ,IAChD,QAAQ,KAAK,QAAQ,gBACnB,KAAK,MAAM,QAAQ;GAAE,GAAG;GAAS;GAAa,EAAE,WAAW,CAAC,MACzD,SAAS,CAAC,EAAE,MAAM,CAAC,CACrB,CACF,CACF,EACqB"}
|
|
1
|
+
{"version":3,"file":"llms.cjs","names":["BaseLanguageModel","parseMetadataInvocationParams","CallbackManager","GenerationChunk","callbackHandlerPrefersStreaming","concat","RUN_KEY"],"sources":["../../src/language_models/llms.ts"],"sourcesContent":["import type { BasePromptValueInterface } from \"../prompt_values.js\";\nimport {\n type LLMResult,\n RUN_KEY,\n type Generation,\n GenerationChunk,\n} from \"../outputs.js\";\nimport {\n type BaseCallbackConfig,\n CallbackManager,\n type CallbackManagerForLLMRun,\n type Callbacks,\n} from \"../callbacks/manager.js\";\nimport {\n BaseLanguageModel,\n type BaseLanguageModelCallOptions,\n type BaseLanguageModelInput,\n type BaseLanguageModelParams,\n} from \"./base.js\";\nimport type { RunnableConfig } from \"../runnables/config.js\";\nimport type { BaseCache } from \"../caches/index.js\";\nimport { concat } from \"../utils/stream.js\";\nimport { callbackHandlerPrefersStreaming } from \"../callbacks/base.js\";\nimport { parseMetadataInvocationParams } from \"./utils.js\";\n\nexport type SerializedLLM = {\n _model: string;\n _type: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n} & Record<string, any>;\n\nexport interface BaseLLMParams extends BaseLanguageModelParams {}\n\nexport interface BaseLLMCallOptions extends BaseLanguageModelCallOptions {}\n\n/**\n * LLM Wrapper. Takes in a prompt (or prompts) and returns a string.\n */\nexport abstract class BaseLLM<\n CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions,\n> extends BaseLanguageModel<string, CallOptions> {\n // Backwards compatibility since fields have been moved to RunnableConfig\n declare ParsedCallOptions: Omit<\n CallOptions,\n Exclude<keyof RunnableConfig, \"signal\" | \"timeout\" | \"maxConcurrency\">\n >;\n\n // Only ever instantiated in main LangChain\n lc_namespace = [\"langchain\", \"llms\", this._llmType()];\n\n /**\n * This method takes an input and options, and returns a string. 
It\n * converts the input to a prompt value and generates a result based on\n * the prompt.\n * @param input Input for the LLM.\n * @param options Options for the LLM call.\n * @returns A string result based on the prompt.\n */\n async invoke(\n input: BaseLanguageModelInput,\n options?: Partial<CallOptions>\n ): Promise<string> {\n const promptValue = BaseLLM._convertInputToPromptValue(input);\n const result = await this.generatePrompt(\n [promptValue],\n options,\n options?.callbacks\n );\n return result.generations[0][0].text;\n }\n\n // eslint-disable-next-line require-yield\n async *_streamResponseChunks(\n _input: string,\n _options: this[\"ParsedCallOptions\"],\n _runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<GenerationChunk> {\n throw new Error(\"Not implemented.\");\n }\n\n protected _separateRunnableConfigFromCallOptionsCompat(\n options?: Partial<CallOptions>\n ): [RunnableConfig, this[\"ParsedCallOptions\"]] {\n // For backwards compat, keep `signal` in both runnableConfig and callOptions\n const [runnableConfig, callOptions] =\n super._separateRunnableConfigFromCallOptions(options);\n (callOptions as this[\"ParsedCallOptions\"]).signal = runnableConfig.signal;\n return [runnableConfig, callOptions as this[\"ParsedCallOptions\"]];\n }\n\n async *_streamIterator(\n input: BaseLanguageModelInput,\n options?: Partial<CallOptions>\n ): AsyncGenerator<string> {\n // Subclass check required to avoid double callbacks with default implementation\n if (\n this._streamResponseChunks === BaseLLM.prototype._streamResponseChunks\n ) {\n yield this.invoke(input, options);\n } else {\n const prompt = BaseLLM._convertInputToPromptValue(input);\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(options);\n const invocationParams = this?.invocationParams(callOptions);\n const metadataInvocationParams =\n parseMetadataInvocationParams(invocationParams);\n const callbackManager_ = CallbackManager.configure(\n runnableConfig.callbacks,\n this.callbacks,\n runnableConfig.tags,\n this.tags,\n runnableConfig.metadata,\n { ...metadataInvocationParams, ...this.metadata },\n { verbose: this.verbose }\n );\n const extra = {\n options: callOptions,\n invocation_params: invocationParams,\n batch_size: 1,\n };\n const runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n [prompt.toString()],\n runnableConfig.runId,\n undefined,\n extra,\n undefined,\n undefined,\n runnableConfig.runName\n );\n let generation = new GenerationChunk({\n text: \"\",\n });\n try {\n for await (const chunk of this._streamResponseChunks(\n prompt.toString(),\n callOptions,\n runManagers?.[0]\n )) {\n if (!generation) {\n generation = chunk;\n } else {\n generation = generation.concat(chunk);\n }\n if (typeof chunk.text === \"string\") {\n yield chunk.text;\n }\n }\n } catch (err) {\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMError(err)\n )\n );\n throw err;\n }\n await Promise.all(\n (runManagers ?? 
[]).map((runManager) =>\n runManager?.handleLLMEnd({\n generations: [[generation]],\n })\n )\n );\n }\n }\n\n /**\n * This method takes prompt values, options, and callbacks, and generates\n * a result based on the prompts.\n * @param promptValues Prompt values for the LLM.\n * @param options Options for the LLM call.\n * @param callbacks Callbacks for the LLM call.\n * @returns An LLMResult based on the prompts.\n */\n async generatePrompt(\n promptValues: BasePromptValueInterface[],\n options?: string[] | Partial<CallOptions>,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n const prompts: string[] = promptValues.map((promptValue) =>\n promptValue.toString()\n );\n return this.generate(prompts, options, callbacks);\n }\n\n /**\n * Run the LLM on the given prompts and input.\n */\n abstract _generate(\n prompts: string[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<LLMResult>;\n\n /**\n * Get the parameters used to invoke the model\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n invocationParams(_options?: this[\"ParsedCallOptions\"]): any {\n return {};\n }\n\n _flattenLLMResult(llmResult: LLMResult): LLMResult[] {\n const llmResults: LLMResult[] = [];\n\n for (let i = 0; i < llmResult.generations.length; i += 1) {\n const genList = llmResult.generations[i];\n\n if (i === 0) {\n llmResults.push({\n generations: [genList],\n llmOutput: llmResult.llmOutput,\n });\n } else {\n const llmOutput = llmResult.llmOutput\n ? { ...llmResult.llmOutput, tokenUsage: {} }\n : undefined;\n\n llmResults.push({\n generations: [genList],\n llmOutput,\n });\n }\n }\n\n return llmResults;\n }\n\n /** @ignore */\n async _generateUncached(\n prompts: string[],\n parsedOptions: this[\"ParsedCallOptions\"],\n handledOptions: BaseCallbackConfig,\n startedRunManagers?: CallbackManagerForLLMRun[]\n ): Promise<LLMResult> {\n let runManagers: CallbackManagerForLLMRun[] | undefined;\n if (\n startedRunManagers !== undefined &&\n startedRunManagers.length === prompts.length\n ) {\n runManagers = startedRunManagers;\n } else {\n const invocationParams = this?.invocationParams(parsedOptions);\n const metadataInvocationParams =\n parseMetadataInvocationParams(invocationParams);\n const callbackManager_ = CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n handledOptions.metadata,\n { ...metadataInvocationParams, ...this.metadata },\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: invocationParams,\n batch_size: prompts.length,\n };\n runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n prompts,\n handledOptions.runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions?.runName\n );\n }\n // Even if stream is not explicitly called, check if model is implicitly\n // called from streamEvents() or streamLog() to get all streamed events.\n // Bail out if _streamResponseChunks not overridden\n const hasStreamingHandler = !!runManagers?.[0].handlers.find(\n callbackHandlerPrefersStreaming\n );\n let output: LLMResult;\n if (\n hasStreamingHandler &&\n prompts.length === 1 &&\n this._streamResponseChunks !== BaseLLM.prototype._streamResponseChunks\n ) {\n try {\n const stream = await this._streamResponseChunks(\n prompts[0],\n parsedOptions,\n runManagers?.[0]\n );\n let aggregated;\n for await (const chunk of stream) {\n if (aggregated === undefined) {\n aggregated = chunk;\n } else {\n aggregated = 
concat(aggregated, chunk);\n }\n }\n if (aggregated === undefined) {\n throw new Error(\"Received empty response from chat model call.\");\n }\n output = { generations: [[aggregated]], llmOutput: {} };\n await runManagers?.[0].handleLLMEnd(output);\n } catch (e) {\n await runManagers?.[0].handleLLMError(e);\n throw e;\n }\n } else {\n try {\n output = await this._generate(prompts, parsedOptions, runManagers?.[0]);\n } catch (err) {\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMError(err)\n )\n );\n throw err;\n }\n\n const flattenedOutputs: LLMResult[] = this._flattenLLMResult(output);\n await Promise.all(\n (runManagers ?? []).map((runManager, i) =>\n runManager?.handleLLMEnd(flattenedOutputs[i])\n )\n );\n }\n const runIds = runManagers?.map((manager) => manager.runId) || undefined;\n // This defines RUN_KEY as a non-enumerable property on the output object\n // so that it is not serialized when the output is stringified, and so that\n // it isnt included when listing the keys of the output object.\n Object.defineProperty(output, RUN_KEY, {\n value: runIds ? { runIds } : undefined,\n configurable: true,\n });\n return output;\n }\n\n async _generateCached({\n prompts,\n cache,\n llmStringKey,\n parsedOptions,\n handledOptions,\n runId,\n }: {\n prompts: string[];\n cache: BaseCache<Generation[]>;\n llmStringKey: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsedOptions: any;\n handledOptions: RunnableConfig;\n runId?: string;\n }): Promise<\n LLMResult & {\n missingPromptIndices: number[];\n startedRunManagers?: CallbackManagerForLLMRun[];\n }\n > {\n const invocationParams = this?.invocationParams(parsedOptions);\n const metadataInvocationParams =\n parseMetadataInvocationParams(invocationParams);\n const callbackManager_ = CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n handledOptions.metadata,\n { ...metadataInvocationParams, ...this.metadata },\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: invocationParams,\n batch_size: prompts.length,\n };\n const runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n prompts,\n runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions?.runName\n );\n\n // generate results\n const missingPromptIndices: number[] = [];\n const results = await Promise.allSettled(\n prompts.map(async (prompt, index) => {\n const result = await cache.lookup(prompt, llmStringKey);\n if (result == null) {\n missingPromptIndices.push(index);\n }\n return result;\n })\n );\n\n // Map run managers to the results before filtering out null results\n // Null results are just absent from the cache.\n const cachedResults = results\n .map((result, index) => ({ result, runManager: runManagers?.[index] }))\n .filter(\n ({ result }) =>\n (result.status === \"fulfilled\" && result.value != null) ||\n result.status === \"rejected\"\n );\n\n // Handle results and call run managers\n const generations: Generation[][] = [];\n await Promise.all(\n cachedResults.map(async ({ result: promiseResult, runManager }, i) => {\n if (promiseResult.status === \"fulfilled\") {\n const result = promiseResult.value as Generation[];\n generations[i] = result.map((result) => {\n result.generationInfo = {\n ...result.generationInfo,\n tokenUsage: {},\n };\n return result;\n });\n if (result.length) {\n await runManager?.handleLLMNewToken(result[0].text);\n }\n return 
runManager?.handleLLMEnd(\n {\n generations: [result],\n },\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n } else {\n // status === \"rejected\"\n await runManager?.handleLLMError(\n promiseResult.reason,\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n return Promise.reject(promiseResult.reason);\n }\n })\n );\n\n const output = {\n generations,\n missingPromptIndices,\n startedRunManagers: runManagers,\n };\n\n // This defines RUN_KEY as a non-enumerable property on the output object\n // so that it is not serialized when the output is stringified, and so that\n // it isnt included when listing the keys of the output object.\n Object.defineProperty(output, RUN_KEY, {\n value: runManagers\n ? { runIds: runManagers?.map((manager) => manager.runId) }\n : undefined,\n configurable: true,\n });\n\n return output;\n }\n\n /**\n * Run the LLM on the given prompts and input, handling caching.\n */\n async generate(\n prompts: string[],\n options?: string[] | Partial<CallOptions>,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n if (!Array.isArray(prompts)) {\n throw new Error(\"Argument 'prompts' is expected to be a string[]\");\n }\n\n let parsedOptions: Partial<CallOptions> | undefined;\n if (Array.isArray(options)) {\n parsedOptions = { stop: options } as Partial<CallOptions>;\n } else {\n parsedOptions = options;\n }\n\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(parsedOptions);\n runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;\n\n if (!this.cache) {\n return this._generateUncached(prompts, callOptions, runnableConfig);\n }\n\n const { cache } = this;\n const llmStringKey = this._getSerializedCacheKeyParametersForCall(\n callOptions as CallOptions\n );\n const { generations, missingPromptIndices, startedRunManagers } =\n await this._generateCached({\n prompts,\n cache,\n llmStringKey,\n parsedOptions: callOptions,\n handledOptions: runnableConfig,\n runId: runnableConfig.runId,\n });\n\n let llmOutput = {};\n if (missingPromptIndices.length > 0) {\n const results = await this._generateUncached(\n missingPromptIndices.map((i) => prompts[i]),\n callOptions,\n runnableConfig,\n startedRunManagers !== undefined\n ? missingPromptIndices.map((i) => startedRunManagers?.[i])\n : undefined\n );\n await Promise.all(\n results.generations.map(async (generation, index) => {\n const promptIndex = missingPromptIndices[index];\n generations[promptIndex] = generation;\n return cache.update(prompts[promptIndex], llmStringKey, generation);\n })\n );\n llmOutput = results.llmOutput ?? 
{};\n }\n\n return { generations, llmOutput } as LLMResult;\n }\n\n /**\n * Get the identifying parameters of the LLM.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _identifyingParams(): Record<string, any> {\n return {};\n }\n\n /**\n * Return the string type key uniquely identifying this class of LLM.\n */\n abstract _llmType(): string;\n\n _modelType(): string {\n return \"base_llm\" as const;\n }\n}\n\n/**\n * LLM class that provides a simpler interface to subclass than {@link BaseLLM}.\n *\n * Requires only implementing a simpler {@link _call} method instead of {@link _generate}.\n *\n * @augments BaseLLM\n */\nexport abstract class LLM<\n CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions,\n> extends BaseLLM<CallOptions> {\n /**\n * Run the LLM on the given prompt and input.\n */\n abstract _call(\n prompt: string,\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<string>;\n\n async _generate(\n prompts: string[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<LLMResult> {\n const generations: Generation[][] = await Promise.all(\n prompts.map((prompt, promptIndex) =>\n this._call(prompt, { ...options, promptIndex }, runManager).then(\n (text) => [{ text }]\n )\n )\n );\n return { generations };\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAsCA,IAAsB,UAAtB,MAAsB,gBAEZA,6BAAAA,kBAAuC;CAQ/C,eAAe;EAAC;EAAa;EAAQ,KAAK,UAAU;EAAC;;;;;;;;;CAUrD,MAAM,OACJ,OACA,SACiB;EACjB,MAAM,cAAc,QAAQ,2BAA2B,MAAM;AAM7D,UALe,MAAM,KAAK,eACxB,CAAC,YAAY,EACb,SACA,SAAS,UACV,EACa,YAAY,GAAG,GAAG;;CAIlC,OAAO,sBACL,QACA,UACA,aACiC;AACjC,QAAM,IAAI,MAAM,mBAAmB;;CAGrC,6CACE,SAC6C;EAE7C,MAAM,CAAC,gBAAgB,eACrB,MAAM,uCAAuC,QAAQ;AACtD,cAA0C,SAAS,eAAe;AACnE,SAAO,CAAC,gBAAgB,YAAyC;;CAGnE,OAAO,gBACL,OACA,SACwB;AAExB,MACE,KAAK,0BAA0B,QAAQ,UAAU,sBAEjD,OAAM,KAAK,OAAO,OAAO,QAAQ;OAC5B;GACL,MAAM,SAAS,QAAQ,2BAA2B,MAAM;GACxD,MAAM,CAAC,gBAAgB,eACrB,KAAK,6CAA6C,QAAQ;GAC5D,MAAM,mBAAmB,MAAM,iBAAiB,YAAY;GAC5D,MAAM,2BACJC,cAAAA,8BAA8B,iBAAiB;GACjD,MAAM,mBAAmBC,0BAAAA,gBAAgB,UACvC,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf;IAAE,GAAG;IAA0B,GAAG,KAAK;IAAU,EACjD,EAAE,SAAS,KAAK,SAAS,CAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,mBAAmB;IACnB,YAAY;IACb;GACD,MAAM,cAAc,MAAM,kBAAkB,eAC1C,KAAK,QAAQ,EACb,CAAC,OAAO,UAAU,CAAC,EACnB,eAAe,OACf,KAAA,GACA,OACA,KAAA,GACA,KAAA,GACA,eAAe,QAChB;GACD,IAAI,aAAa,IAAIC,gBAAAA,gBAAgB,EACnC,MAAM,IACP,CAAC;AACF,OAAI;AACF,eAAW,MAAM,SAAS,KAAK,sBAC7B,OAAO,UAAU,EACjB,aACA,cAAc,GACf,EAAE;AACD,SAAI,CAAC,WACH,cAAa;SAEb,cAAa,WAAW,OAAO,MAAM;AAEvC,SAAI,OAAO,MAAM,SAAS,SACxB,OAAM,MAAM;;YAGT,KAAK;AACZ,UAAM,QAAQ,KACX,eAAe,EAAE,EAAE,KAAK,eACvB,YAAY,eAAe,IAAI,CAChC,CACF;AACD,UAAM;;AAER,SAAM,QAAQ,KACX,eAAe,EAAE,EAAE,KAAK,eACvB,YAAY,aAAa,EACvB,aAAa,CAAC,CAAC,WAAW,CAAC,EAC5B,CAAC,CACH,CACF;;;;;;;;;;;CAYL,MAAM,eACJ,cACA,SACA,WACoB;EACpB,MAAM,UAAoB,aAAa,KAAK,gBAC1C,YAAY,UAAU,CACvB;AACD,SAAO,KAAK,SAAS,SAAS,SAAS,UAAU;;;;;CAgBnD,iBAAiB,UAA2C;AAC1D,SAAO,EAAE;;CAGX,kBAAkB,WAAmC;EACnD,MAAM,aAA0B,EAAE;AAElC,OAAK,IAAI,IAAI,GAAG,IAAI,UAAU,YAAY,QAAQ,KAAK,GAAG;GACxD,MAAM,UAAU,UAAU,YAAY;AAEtC,OAAI,MAAM,EACR,YAAW,KAAK;IACd,aAAa,CAAC,QAAQ;IACtB,WAAW,UAAU;IACtB,CAAC;QACG;IACL,MAAM,YAAY,UAAU,YACxB;KAAE,GAAG,UAAU;KAAW,YAAY,EAAE;KAAE,GAC1C,KAAA;AAEJ,eAAW,KAAK;KACd,aAAa,CAAC,QAAQ;KACtB;KACD,CAAC;;;AAIN,SAAO;;;CAIT,MAAM,kBACJ,SACA,eACA,gBACA,oBACoB;EACpB,IAAI;AACJ,MACE,uBAAuB,KAAA,KACvB,mBAAmB,WAAW,QAAQ,OAEtC,eAAc;OACT;GACL,MAAM,mBAAmB,MAAM,iBAAiB,cAAc;GAC9D,MAAM,2BACJF,cAAAA,8BAA8B,iBAAiB;GACjD,MAAM,mBAAmBC,0BAAAA,gBAAgB,UACvC,eAAe,WACf,KAAK,WAC
L,eAAe,MACf,KAAK,MACL,eAAe,UACf;IAAE,GAAG;IAA0B,GAAG,KAAK;IAAU,EACjD,EAAE,SAAS,KAAK,SAAS,CAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,mBAAmB;IACnB,YAAY,QAAQ;IACrB;AACD,iBAAc,MAAM,kBAAkB,eACpC,KAAK,QAAQ,EACb,SACA,eAAe,OACf,KAAA,GACA,OACA,KAAA,GACA,KAAA,GACA,gBAAgB,QACjB;;EAKH,MAAM,sBAAsB,CAAC,CAAC,cAAc,GAAG,SAAS,KACtDE,uBAAAA,gCACD;EACD,IAAI;AACJ,MACE,uBACA,QAAQ,WAAW,KACnB,KAAK,0BAA0B,QAAQ,UAAU,sBAEjD,KAAI;GACF,MAAM,SAAS,MAAM,KAAK,sBACxB,QAAQ,IACR,eACA,cAAc,GACf;GACD,IAAI;AACJ,cAAW,MAAM,SAAS,OACxB,KAAI,eAAe,KAAA,EACjB,cAAa;OAEb,cAAaC,qBAAAA,OAAO,YAAY,MAAM;AAG1C,OAAI,eAAe,KAAA,EACjB,OAAM,IAAI,MAAM,gDAAgD;AAElE,YAAS;IAAE,aAAa,CAAC,CAAC,WAAW,CAAC;IAAE,WAAW,EAAE;IAAE;AACvD,SAAM,cAAc,GAAG,aAAa,OAAO;WACpC,GAAG;AACV,SAAM,cAAc,GAAG,eAAe,EAAE;AACxC,SAAM;;OAEH;AACL,OAAI;AACF,aAAS,MAAM,KAAK,UAAU,SAAS,eAAe,cAAc,GAAG;YAChE,KAAK;AACZ,UAAM,QAAQ,KACX,eAAe,EAAE,EAAE,KAAK,eACvB,YAAY,eAAe,IAAI,CAChC,CACF;AACD,UAAM;;GAGR,MAAM,mBAAgC,KAAK,kBAAkB,OAAO;AACpE,SAAM,QAAQ,KACX,eAAe,EAAE,EAAE,KAAK,YAAY,MACnC,YAAY,aAAa,iBAAiB,GAAG,CAC9C,CACF;;EAEH,MAAM,SAAS,aAAa,KAAK,YAAY,QAAQ,MAAM,IAAI,KAAA;AAI/D,SAAO,eAAe,QAAQC,gBAAAA,SAAS;GACrC,OAAO,SAAS,EAAE,QAAQ,GAAG,KAAA;GAC7B,cAAc;GACf,CAAC;AACF,SAAO;;CAGT,MAAM,gBAAgB,EACpB,SACA,OACA,cACA,eACA,gBACA,SAcA;EACA,MAAM,mBAAmB,MAAM,iBAAiB,cAAc;EAC9D,MAAM,2BACJL,cAAAA,8BAA8B,iBAAiB;EACjD,MAAM,mBAAmBC,0BAAAA,gBAAgB,UACvC,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf;GAAE,GAAG;GAA0B,GAAG,KAAK;GAAU,EACjD,EAAE,SAAS,KAAK,SAAS,CAC1B;EACD,MAAM,QAAQ;GACZ,SAAS;GACT,mBAAmB;GACnB,YAAY,QAAQ;GACrB;EACD,MAAM,cAAc,MAAM,kBAAkB,eAC1C,KAAK,QAAQ,EACb,SACA,OACA,KAAA,GACA,OACA,KAAA,GACA,KAAA,GACA,gBAAgB,QACjB;EAGD,MAAM,uBAAiC,EAAE;EAazC,MAAM,iBAZU,MAAM,QAAQ,WAC5B,QAAQ,IAAI,OAAO,QAAQ,UAAU;GACnC,MAAM,SAAS,MAAM,MAAM,OAAO,QAAQ,aAAa;AACvD,OAAI,UAAU,KACZ,sBAAqB,KAAK,MAAM;AAElC,UAAO;IACP,CACH,EAKE,KAAK,QAAQ,WAAW;GAAE;GAAQ,YAAY,cAAc;GAAQ,EAAE,CACtE,QACE,EAAE,aACA,OAAO,WAAW,eAAe,OAAO,SAAS,QAClD,OAAO,WAAW,WACrB;EAGH,MAAM,cAA8B,EAAE;AACtC,QAAM,QAAQ,IACZ,cAAc,IAAI,OAAO,EAAE,QAAQ,eAAe,cAAc,MAAM;AACpE,OAAI,cAAc,WAAW,aAAa;IACxC,MAAM,SAAS,cAAc;AAC7B,gBAAY,KAAK,OAAO,KAAK,WAAW;AACtC,YAAO,iBAAiB;MACtB,GAAG,OAAO;MACV,YAAY,EAAE;MACf;AACD,YAAO;MACP;AACF,QAAI,OAAO,OACT,OAAM,YAAY,kBAAkB,OAAO,GAAG,KAAK;AAErD,WAAO,YAAY,aACjB,EACE,aAAa,CAAC,OAAO,EACtB,EACD,KAAA,GACA,KAAA,GACA,KAAA,GACA,EACE,QAAQ,MACT,CACF;UACI;AAEL,UAAM,YAAY,eAChB,cAAc,QACd,KAAA,GACA,KAAA,GACA,KAAA,GACA,EACE,QAAQ,MACT,CACF;AACD,WAAO,QAAQ,OAAO,cAAc,OAAO;;IAE7C,CACH;EAED,MAAM,SAAS;GACb;GACA;GACA,oBAAoB;GACrB;AAKD,SAAO,eAAe,QAAQI,gBAAAA,SAAS;GACrC,OAAO,cACH,EAAE,QAAQ,aAAa,KAAK,YAAY,QAAQ,MAAM,EAAE,GACxD,KAAA;GACJ,cAAc;GACf,CAAC;AAEF,SAAO;;;;;CAMT,MAAM,SACJ,SACA,SACA,WACoB;AACpB,MAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM,kDAAkD;EAGpE,IAAI;AACJ,MAAI,MAAM,QAAQ,QAAQ,CACxB,iBAAgB,EAAE,MAAM,SAAS;MAEjC,iBAAgB;EAGlB,MAAM,CAAC,gBAAgB,eACrB,KAAK,6CAA6C,cAAc;AAClE,iBAAe,YAAY,eAAe,aAAa;AAEvD,MAAI,CAAC,KAAK,MACR,QAAO,KAAK,kBAAkB,SAAS,aAAa,eAAe;EAGrE,MAAM,EAAE,UAAU;EAClB,MAAM,eAAe,KAAK,wCACxB,YACD;EACD,MAAM,EAAE,aAAa,sBAAsB,uBACzC,MAAM,KAAK,gBAAgB;GACzB;GACA;GACA;GACA,eAAe;GACf,gBAAgB;GAChB,OAAO,eAAe;GACvB,CAAC;EAEJ,IAAI,YAAY,EAAE;AAClB,MAAI,qBAAqB,SAAS,GAAG;GACnC,MAAM,UAAU,MAAM,KAAK,kBACzB,qBAAqB,KAAK,MAAM,QAAQ,GAAG,EAC3C,aACA,gBACA,uBAAuB,KAAA,IACnB,qBAAqB,KAAK,MAAM,qBAAqB,GAAG,GACxD,KAAA,EACL;AACD,SAAM,QAAQ,IACZ,QAAQ,YAAY,IAAI,OAAO,YAAY,UAAU;IACnD,MAAM,cAAc,qBAAqB;AACzC,gBAAY,eAAe;AAC3B,WAAO,MAAM,OAAO,QAAQ,cAAc,cAAc,WAAW;KACnE,CACH;AACD,eAAY,QAAQ,aAAa,EAAE;;AAGrC,SAAO;GAAE;GAAa;GAAW;;;;;CAOnC,qBAA0C;AACxC,SAAO,EAAE;;CAQX,aAAqB;AACnB,SAAO;;;;;;;;;;AAWX,IAAsB,MAAtB,cAEU,QAAqB;CAU7B,MAA
M,UACJ,SACA,SACA,YACoB;AAQpB,SAAO,EAAE,aAP2B,MAAM,QAAQ,IAChD,QAAQ,KAAK,QAAQ,gBACnB,KAAK,MAAM,QAAQ;GAAE,GAAG;GAAS;GAAa,EAAE,WAAW,CAAC,MACzD,SAAS,CAAC,EAAE,MAAM,CAAC,CACrB,CACF,CACF,EACqB"}
package/dist/language_models/llms.d.cts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"llms.d.cts","names":[],"sources":["../../src/language_models/llms.ts"],"mappings":";;;;;;;;
+
{"version":3,"file":"llms.d.cts","names":[],"sources":["../../src/language_models/llms.ts"],"mappings":";;;;;;;;KAyBY,aAAA;EACV,MAAA;EACA,KAAA;AAAA,IAEE,MAAA;AAAA,UAEa,aAAA,SAAsB,uBAAA;AAAA,UAEtB,kBAAA,SAA2B,4BAAA;;;;uBAKtB,OAAA,qBACA,kBAAA,GAAqB,kBAAA,UACjC,iBAAA,SAA0B,WAAA;EAE1B,iBAAA,EAAmB,IAAA,CACzB,WAAA,EACA,OAAA,OAAc,cAAA;EAIhB,YAAA;EAjBqC;;AAEvC;;;;;AAKA;EAoBQ,MAAA,CACJ,KAAA,EAAO,sBAAA,EACP,OAAA,GAAU,OAAA,CAAQ,WAAA,IACjB,OAAA;EAWI,qBAAA,CACL,MAAA,UACA,QAAA,6BACA,WAAA,GAAc,wBAAA,GACb,cAAA,CAAe,eAAA;EAAA,UAIR,4CAAA,CACR,OAAA,GAAU,OAAA,CAAQ,WAAA,KAChB,cAAA;EAQG,eAAA,CACL,KAAA,EAAO,sBAAA,EACP,OAAA,GAAU,OAAA,CAAQ,WAAA,IACjB,cAAA;EArD+B;;;;;;;;EAsI5B,cAAA,CACJ,YAAA,EAAc,wBAAA,IACd,OAAA,cAAqB,OAAA,CAAQ,WAAA,GAC7B,SAAA,GAAY,SAAA,GACX,OAAA,CAAQ,SAAA;EAvGK;;;EAAA,SAiHP,SAAA,CACP,OAAA,YACA,OAAA,6BACA,UAAA,GAAa,wBAAA,GACZ,OAAA,CAAQ,SAAA;EA/GC;;;EAqHZ,gBAAA,CAAiB,QAAA;EAIjB,iBAAA,CAAkB,SAAA,EAAW,SAAA,GAAY,SAAA;EA7GtC;EAwIG,iBAAA,CACJ,OAAA,YACA,aAAA,6BACA,cAAA,EAAgB,kBAAA,EAChB,kBAAA,GAAqB,wBAAA,KACpB,OAAA,CAAQ,SAAA;EAqGL,eAAA,CAAA;IACJ,OAAA;IACA,KAAA;IACA,YAAA;IACA,aAAA;IACA,cAAA;IACA;EAAA;IAEA,OAAA;IACA,KAAA,EAAO,SAAA,CAAU,UAAA;IACjB,YAAA;IAEA,aAAA;IACA,cAAA,EAAgB,cAAA;IAChB,KAAA;EAAA,IACE,OAAA,CACF,SAAA;IACE,oBAAA;IACA,kBAAA,GAAqB,wBAAA;EAAA;EAxHF;;;EA8OjB,QAAA,CACJ,OAAA,YACA,OAAA,cAAqB,OAAA,CAAQ,WAAA,GAC7B,SAAA,GAAY,SAAA,GACX,OAAA,CAAQ,SAAA;EA1IT;;;EAuMF,kBAAA,CAAA,GAAsB,MAAA;EAnMpB;;;EAAA,SA0MO,QAAA,CAAA;EAET,UAAA,CAAA;AAAA;;;;;;;;uBAYoB,GAAA,qBACA,kBAAA,GAAqB,kBAAA,UACjC,OAAA,CAAQ,WAAA;EAvgBS;;;EAAA,SA2gBhB,KAAA,CACP,MAAA,UACA,OAAA,6BACA,UAAA,GAAa,wBAAA,GACZ,OAAA;EAEG,SAAA,CACJ,OAAA,YACA,OAAA,6BACA,UAAA,GAAa,wBAAA,GACZ,OAAA,CAAQ,SAAA;AAAA"}
package/dist/language_models/llms.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"llms.d.ts","names":[],"sources":["../../src/language_models/llms.ts"],"mappings":";;;;;;;;
+
{"version":3,"file":"llms.d.ts","names":[],"sources":["../../src/language_models/llms.ts"],"mappings":";;;;;;;;KAyBY,aAAA;EACV,MAAA;EACA,KAAA;AAAA,IAEE,MAAA;AAAA,UAEa,aAAA,SAAsB,uBAAA;AAAA,UAEtB,kBAAA,SAA2B,4BAAA;;;;uBAKtB,OAAA,qBACA,kBAAA,GAAqB,kBAAA,UACjC,iBAAA,SAA0B,WAAA;EAE1B,iBAAA,EAAmB,IAAA,CACzB,WAAA,EACA,OAAA,OAAc,cAAA;EAIhB,YAAA;;;;AAfF;;;;;EAyBQ,MAAA,CACJ,KAAA,EAAO,sBAAA,EACP,OAAA,GAAU,OAAA,CAAQ,WAAA,IACjB,OAAA;EAWI,qBAAA,CACL,MAAA,UACA,QAAA,6BACA,WAAA,GAAc,wBAAA,GACb,cAAA,CAAe,eAAA;EAAA,UAIR,4CAAA,CACR,OAAA,GAAU,OAAA,CAAQ,WAAA,KAChB,cAAA;EAQG,eAAA,CACL,KAAA,EAAO,sBAAA,EACP,OAAA,GAAU,OAAA,CAAQ,WAAA,IACjB,cAAA;EAtDsC;;;;;;;;EAuInC,cAAA,CACJ,YAAA,EAAc,wBAAA,IACd,OAAA,cAAqB,OAAA,CAAQ,WAAA,GAC7B,SAAA,GAAY,SAAA,GACX,OAAA,CAAQ,SAAA;EArHR;;;EAAA,SA+HM,SAAA,CACP,OAAA,YACA,OAAA,6BACA,UAAA,GAAa,wBAAA,GACZ,OAAA,CAAQ,SAAA;EA/GS;;;EAqHpB,gBAAA,CAAiB,QAAA;EAIjB,iBAAA,CAAkB,SAAA,EAAW,SAAA,GAAY,SAAA;EA9G7B;EAyIN,iBAAA,CACJ,OAAA,YACA,aAAA,6BACA,cAAA,EAAgB,kBAAA,EAChB,kBAAA,GAAqB,wBAAA,KACpB,OAAA,CAAQ,SAAA;EAqGL,eAAA,CAAA;IACJ,OAAA;IACA,KAAA;IACA,YAAA;IACA,aAAA;IACA,cAAA;IACA;EAAA;IAEA,OAAA;IACA,KAAA,EAAO,SAAA,CAAU,UAAA;IACjB,YAAA;IAEA,aAAA;IACA,cAAA,EAAgB,cAAA;IAChB,KAAA;EAAA,IACE,OAAA,CACF,SAAA;IACE,oBAAA;IACA,kBAAA,GAAqB,wBAAA;EAAA;EAzHP;;;EA+OZ,QAAA,CACJ,OAAA,YACA,OAAA,cAAqB,OAAA,CAAQ,WAAA,GAC7B,SAAA,GAAY,SAAA,GACX,OAAA,CAAQ,SAAA;EA3IT;;;EAwMF,kBAAA,CAAA,GAAsB,MAAA;EApMpB;;;EAAA,SA2MO,QAAA,CAAA;EAET,UAAA,CAAA;AAAA;;;;;;;;uBAYoB,GAAA,qBACA,kBAAA,GAAqB,kBAAA,UACjC,OAAA,CAAQ,WAAA;EAvgBR;;;EAAA,SA2gBC,KAAA,CACP,MAAA,UACA,OAAA,6BACA,UAAA,GAAa,wBAAA,GACZ,OAAA;EAEG,SAAA,CACJ,OAAA,YACA,OAAA,6BACA,UAAA,GAAa,wBAAA,GACZ,OAAA,CAAQ,SAAA;AAAA"}
package/dist/language_models/llms.js
CHANGED
@@ -4,6 +4,7 @@ import { CallbackManager } from "../callbacks/manager.js";
 import { concat } from "../utils/stream.js";
 import { GenerationChunk, RUN_KEY } from "../outputs.js";
 import { BaseLanguageModel } from "./base.js";
+import { parseMetadataInvocationParams } from "./utils.js";
 //#region src/language_models/llms.ts
 var llms_exports = /* @__PURE__ */ __exportAll({
 BaseLLM: () => BaseLLM,
@@ -43,10 +44,15 @@ var BaseLLM = class BaseLLM extends BaseLanguageModel {
 else {
 const prompt = BaseLLM._convertInputToPromptValue(input);
 const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptionsCompat(options);
-const callbackManager_ = await CallbackManager.configure(runnableConfig.callbacks, this.callbacks, runnableConfig.tags, this.tags, runnableConfig.metadata, this.metadata, { verbose: this.verbose });
+const invocationParams = this?.invocationParams(callOptions);
+const metadataInvocationParams = parseMetadataInvocationParams(invocationParams);
+const callbackManager_ = CallbackManager.configure(runnableConfig.callbacks, this.callbacks, runnableConfig.tags, this.tags, runnableConfig.metadata, {
+...metadataInvocationParams,
+...this.metadata
+}, { verbose: this.verbose });
 const extra = {
 options: callOptions,
-invocation_params: this?.invocationParams(callOptions),
+invocation_params: invocationParams,
 batch_size: 1
 };
 const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), [prompt.toString()], runnableConfig.runId, void 0, extra, void 0, void 0, runnableConfig.runName);
@@ -108,10 +114,15 @@ var BaseLLM = class BaseLLM extends BaseLanguageModel {
 let runManagers;
 if (startedRunManagers !== void 0 && startedRunManagers.length === prompts.length) runManagers = startedRunManagers;
 else {
-const callbackManager_ = await CallbackManager.configure(handledOptions.callbacks, this.callbacks, handledOptions.tags, this.tags, handledOptions.metadata, this.metadata, { verbose: this.verbose });
+const invocationParams = this?.invocationParams(parsedOptions);
+const metadataInvocationParams = parseMetadataInvocationParams(invocationParams);
+const callbackManager_ = CallbackManager.configure(handledOptions.callbacks, this.callbacks, handledOptions.tags, this.tags, handledOptions.metadata, {
+...metadataInvocationParams,
+...this.metadata
+}, { verbose: this.verbose });
 const extra = {
 options: parsedOptions,
-invocation_params: this?.invocationParams(parsedOptions),
+invocation_params: invocationParams,
 batch_size: prompts.length
 };
 runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, handledOptions.runId, void 0, extra, void 0, void 0, handledOptions?.runName);
@@ -151,10 +162,15 @@ var BaseLLM = class BaseLLM extends BaseLanguageModel {
 return output;
 }
 async _generateCached({ prompts, cache, llmStringKey, parsedOptions, handledOptions, runId }) {
-const callbackManager_ = await CallbackManager.configure(handledOptions.callbacks, this.callbacks, handledOptions.tags, this.tags, handledOptions.metadata, this.metadata, { verbose: this.verbose });
+const invocationParams = this?.invocationParams(parsedOptions);
+const metadataInvocationParams = parseMetadataInvocationParams(invocationParams);
+const callbackManager_ = CallbackManager.configure(handledOptions.callbacks, this.callbacks, handledOptions.tags, this.tags, handledOptions.metadata, {
+...metadataInvocationParams,
+...this.metadata
+}, { verbose: this.verbose });
 const extra = {
 options: parsedOptions,
-invocation_params: this?.invocationParams(parsedOptions),
+invocation_params: invocationParams,
 batch_size: prompts.length
 };
 const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, runId, void 0, extra, void 0, void 0, handledOptions?.runName);
package/dist/language_models/llms.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"llms.js","names":[],"sources":["../../src/language_models/llms.ts"],"sourcesContent":["import type { BasePromptValueInterface } from \"../prompt_values.js\";\nimport {\n type LLMResult,\n RUN_KEY,\n type Generation,\n GenerationChunk,\n} from \"../outputs.js\";\nimport {\n type BaseCallbackConfig,\n CallbackManager,\n type CallbackManagerForLLMRun,\n type Callbacks,\n} from \"../callbacks/manager.js\";\nimport {\n BaseLanguageModel,\n type BaseLanguageModelCallOptions,\n type BaseLanguageModelInput,\n type BaseLanguageModelParams,\n} from \"./base.js\";\nimport type { RunnableConfig } from \"../runnables/config.js\";\nimport type { BaseCache } from \"../caches/index.js\";\nimport { concat } from \"../utils/stream.js\";\nimport { callbackHandlerPrefersStreaming } from \"../callbacks/base.js\";\n\nexport type SerializedLLM = {\n _model: string;\n _type: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n} & Record<string, any>;\n\nexport interface BaseLLMParams extends BaseLanguageModelParams {}\n\nexport interface BaseLLMCallOptions extends BaseLanguageModelCallOptions {}\n\n/**\n * LLM Wrapper. Takes in a prompt (or prompts) and returns a string.\n */\nexport abstract class BaseLLM<\n CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions,\n> extends BaseLanguageModel<string, CallOptions> {\n // Backwards compatibility since fields have been moved to RunnableConfig\n declare ParsedCallOptions: Omit<\n CallOptions,\n Exclude<keyof RunnableConfig, \"signal\" | \"timeout\" | \"maxConcurrency\">\n >;\n\n // Only ever instantiated in main LangChain\n lc_namespace = [\"langchain\", \"llms\", this._llmType()];\n\n /**\n * This method takes an input and options, and returns a string. It\n * converts the input to a prompt value and generates a result based on\n * the prompt.\n * @param input Input for the LLM.\n * @param options Options for the LLM call.\n * @returns A string result based on the prompt.\n */\n async invoke(\n input: BaseLanguageModelInput,\n options?: Partial<CallOptions>\n ): Promise<string> {\n const promptValue = BaseLLM._convertInputToPromptValue(input);\n const result = await this.generatePrompt(\n [promptValue],\n options,\n options?.callbacks\n );\n return result.generations[0][0].text;\n }\n\n // eslint-disable-next-line require-yield\n async *_streamResponseChunks(\n _input: string,\n _options: this[\"ParsedCallOptions\"],\n _runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<GenerationChunk> {\n throw new Error(\"Not implemented.\");\n }\n\n protected _separateRunnableConfigFromCallOptionsCompat(\n options?: Partial<CallOptions>\n ): [RunnableConfig, this[\"ParsedCallOptions\"]] {\n // For backwards compat, keep `signal` in both runnableConfig and callOptions\n const [runnableConfig, callOptions] =\n super._separateRunnableConfigFromCallOptions(options);\n (callOptions as this[\"ParsedCallOptions\"]).signal = runnableConfig.signal;\n return [runnableConfig, callOptions as this[\"ParsedCallOptions\"]];\n }\n\n async *_streamIterator(\n input: BaseLanguageModelInput,\n options?: Partial<CallOptions>\n ): AsyncGenerator<string> {\n // Subclass check required to avoid double callbacks with default implementation\n if (\n this._streamResponseChunks === BaseLLM.prototype._streamResponseChunks\n ) {\n yield this.invoke(input, options);\n } else {\n const prompt = BaseLLM._convertInputToPromptValue(input);\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(options);\n const 
callbackManager_ = await CallbackManager.configure(\n runnableConfig.callbacks,\n this.callbacks,\n runnableConfig.tags,\n this.tags,\n runnableConfig.metadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: callOptions,\n invocation_params: this?.invocationParams(callOptions),\n batch_size: 1,\n };\n const runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n [prompt.toString()],\n runnableConfig.runId,\n undefined,\n extra,\n undefined,\n undefined,\n runnableConfig.runName\n );\n let generation = new GenerationChunk({\n text: \"\",\n });\n try {\n for await (const chunk of this._streamResponseChunks(\n prompt.toString(),\n callOptions,\n runManagers?.[0]\n )) {\n if (!generation) {\n generation = chunk;\n } else {\n generation = generation.concat(chunk);\n }\n if (typeof chunk.text === \"string\") {\n yield chunk.text;\n }\n }\n } catch (err) {\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMError(err)\n )\n );\n throw err;\n }\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMEnd({\n generations: [[generation]],\n })\n )\n );\n }\n }\n\n /**\n * This method takes prompt values, options, and callbacks, and generates\n * a result based on the prompts.\n * @param promptValues Prompt values for the LLM.\n * @param options Options for the LLM call.\n * @param callbacks Callbacks for the LLM call.\n * @returns An LLMResult based on the prompts.\n */\n async generatePrompt(\n promptValues: BasePromptValueInterface[],\n options?: string[] | Partial<CallOptions>,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n const prompts: string[] = promptValues.map((promptValue) =>\n promptValue.toString()\n );\n return this.generate(prompts, options, callbacks);\n }\n\n /**\n * Run the LLM on the given prompts and input.\n */\n abstract _generate(\n prompts: string[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<LLMResult>;\n\n /**\n * Get the parameters used to invoke the model\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n invocationParams(_options?: this[\"ParsedCallOptions\"]): any {\n return {};\n }\n\n _flattenLLMResult(llmResult: LLMResult): LLMResult[] {\n const llmResults: LLMResult[] = [];\n\n for (let i = 0; i < llmResult.generations.length; i += 1) {\n const genList = llmResult.generations[i];\n\n if (i === 0) {\n llmResults.push({\n generations: [genList],\n llmOutput: llmResult.llmOutput,\n });\n } else {\n const llmOutput = llmResult.llmOutput\n ? 
{ ...llmResult.llmOutput, tokenUsage: {} }\n : undefined;\n\n llmResults.push({\n generations: [genList],\n llmOutput,\n });\n }\n }\n\n return llmResults;\n }\n\n /** @ignore */\n async _generateUncached(\n prompts: string[],\n parsedOptions: this[\"ParsedCallOptions\"],\n handledOptions: BaseCallbackConfig,\n startedRunManagers?: CallbackManagerForLLMRun[]\n ): Promise<LLMResult> {\n let runManagers: CallbackManagerForLLMRun[] | undefined;\n if (\n startedRunManagers !== undefined &&\n startedRunManagers.length === prompts.length\n ) {\n runManagers = startedRunManagers;\n } else {\n const callbackManager_ = await CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n handledOptions.metadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: this?.invocationParams(parsedOptions),\n batch_size: prompts.length,\n };\n runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n prompts,\n handledOptions.runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions?.runName\n );\n }\n // Even if stream is not explicitly called, check if model is implicitly\n // called from streamEvents() or streamLog() to get all streamed events.\n // Bail out if _streamResponseChunks not overridden\n const hasStreamingHandler = !!runManagers?.[0].handlers.find(\n callbackHandlerPrefersStreaming\n );\n let output: LLMResult;\n if (\n hasStreamingHandler &&\n prompts.length === 1 &&\n this._streamResponseChunks !== BaseLLM.prototype._streamResponseChunks\n ) {\n try {\n const stream = await this._streamResponseChunks(\n prompts[0],\n parsedOptions,\n runManagers?.[0]\n );\n let aggregated;\n for await (const chunk of stream) {\n if (aggregated === undefined) {\n aggregated = chunk;\n } else {\n aggregated = concat(aggregated, chunk);\n }\n }\n if (aggregated === undefined) {\n throw new Error(\"Received empty response from chat model call.\");\n }\n output = { generations: [[aggregated]], llmOutput: {} };\n await runManagers?.[0].handleLLMEnd(output);\n } catch (e) {\n await runManagers?.[0].handleLLMError(e);\n throw e;\n }\n } else {\n try {\n output = await this._generate(prompts, parsedOptions, runManagers?.[0]);\n } catch (err) {\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMError(err)\n )\n );\n throw err;\n }\n\n const flattenedOutputs: LLMResult[] = this._flattenLLMResult(output);\n await Promise.all(\n (runManagers ?? []).map((runManager, i) =>\n runManager?.handleLLMEnd(flattenedOutputs[i])\n )\n );\n }\n const runIds = runManagers?.map((manager) => manager.runId) || undefined;\n // This defines RUN_KEY as a non-enumerable property on the output object\n // so that it is not serialized when the output is stringified, and so that\n // it isnt included when listing the keys of the output object.\n Object.defineProperty(output, RUN_KEY, {\n value: runIds ? 
{ runIds } : undefined,\n configurable: true,\n });\n return output;\n }\n\n async _generateCached({\n prompts,\n cache,\n llmStringKey,\n parsedOptions,\n handledOptions,\n runId,\n }: {\n prompts: string[];\n cache: BaseCache<Generation[]>;\n llmStringKey: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsedOptions: any;\n handledOptions: RunnableConfig;\n runId?: string;\n }): Promise<\n LLMResult & {\n missingPromptIndices: number[];\n startedRunManagers?: CallbackManagerForLLMRun[];\n }\n > {\n const callbackManager_ = await CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n handledOptions.metadata,\n this.metadata,\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: this?.invocationParams(parsedOptions),\n batch_size: prompts.length,\n };\n const runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n prompts,\n runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions?.runName\n );\n\n // generate results\n const missingPromptIndices: number[] = [];\n const results = await Promise.allSettled(\n prompts.map(async (prompt, index) => {\n const result = await cache.lookup(prompt, llmStringKey);\n if (result == null) {\n missingPromptIndices.push(index);\n }\n return result;\n })\n );\n\n // Map run managers to the results before filtering out null results\n // Null results are just absent from the cache.\n const cachedResults = results\n .map((result, index) => ({ result, runManager: runManagers?.[index] }))\n .filter(\n ({ result }) =>\n (result.status === \"fulfilled\" && result.value != null) ||\n result.status === \"rejected\"\n );\n\n // Handle results and call run managers\n const generations: Generation[][] = [];\n await Promise.all(\n cachedResults.map(async ({ result: promiseResult, runManager }, i) => {\n if (promiseResult.status === \"fulfilled\") {\n const result = promiseResult.value as Generation[];\n generations[i] = result.map((result) => {\n result.generationInfo = {\n ...result.generationInfo,\n tokenUsage: {},\n };\n return result;\n });\n if (result.length) {\n await runManager?.handleLLMNewToken(result[0].text);\n }\n return runManager?.handleLLMEnd(\n {\n generations: [result],\n },\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n } else {\n // status === \"rejected\"\n await runManager?.handleLLMError(\n promiseResult.reason,\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n return Promise.reject(promiseResult.reason);\n }\n })\n );\n\n const output = {\n generations,\n missingPromptIndices,\n startedRunManagers: runManagers,\n };\n\n // This defines RUN_KEY as a non-enumerable property on the output object\n // so that it is not serialized when the output is stringified, and so that\n // it isnt included when listing the keys of the output object.\n Object.defineProperty(output, RUN_KEY, {\n value: runManagers\n ? 
{ runIds: runManagers?.map((manager) => manager.runId) }\n : undefined,\n configurable: true,\n });\n\n return output;\n }\n\n /**\n * Run the LLM on the given prompts and input, handling caching.\n */\n async generate(\n prompts: string[],\n options?: string[] | Partial<CallOptions>,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n if (!Array.isArray(prompts)) {\n throw new Error(\"Argument 'prompts' is expected to be a string[]\");\n }\n\n let parsedOptions: Partial<CallOptions> | undefined;\n if (Array.isArray(options)) {\n parsedOptions = { stop: options } as Partial<CallOptions>;\n } else {\n parsedOptions = options;\n }\n\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(parsedOptions);\n runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;\n\n if (!this.cache) {\n return this._generateUncached(prompts, callOptions, runnableConfig);\n }\n\n const { cache } = this;\n const llmStringKey = this._getSerializedCacheKeyParametersForCall(\n callOptions as CallOptions\n );\n const { generations, missingPromptIndices, startedRunManagers } =\n await this._generateCached({\n prompts,\n cache,\n llmStringKey,\n parsedOptions: callOptions,\n handledOptions: runnableConfig,\n runId: runnableConfig.runId,\n });\n\n let llmOutput = {};\n if (missingPromptIndices.length > 0) {\n const results = await this._generateUncached(\n missingPromptIndices.map((i) => prompts[i]),\n callOptions,\n runnableConfig,\n startedRunManagers !== undefined\n ? missingPromptIndices.map((i) => startedRunManagers?.[i])\n : undefined\n );\n await Promise.all(\n results.generations.map(async (generation, index) => {\n const promptIndex = missingPromptIndices[index];\n generations[promptIndex] = generation;\n return cache.update(prompts[promptIndex], llmStringKey, generation);\n })\n );\n llmOutput = results.llmOutput ?? 
{};\n }\n\n return { generations, llmOutput } as LLMResult;\n }\n\n /**\n * Get the identifying parameters of the LLM.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _identifyingParams(): Record<string, any> {\n return {};\n }\n\n /**\n * Return the string type key uniquely identifying this class of LLM.\n */\n abstract _llmType(): string;\n\n _modelType(): string {\n return \"base_llm\" as const;\n }\n}\n\n/**\n * LLM class that provides a simpler interface to subclass than {@link BaseLLM}.\n *\n * Requires only implementing a simpler {@link _call} method instead of {@link _generate}.\n *\n * @augments BaseLLM\n */\nexport abstract class LLM<\n CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions,\n> extends BaseLLM<CallOptions> {\n /**\n * Run the LLM on the given prompt and input.\n */\n abstract _call(\n prompt: string,\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<string>;\n\n async _generate(\n prompts: string[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<LLMResult> {\n const generations: Generation[][] = await Promise.all(\n prompts.map((prompt, promptIndex) =>\n this._call(prompt, { ...options, promptIndex }, runManager).then(\n (text) => [{ text }]\n )\n )\n );\n return { generations };\n }\n}\n"],"mappings":";;;;;;;;;;;;;;AAqCA,IAAsB,UAAtB,MAAsB,gBAEZ,kBAAuC;CAQ/C,eAAe;EAAC;EAAa;EAAQ,KAAK,UAAU;EAAC;;;;;;;;;CAUrD,MAAM,OACJ,OACA,SACiB;EACjB,MAAM,cAAc,QAAQ,2BAA2B,MAAM;AAM7D,UALe,MAAM,KAAK,eACxB,CAAC,YAAY,EACb,SACA,SAAS,UACV,EACa,YAAY,GAAG,GAAG;;CAIlC,OAAO,sBACL,QACA,UACA,aACiC;AACjC,QAAM,IAAI,MAAM,mBAAmB;;CAGrC,6CACE,SAC6C;EAE7C,MAAM,CAAC,gBAAgB,eACrB,MAAM,uCAAuC,QAAQ;AACtD,cAA0C,SAAS,eAAe;AACnE,SAAO,CAAC,gBAAgB,YAAyC;;CAGnE,OAAO,gBACL,OACA,SACwB;AAExB,MACE,KAAK,0BAA0B,QAAQ,UAAU,sBAEjD,OAAM,KAAK,OAAO,OAAO,QAAQ;OAC5B;GACL,MAAM,SAAS,QAAQ,2BAA2B,MAAM;GACxD,MAAM,CAAC,gBAAgB,eACrB,KAAK,6CAA6C,QAAQ;GAC5D,MAAM,mBAAmB,MAAM,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf,KAAK,UACL,EAAE,SAAS,KAAK,SAAS,CAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,mBAAmB,MAAM,iBAAiB,YAAY;IACtD,YAAY;IACb;GACD,MAAM,cAAc,MAAM,kBAAkB,eAC1C,KAAK,QAAQ,EACb,CAAC,OAAO,UAAU,CAAC,EACnB,eAAe,OACf,KAAA,GACA,OACA,KAAA,GACA,KAAA,GACA,eAAe,QAChB;GACD,IAAI,aAAa,IAAI,gBAAgB,EACnC,MAAM,IACP,CAAC;AACF,OAAI;AACF,eAAW,MAAM,SAAS,KAAK,sBAC7B,OAAO,UAAU,EACjB,aACA,cAAc,GACf,EAAE;AACD,SAAI,CAAC,WACH,cAAa;SAEb,cAAa,WAAW,OAAO,MAAM;AAEvC,SAAI,OAAO,MAAM,SAAS,SACxB,OAAM,MAAM;;YAGT,KAAK;AACZ,UAAM,QAAQ,KACX,eAAe,EAAE,EAAE,KAAK,eACvB,YAAY,eAAe,IAAI,CAChC,CACF;AACD,UAAM;;AAER,SAAM,QAAQ,KACX,eAAe,EAAE,EAAE,KAAK,eACvB,YAAY,aAAa,EACvB,aAAa,CAAC,CAAC,WAAW,CAAC,EAC5B,CAAC,CACH,CACF;;;;;;;;;;;CAYL,MAAM,eACJ,cACA,SACA,WACoB;EACpB,MAAM,UAAoB,aAAa,KAAK,gBAC1C,YAAY,UAAU,CACvB;AACD,SAAO,KAAK,SAAS,SAAS,SAAS,UAAU;;;;;CAgBnD,iBAAiB,UAA2C;AAC1D,SAAO,EAAE;;CAGX,kBAAkB,WAAmC;EACnD,MAAM,aAA0B,EAAE;AAElC,OAAK,IAAI,IAAI,GAAG,IAAI,UAAU,YAAY,QAAQ,KAAK,GAAG;GACxD,MAAM,UAAU,UAAU,YAAY;AAEtC,OAAI,MAAM,EACR,YAAW,KAAK;IACd,aAAa,CAAC,QAAQ;IACtB,WAAW,UAAU;IACtB,CAAC;QACG;IACL,MAAM,YAAY,UAAU,YACxB;KAAE,GAAG,UAAU;KAAW,YAAY,EAAE;KAAE,GAC1C,KAAA;AAEJ,eAAW,KAAK;KACd,aAAa,CAAC,QAAQ;KACtB;KACD,CAAC;;;AAIN,SAAO;;;CAIT,MAAM,kBACJ,SACA,eACA,gBACA,oBACoB;EACpB,IAAI;AACJ,MACE,uBAAuB,KAAA,KACvB,mBAAmB,WAAW,QAAQ,OAEtC,eAAc;OACT;GACL,MAAM,mBAAmB,MAAM,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf,KAAK,UACL,EAAE,SAAS,KAAK,SAAS,CAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,mBAAmB,MAAM,iBAAiB,cAAc;IACxD,YAAY,QAAQ;IACrB;AACD,iBAAc,MAAM,kBAAkB,eACpC,KAAK,QA
AQ,EACb,SACA,eAAe,OACf,KAAA,GACA,OACA,KAAA,GACA,KAAA,GACA,gBAAgB,QACjB;;EAKH,MAAM,sBAAsB,CAAC,CAAC,cAAc,GAAG,SAAS,KACtD,gCACD;EACD,IAAI;AACJ,MACE,uBACA,QAAQ,WAAW,KACnB,KAAK,0BAA0B,QAAQ,UAAU,sBAEjD,KAAI;GACF,MAAM,SAAS,MAAM,KAAK,sBACxB,QAAQ,IACR,eACA,cAAc,GACf;GACD,IAAI;AACJ,cAAW,MAAM,SAAS,OACxB,KAAI,eAAe,KAAA,EACjB,cAAa;OAEb,cAAa,OAAO,YAAY,MAAM;AAG1C,OAAI,eAAe,KAAA,EACjB,OAAM,IAAI,MAAM,gDAAgD;AAElE,YAAS;IAAE,aAAa,CAAC,CAAC,WAAW,CAAC;IAAE,WAAW,EAAE;IAAE;AACvD,SAAM,cAAc,GAAG,aAAa,OAAO;WACpC,GAAG;AACV,SAAM,cAAc,GAAG,eAAe,EAAE;AACxC,SAAM;;OAEH;AACL,OAAI;AACF,aAAS,MAAM,KAAK,UAAU,SAAS,eAAe,cAAc,GAAG;YAChE,KAAK;AACZ,UAAM,QAAQ,KACX,eAAe,EAAE,EAAE,KAAK,eACvB,YAAY,eAAe,IAAI,CAChC,CACF;AACD,UAAM;;GAGR,MAAM,mBAAgC,KAAK,kBAAkB,OAAO;AACpE,SAAM,QAAQ,KACX,eAAe,EAAE,EAAE,KAAK,YAAY,MACnC,YAAY,aAAa,iBAAiB,GAAG,CAC9C,CACF;;EAEH,MAAM,SAAS,aAAa,KAAK,YAAY,QAAQ,MAAM,IAAI,KAAA;AAI/D,SAAO,eAAe,QAAQ,SAAS;GACrC,OAAO,SAAS,EAAE,QAAQ,GAAG,KAAA;GAC7B,cAAc;GACf,CAAC;AACF,SAAO;;CAGT,MAAM,gBAAgB,EACpB,SACA,OACA,cACA,eACA,gBACA,SAcA;EACA,MAAM,mBAAmB,MAAM,gBAAgB,UAC7C,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf,KAAK,UACL,EAAE,SAAS,KAAK,SAAS,CAC1B;EACD,MAAM,QAAQ;GACZ,SAAS;GACT,mBAAmB,MAAM,iBAAiB,cAAc;GACxD,YAAY,QAAQ;GACrB;EACD,MAAM,cAAc,MAAM,kBAAkB,eAC1C,KAAK,QAAQ,EACb,SACA,OACA,KAAA,GACA,OACA,KAAA,GACA,KAAA,GACA,gBAAgB,QACjB;EAGD,MAAM,uBAAiC,EAAE;EAazC,MAAM,iBAZU,MAAM,QAAQ,WAC5B,QAAQ,IAAI,OAAO,QAAQ,UAAU;GACnC,MAAM,SAAS,MAAM,MAAM,OAAO,QAAQ,aAAa;AACvD,OAAI,UAAU,KACZ,sBAAqB,KAAK,MAAM;AAElC,UAAO;IACP,CACH,EAKE,KAAK,QAAQ,WAAW;GAAE;GAAQ,YAAY,cAAc;GAAQ,EAAE,CACtE,QACE,EAAE,aACA,OAAO,WAAW,eAAe,OAAO,SAAS,QAClD,OAAO,WAAW,WACrB;EAGH,MAAM,cAA8B,EAAE;AACtC,QAAM,QAAQ,IACZ,cAAc,IAAI,OAAO,EAAE,QAAQ,eAAe,cAAc,MAAM;AACpE,OAAI,cAAc,WAAW,aAAa;IACxC,MAAM,SAAS,cAAc;AAC7B,gBAAY,KAAK,OAAO,KAAK,WAAW;AACtC,YAAO,iBAAiB;MACtB,GAAG,OAAO;MACV,YAAY,EAAE;MACf;AACD,YAAO;MACP;AACF,QAAI,OAAO,OACT,OAAM,YAAY,kBAAkB,OAAO,GAAG,KAAK;AAErD,WAAO,YAAY,aACjB,EACE,aAAa,CAAC,OAAO,EACtB,EACD,KAAA,GACA,KAAA,GACA,KAAA,GACA,EACE,QAAQ,MACT,CACF;UACI;AAEL,UAAM,YAAY,eAChB,cAAc,QACd,KAAA,GACA,KAAA,GACA,KAAA,GACA,EACE,QAAQ,MACT,CACF;AACD,WAAO,QAAQ,OAAO,cAAc,OAAO;;IAE7C,CACH;EAED,MAAM,SAAS;GACb;GACA;GACA,oBAAoB;GACrB;AAKD,SAAO,eAAe,QAAQ,SAAS;GACrC,OAAO,cACH,EAAE,QAAQ,aAAa,KAAK,YAAY,QAAQ,MAAM,EAAE,GACxD,KAAA;GACJ,cAAc;GACf,CAAC;AAEF,SAAO;;;;;CAMT,MAAM,SACJ,SACA,SACA,WACoB;AACpB,MAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM,kDAAkD;EAGpE,IAAI;AACJ,MAAI,MAAM,QAAQ,QAAQ,CACxB,iBAAgB,EAAE,MAAM,SAAS;MAEjC,iBAAgB;EAGlB,MAAM,CAAC,gBAAgB,eACrB,KAAK,6CAA6C,cAAc;AAClE,iBAAe,YAAY,eAAe,aAAa;AAEvD,MAAI,CAAC,KAAK,MACR,QAAO,KAAK,kBAAkB,SAAS,aAAa,eAAe;EAGrE,MAAM,EAAE,UAAU;EAClB,MAAM,eAAe,KAAK,wCACxB,YACD;EACD,MAAM,EAAE,aAAa,sBAAsB,uBACzC,MAAM,KAAK,gBAAgB;GACzB;GACA;GACA;GACA,eAAe;GACf,gBAAgB;GAChB,OAAO,eAAe;GACvB,CAAC;EAEJ,IAAI,YAAY,EAAE;AAClB,MAAI,qBAAqB,SAAS,GAAG;GACnC,MAAM,UAAU,MAAM,KAAK,kBACzB,qBAAqB,KAAK,MAAM,QAAQ,GAAG,EAC3C,aACA,gBACA,uBAAuB,KAAA,IACnB,qBAAqB,KAAK,MAAM,qBAAqB,GAAG,GACxD,KAAA,EACL;AACD,SAAM,QAAQ,IACZ,QAAQ,YAAY,IAAI,OAAO,YAAY,UAAU;IACnD,MAAM,cAAc,qBAAqB;AACzC,gBAAY,eAAe;AAC3B,WAAO,MAAM,OAAO,QAAQ,cAAc,cAAc,WAAW;KACnE,CACH;AACD,eAAY,QAAQ,aAAa,EAAE;;AAGrC,SAAO;GAAE;GAAa;GAAW;;;;;CAOnC,qBAA0C;AACxC,SAAO,EAAE;;CAQX,aAAqB;AACnB,SAAO;;;;;;;;;;AAWX,IAAsB,MAAtB,cAEU,QAAqB;CAU7B,MAAM,UACJ,SACA,SACA,YACoB;AAQpB,SAAO,EAAE,aAP2B,MAAM,QAAQ,IAChD,QAAQ,KAAK,QAAQ,gBACnB,KAAK,MAAM,QAAQ;GAAE,GAAG;GAAS;GAAa,EAAE,WAAW,CAAC,MACzD,SAAS,CAAC,EAAE,MAAM,CAAC,CACrB,CACF,CACF,EACqB"}
+
{"version":3,"file":"llms.js","names":[],"sources":["../../src/language_models/llms.ts"],"sourcesContent":["import type { BasePromptValueInterface } from \"../prompt_values.js\";\nimport {\n type LLMResult,\n RUN_KEY,\n type Generation,\n GenerationChunk,\n} from \"../outputs.js\";\nimport {\n type BaseCallbackConfig,\n CallbackManager,\n type CallbackManagerForLLMRun,\n type Callbacks,\n} from \"../callbacks/manager.js\";\nimport {\n BaseLanguageModel,\n type BaseLanguageModelCallOptions,\n type BaseLanguageModelInput,\n type BaseLanguageModelParams,\n} from \"./base.js\";\nimport type { RunnableConfig } from \"../runnables/config.js\";\nimport type { BaseCache } from \"../caches/index.js\";\nimport { concat } from \"../utils/stream.js\";\nimport { callbackHandlerPrefersStreaming } from \"../callbacks/base.js\";\nimport { parseMetadataInvocationParams } from \"./utils.js\";\n\nexport type SerializedLLM = {\n _model: string;\n _type: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n} & Record<string, any>;\n\nexport interface BaseLLMParams extends BaseLanguageModelParams {}\n\nexport interface BaseLLMCallOptions extends BaseLanguageModelCallOptions {}\n\n/**\n * LLM Wrapper. Takes in a prompt (or prompts) and returns a string.\n */\nexport abstract class BaseLLM<\n CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions,\n> extends BaseLanguageModel<string, CallOptions> {\n // Backwards compatibility since fields have been moved to RunnableConfig\n declare ParsedCallOptions: Omit<\n CallOptions,\n Exclude<keyof RunnableConfig, \"signal\" | \"timeout\" | \"maxConcurrency\">\n >;\n\n // Only ever instantiated in main LangChain\n lc_namespace = [\"langchain\", \"llms\", this._llmType()];\n\n /**\n * This method takes an input and options, and returns a string. 
It\n * converts the input to a prompt value and generates a result based on\n * the prompt.\n * @param input Input for the LLM.\n * @param options Options for the LLM call.\n * @returns A string result based on the prompt.\n */\n async invoke(\n input: BaseLanguageModelInput,\n options?: Partial<CallOptions>\n ): Promise<string> {\n const promptValue = BaseLLM._convertInputToPromptValue(input);\n const result = await this.generatePrompt(\n [promptValue],\n options,\n options?.callbacks\n );\n return result.generations[0][0].text;\n }\n\n // eslint-disable-next-line require-yield\n async *_streamResponseChunks(\n _input: string,\n _options: this[\"ParsedCallOptions\"],\n _runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<GenerationChunk> {\n throw new Error(\"Not implemented.\");\n }\n\n protected _separateRunnableConfigFromCallOptionsCompat(\n options?: Partial<CallOptions>\n ): [RunnableConfig, this[\"ParsedCallOptions\"]] {\n // For backwards compat, keep `signal` in both runnableConfig and callOptions\n const [runnableConfig, callOptions] =\n super._separateRunnableConfigFromCallOptions(options);\n (callOptions as this[\"ParsedCallOptions\"]).signal = runnableConfig.signal;\n return [runnableConfig, callOptions as this[\"ParsedCallOptions\"]];\n }\n\n async *_streamIterator(\n input: BaseLanguageModelInput,\n options?: Partial<CallOptions>\n ): AsyncGenerator<string> {\n // Subclass check required to avoid double callbacks with default implementation\n if (\n this._streamResponseChunks === BaseLLM.prototype._streamResponseChunks\n ) {\n yield this.invoke(input, options);\n } else {\n const prompt = BaseLLM._convertInputToPromptValue(input);\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(options);\n const invocationParams = this?.invocationParams(callOptions);\n const metadataInvocationParams =\n parseMetadataInvocationParams(invocationParams);\n const callbackManager_ = CallbackManager.configure(\n runnableConfig.callbacks,\n this.callbacks,\n runnableConfig.tags,\n this.tags,\n runnableConfig.metadata,\n { ...metadataInvocationParams, ...this.metadata },\n { verbose: this.verbose }\n );\n const extra = {\n options: callOptions,\n invocation_params: invocationParams,\n batch_size: 1,\n };\n const runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n [prompt.toString()],\n runnableConfig.runId,\n undefined,\n extra,\n undefined,\n undefined,\n runnableConfig.runName\n );\n let generation = new GenerationChunk({\n text: \"\",\n });\n try {\n for await (const chunk of this._streamResponseChunks(\n prompt.toString(),\n callOptions,\n runManagers?.[0]\n )) {\n if (!generation) {\n generation = chunk;\n } else {\n generation = generation.concat(chunk);\n }\n if (typeof chunk.text === \"string\") {\n yield chunk.text;\n }\n }\n } catch (err) {\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMError(err)\n )\n );\n throw err;\n }\n await Promise.all(\n (runManagers ?? 
[]).map((runManager) =>\n runManager?.handleLLMEnd({\n generations: [[generation]],\n })\n )\n );\n }\n }\n\n /**\n * This method takes prompt values, options, and callbacks, and generates\n * a result based on the prompts.\n * @param promptValues Prompt values for the LLM.\n * @param options Options for the LLM call.\n * @param callbacks Callbacks for the LLM call.\n * @returns An LLMResult based on the prompts.\n */\n async generatePrompt(\n promptValues: BasePromptValueInterface[],\n options?: string[] | Partial<CallOptions>,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n const prompts: string[] = promptValues.map((promptValue) =>\n promptValue.toString()\n );\n return this.generate(prompts, options, callbacks);\n }\n\n /**\n * Run the LLM on the given prompts and input.\n */\n abstract _generate(\n prompts: string[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<LLMResult>;\n\n /**\n * Get the parameters used to invoke the model\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n invocationParams(_options?: this[\"ParsedCallOptions\"]): any {\n return {};\n }\n\n _flattenLLMResult(llmResult: LLMResult): LLMResult[] {\n const llmResults: LLMResult[] = [];\n\n for (let i = 0; i < llmResult.generations.length; i += 1) {\n const genList = llmResult.generations[i];\n\n if (i === 0) {\n llmResults.push({\n generations: [genList],\n llmOutput: llmResult.llmOutput,\n });\n } else {\n const llmOutput = llmResult.llmOutput\n ? { ...llmResult.llmOutput, tokenUsage: {} }\n : undefined;\n\n llmResults.push({\n generations: [genList],\n llmOutput,\n });\n }\n }\n\n return llmResults;\n }\n\n /** @ignore */\n async _generateUncached(\n prompts: string[],\n parsedOptions: this[\"ParsedCallOptions\"],\n handledOptions: BaseCallbackConfig,\n startedRunManagers?: CallbackManagerForLLMRun[]\n ): Promise<LLMResult> {\n let runManagers: CallbackManagerForLLMRun[] | undefined;\n if (\n startedRunManagers !== undefined &&\n startedRunManagers.length === prompts.length\n ) {\n runManagers = startedRunManagers;\n } else {\n const invocationParams = this?.invocationParams(parsedOptions);\n const metadataInvocationParams =\n parseMetadataInvocationParams(invocationParams);\n const callbackManager_ = CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n handledOptions.metadata,\n { ...metadataInvocationParams, ...this.metadata },\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: invocationParams,\n batch_size: prompts.length,\n };\n runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n prompts,\n handledOptions.runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions?.runName\n );\n }\n // Even if stream is not explicitly called, check if model is implicitly\n // called from streamEvents() or streamLog() to get all streamed events.\n // Bail out if _streamResponseChunks not overridden\n const hasStreamingHandler = !!runManagers?.[0].handlers.find(\n callbackHandlerPrefersStreaming\n );\n let output: LLMResult;\n if (\n hasStreamingHandler &&\n prompts.length === 1 &&\n this._streamResponseChunks !== BaseLLM.prototype._streamResponseChunks\n ) {\n try {\n const stream = await this._streamResponseChunks(\n prompts[0],\n parsedOptions,\n runManagers?.[0]\n );\n let aggregated;\n for await (const chunk of stream) {\n if (aggregated === undefined) {\n aggregated = chunk;\n } else {\n aggregated = 
concat(aggregated, chunk);\n }\n }\n if (aggregated === undefined) {\n throw new Error(\"Received empty response from chat model call.\");\n }\n output = { generations: [[aggregated]], llmOutput: {} };\n await runManagers?.[0].handleLLMEnd(output);\n } catch (e) {\n await runManagers?.[0].handleLLMError(e);\n throw e;\n }\n } else {\n try {\n output = await this._generate(prompts, parsedOptions, runManagers?.[0]);\n } catch (err) {\n await Promise.all(\n (runManagers ?? []).map((runManager) =>\n runManager?.handleLLMError(err)\n )\n );\n throw err;\n }\n\n const flattenedOutputs: LLMResult[] = this._flattenLLMResult(output);\n await Promise.all(\n (runManagers ?? []).map((runManager, i) =>\n runManager?.handleLLMEnd(flattenedOutputs[i])\n )\n );\n }\n const runIds = runManagers?.map((manager) => manager.runId) || undefined;\n // This defines RUN_KEY as a non-enumerable property on the output object\n // so that it is not serialized when the output is stringified, and so that\n // it isnt included when listing the keys of the output object.\n Object.defineProperty(output, RUN_KEY, {\n value: runIds ? { runIds } : undefined,\n configurable: true,\n });\n return output;\n }\n\n async _generateCached({\n prompts,\n cache,\n llmStringKey,\n parsedOptions,\n handledOptions,\n runId,\n }: {\n prompts: string[];\n cache: BaseCache<Generation[]>;\n llmStringKey: string;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsedOptions: any;\n handledOptions: RunnableConfig;\n runId?: string;\n }): Promise<\n LLMResult & {\n missingPromptIndices: number[];\n startedRunManagers?: CallbackManagerForLLMRun[];\n }\n > {\n const invocationParams = this?.invocationParams(parsedOptions);\n const metadataInvocationParams =\n parseMetadataInvocationParams(invocationParams);\n const callbackManager_ = CallbackManager.configure(\n handledOptions.callbacks,\n this.callbacks,\n handledOptions.tags,\n this.tags,\n handledOptions.metadata,\n { ...metadataInvocationParams, ...this.metadata },\n { verbose: this.verbose }\n );\n const extra = {\n options: parsedOptions,\n invocation_params: invocationParams,\n batch_size: prompts.length,\n };\n const runManagers = await callbackManager_?.handleLLMStart(\n this.toJSON(),\n prompts,\n runId,\n undefined,\n extra,\n undefined,\n undefined,\n handledOptions?.runName\n );\n\n // generate results\n const missingPromptIndices: number[] = [];\n const results = await Promise.allSettled(\n prompts.map(async (prompt, index) => {\n const result = await cache.lookup(prompt, llmStringKey);\n if (result == null) {\n missingPromptIndices.push(index);\n }\n return result;\n })\n );\n\n // Map run managers to the results before filtering out null results\n // Null results are just absent from the cache.\n const cachedResults = results\n .map((result, index) => ({ result, runManager: runManagers?.[index] }))\n .filter(\n ({ result }) =>\n (result.status === \"fulfilled\" && result.value != null) ||\n result.status === \"rejected\"\n );\n\n // Handle results and call run managers\n const generations: Generation[][] = [];\n await Promise.all(\n cachedResults.map(async ({ result: promiseResult, runManager }, i) => {\n if (promiseResult.status === \"fulfilled\") {\n const result = promiseResult.value as Generation[];\n generations[i] = result.map((result) => {\n result.generationInfo = {\n ...result.generationInfo,\n tokenUsage: {},\n };\n return result;\n });\n if (result.length) {\n await runManager?.handleLLMNewToken(result[0].text);\n }\n return 
runManager?.handleLLMEnd(\n {\n generations: [result],\n },\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n } else {\n // status === \"rejected\"\n await runManager?.handleLLMError(\n promiseResult.reason,\n undefined,\n undefined,\n undefined,\n {\n cached: true,\n }\n );\n return Promise.reject(promiseResult.reason);\n }\n })\n );\n\n const output = {\n generations,\n missingPromptIndices,\n startedRunManagers: runManagers,\n };\n\n // This defines RUN_KEY as a non-enumerable property on the output object\n // so that it is not serialized when the output is stringified, and so that\n // it isnt included when listing the keys of the output object.\n Object.defineProperty(output, RUN_KEY, {\n value: runManagers\n ? { runIds: runManagers?.map((manager) => manager.runId) }\n : undefined,\n configurable: true,\n });\n\n return output;\n }\n\n /**\n * Run the LLM on the given prompts and input, handling caching.\n */\n async generate(\n prompts: string[],\n options?: string[] | Partial<CallOptions>,\n callbacks?: Callbacks\n ): Promise<LLMResult> {\n if (!Array.isArray(prompts)) {\n throw new Error(\"Argument 'prompts' is expected to be a string[]\");\n }\n\n let parsedOptions: Partial<CallOptions> | undefined;\n if (Array.isArray(options)) {\n parsedOptions = { stop: options } as Partial<CallOptions>;\n } else {\n parsedOptions = options;\n }\n\n const [runnableConfig, callOptions] =\n this._separateRunnableConfigFromCallOptionsCompat(parsedOptions);\n runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;\n\n if (!this.cache) {\n return this._generateUncached(prompts, callOptions, runnableConfig);\n }\n\n const { cache } = this;\n const llmStringKey = this._getSerializedCacheKeyParametersForCall(\n callOptions as CallOptions\n );\n const { generations, missingPromptIndices, startedRunManagers } =\n await this._generateCached({\n prompts,\n cache,\n llmStringKey,\n parsedOptions: callOptions,\n handledOptions: runnableConfig,\n runId: runnableConfig.runId,\n });\n\n let llmOutput = {};\n if (missingPromptIndices.length > 0) {\n const results = await this._generateUncached(\n missingPromptIndices.map((i) => prompts[i]),\n callOptions,\n runnableConfig,\n startedRunManagers !== undefined\n ? missingPromptIndices.map((i) => startedRunManagers?.[i])\n : undefined\n );\n await Promise.all(\n results.generations.map(async (generation, index) => {\n const promptIndex = missingPromptIndices[index];\n generations[promptIndex] = generation;\n return cache.update(prompts[promptIndex], llmStringKey, generation);\n })\n );\n llmOutput = results.llmOutput ?? 
{};\n }\n\n return { generations, llmOutput } as LLMResult;\n }\n\n /**\n * Get the identifying parameters of the LLM.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _identifyingParams(): Record<string, any> {\n return {};\n }\n\n /**\n * Return the string type key uniquely identifying this class of LLM.\n */\n abstract _llmType(): string;\n\n _modelType(): string {\n return \"base_llm\" as const;\n }\n}\n\n/**\n * LLM class that provides a simpler interface to subclass than {@link BaseLLM}.\n *\n * Requires only implementing a simpler {@link _call} method instead of {@link _generate}.\n *\n * @augments BaseLLM\n */\nexport abstract class LLM<\n CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions,\n> extends BaseLLM<CallOptions> {\n /**\n * Run the LLM on the given prompt and input.\n */\n abstract _call(\n prompt: string,\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<string>;\n\n async _generate(\n prompts: string[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<LLMResult> {\n const generations: Generation[][] = await Promise.all(\n prompts.map((prompt, promptIndex) =>\n this._call(prompt, { ...options, promptIndex }, runManager).then(\n (text) => [{ text }]\n )\n )\n );\n return { generations };\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;AAsCA,IAAsB,UAAtB,MAAsB,gBAEZ,kBAAuC;CAQ/C,eAAe;EAAC;EAAa;EAAQ,KAAK,UAAU;EAAC;;;;;;;;;CAUrD,MAAM,OACJ,OACA,SACiB;EACjB,MAAM,cAAc,QAAQ,2BAA2B,MAAM;AAM7D,UALe,MAAM,KAAK,eACxB,CAAC,YAAY,EACb,SACA,SAAS,UACV,EACa,YAAY,GAAG,GAAG;;CAIlC,OAAO,sBACL,QACA,UACA,aACiC;AACjC,QAAM,IAAI,MAAM,mBAAmB;;CAGrC,6CACE,SAC6C;EAE7C,MAAM,CAAC,gBAAgB,eACrB,MAAM,uCAAuC,QAAQ;AACtD,cAA0C,SAAS,eAAe;AACnE,SAAO,CAAC,gBAAgB,YAAyC;;CAGnE,OAAO,gBACL,OACA,SACwB;AAExB,MACE,KAAK,0BAA0B,QAAQ,UAAU,sBAEjD,OAAM,KAAK,OAAO,OAAO,QAAQ;OAC5B;GACL,MAAM,SAAS,QAAQ,2BAA2B,MAAM;GACxD,MAAM,CAAC,gBAAgB,eACrB,KAAK,6CAA6C,QAAQ;GAC5D,MAAM,mBAAmB,MAAM,iBAAiB,YAAY;GAC5D,MAAM,2BACJ,8BAA8B,iBAAiB;GACjD,MAAM,mBAAmB,gBAAgB,UACvC,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf;IAAE,GAAG;IAA0B,GAAG,KAAK;IAAU,EACjD,EAAE,SAAS,KAAK,SAAS,CAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,mBAAmB;IACnB,YAAY;IACb;GACD,MAAM,cAAc,MAAM,kBAAkB,eAC1C,KAAK,QAAQ,EACb,CAAC,OAAO,UAAU,CAAC,EACnB,eAAe,OACf,KAAA,GACA,OACA,KAAA,GACA,KAAA,GACA,eAAe,QAChB;GACD,IAAI,aAAa,IAAI,gBAAgB,EACnC,MAAM,IACP,CAAC;AACF,OAAI;AACF,eAAW,MAAM,SAAS,KAAK,sBAC7B,OAAO,UAAU,EACjB,aACA,cAAc,GACf,EAAE;AACD,SAAI,CAAC,WACH,cAAa;SAEb,cAAa,WAAW,OAAO,MAAM;AAEvC,SAAI,OAAO,MAAM,SAAS,SACxB,OAAM,MAAM;;YAGT,KAAK;AACZ,UAAM,QAAQ,KACX,eAAe,EAAE,EAAE,KAAK,eACvB,YAAY,eAAe,IAAI,CAChC,CACF;AACD,UAAM;;AAER,SAAM,QAAQ,KACX,eAAe,EAAE,EAAE,KAAK,eACvB,YAAY,aAAa,EACvB,aAAa,CAAC,CAAC,WAAW,CAAC,EAC5B,CAAC,CACH,CACF;;;;;;;;;;;CAYL,MAAM,eACJ,cACA,SACA,WACoB;EACpB,MAAM,UAAoB,aAAa,KAAK,gBAC1C,YAAY,UAAU,CACvB;AACD,SAAO,KAAK,SAAS,SAAS,SAAS,UAAU;;;;;CAgBnD,iBAAiB,UAA2C;AAC1D,SAAO,EAAE;;CAGX,kBAAkB,WAAmC;EACnD,MAAM,aAA0B,EAAE;AAElC,OAAK,IAAI,IAAI,GAAG,IAAI,UAAU,YAAY,QAAQ,KAAK,GAAG;GACxD,MAAM,UAAU,UAAU,YAAY;AAEtC,OAAI,MAAM,EACR,YAAW,KAAK;IACd,aAAa,CAAC,QAAQ;IACtB,WAAW,UAAU;IACtB,CAAC;QACG;IACL,MAAM,YAAY,UAAU,YACxB;KAAE,GAAG,UAAU;KAAW,YAAY,EAAE;KAAE,GAC1C,KAAA;AAEJ,eAAW,KAAK;KACd,aAAa,CAAC,QAAQ;KACtB;KACD,CAAC;;;AAIN,SAAO;;;CAIT,MAAM,kBACJ,SACA,eACA,gBACA,oBACoB;EACpB,IAAI;AACJ,MACE,uBAAuB,KAAA,KACvB,mBAAmB,WAAW,QAAQ,OAEtC,eAAc;OACT;GACL,MAAM,mBAAmB,MAAM,iBAAiB,cAAc;GAC9D,MAAM,2BACJ,8BAA8B,iBAAiB;GACjD,MAAM,mBAAmB,gBAAgB,UACvC,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf;IAAE,GAAG;IAA0B
,GAAG,KAAK;IAAU,EACjD,EAAE,SAAS,KAAK,SAAS,CAC1B;GACD,MAAM,QAAQ;IACZ,SAAS;IACT,mBAAmB;IACnB,YAAY,QAAQ;IACrB;AACD,iBAAc,MAAM,kBAAkB,eACpC,KAAK,QAAQ,EACb,SACA,eAAe,OACf,KAAA,GACA,OACA,KAAA,GACA,KAAA,GACA,gBAAgB,QACjB;;EAKH,MAAM,sBAAsB,CAAC,CAAC,cAAc,GAAG,SAAS,KACtD,gCACD;EACD,IAAI;AACJ,MACE,uBACA,QAAQ,WAAW,KACnB,KAAK,0BAA0B,QAAQ,UAAU,sBAEjD,KAAI;GACF,MAAM,SAAS,MAAM,KAAK,sBACxB,QAAQ,IACR,eACA,cAAc,GACf;GACD,IAAI;AACJ,cAAW,MAAM,SAAS,OACxB,KAAI,eAAe,KAAA,EACjB,cAAa;OAEb,cAAa,OAAO,YAAY,MAAM;AAG1C,OAAI,eAAe,KAAA,EACjB,OAAM,IAAI,MAAM,gDAAgD;AAElE,YAAS;IAAE,aAAa,CAAC,CAAC,WAAW,CAAC;IAAE,WAAW,EAAE;IAAE;AACvD,SAAM,cAAc,GAAG,aAAa,OAAO;WACpC,GAAG;AACV,SAAM,cAAc,GAAG,eAAe,EAAE;AACxC,SAAM;;OAEH;AACL,OAAI;AACF,aAAS,MAAM,KAAK,UAAU,SAAS,eAAe,cAAc,GAAG;YAChE,KAAK;AACZ,UAAM,QAAQ,KACX,eAAe,EAAE,EAAE,KAAK,eACvB,YAAY,eAAe,IAAI,CAChC,CACF;AACD,UAAM;;GAGR,MAAM,mBAAgC,KAAK,kBAAkB,OAAO;AACpE,SAAM,QAAQ,KACX,eAAe,EAAE,EAAE,KAAK,YAAY,MACnC,YAAY,aAAa,iBAAiB,GAAG,CAC9C,CACF;;EAEH,MAAM,SAAS,aAAa,KAAK,YAAY,QAAQ,MAAM,IAAI,KAAA;AAI/D,SAAO,eAAe,QAAQ,SAAS;GACrC,OAAO,SAAS,EAAE,QAAQ,GAAG,KAAA;GAC7B,cAAc;GACf,CAAC;AACF,SAAO;;CAGT,MAAM,gBAAgB,EACpB,SACA,OACA,cACA,eACA,gBACA,SAcA;EACA,MAAM,mBAAmB,MAAM,iBAAiB,cAAc;EAC9D,MAAM,2BACJ,8BAA8B,iBAAiB;EACjD,MAAM,mBAAmB,gBAAgB,UACvC,eAAe,WACf,KAAK,WACL,eAAe,MACf,KAAK,MACL,eAAe,UACf;GAAE,GAAG;GAA0B,GAAG,KAAK;GAAU,EACjD,EAAE,SAAS,KAAK,SAAS,CAC1B;EACD,MAAM,QAAQ;GACZ,SAAS;GACT,mBAAmB;GACnB,YAAY,QAAQ;GACrB;EACD,MAAM,cAAc,MAAM,kBAAkB,eAC1C,KAAK,QAAQ,EACb,SACA,OACA,KAAA,GACA,OACA,KAAA,GACA,KAAA,GACA,gBAAgB,QACjB;EAGD,MAAM,uBAAiC,EAAE;EAazC,MAAM,iBAZU,MAAM,QAAQ,WAC5B,QAAQ,IAAI,OAAO,QAAQ,UAAU;GACnC,MAAM,SAAS,MAAM,MAAM,OAAO,QAAQ,aAAa;AACvD,OAAI,UAAU,KACZ,sBAAqB,KAAK,MAAM;AAElC,UAAO;IACP,CACH,EAKE,KAAK,QAAQ,WAAW;GAAE;GAAQ,YAAY,cAAc;GAAQ,EAAE,CACtE,QACE,EAAE,aACA,OAAO,WAAW,eAAe,OAAO,SAAS,QAClD,OAAO,WAAW,WACrB;EAGH,MAAM,cAA8B,EAAE;AACtC,QAAM,QAAQ,IACZ,cAAc,IAAI,OAAO,EAAE,QAAQ,eAAe,cAAc,MAAM;AACpE,OAAI,cAAc,WAAW,aAAa;IACxC,MAAM,SAAS,cAAc;AAC7B,gBAAY,KAAK,OAAO,KAAK,WAAW;AACtC,YAAO,iBAAiB;MACtB,GAAG,OAAO;MACV,YAAY,EAAE;MACf;AACD,YAAO;MACP;AACF,QAAI,OAAO,OACT,OAAM,YAAY,kBAAkB,OAAO,GAAG,KAAK;AAErD,WAAO,YAAY,aACjB,EACE,aAAa,CAAC,OAAO,EACtB,EACD,KAAA,GACA,KAAA,GACA,KAAA,GACA,EACE,QAAQ,MACT,CACF;UACI;AAEL,UAAM,YAAY,eAChB,cAAc,QACd,KAAA,GACA,KAAA,GACA,KAAA,GACA,EACE,QAAQ,MACT,CACF;AACD,WAAO,QAAQ,OAAO,cAAc,OAAO;;IAE7C,CACH;EAED,MAAM,SAAS;GACb;GACA;GACA,oBAAoB;GACrB;AAKD,SAAO,eAAe,QAAQ,SAAS;GACrC,OAAO,cACH,EAAE,QAAQ,aAAa,KAAK,YAAY,QAAQ,MAAM,EAAE,GACxD,KAAA;GACJ,cAAc;GACf,CAAC;AAEF,SAAO;;;;;CAMT,MAAM,SACJ,SACA,SACA,WACoB;AACpB,MAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM,kDAAkD;EAGpE,IAAI;AACJ,MAAI,MAAM,QAAQ,QAAQ,CACxB,iBAAgB,EAAE,MAAM,SAAS;MAEjC,iBAAgB;EAGlB,MAAM,CAAC,gBAAgB,eACrB,KAAK,6CAA6C,cAAc;AAClE,iBAAe,YAAY,eAAe,aAAa;AAEvD,MAAI,CAAC,KAAK,MACR,QAAO,KAAK,kBAAkB,SAAS,aAAa,eAAe;EAGrE,MAAM,EAAE,UAAU;EAClB,MAAM,eAAe,KAAK,wCACxB,YACD;EACD,MAAM,EAAE,aAAa,sBAAsB,uBACzC,MAAM,KAAK,gBAAgB;GACzB;GACA;GACA;GACA,eAAe;GACf,gBAAgB;GAChB,OAAO,eAAe;GACvB,CAAC;EAEJ,IAAI,YAAY,EAAE;AAClB,MAAI,qBAAqB,SAAS,GAAG;GACnC,MAAM,UAAU,MAAM,KAAK,kBACzB,qBAAqB,KAAK,MAAM,QAAQ,GAAG,EAC3C,aACA,gBACA,uBAAuB,KAAA,IACnB,qBAAqB,KAAK,MAAM,qBAAqB,GAAG,GACxD,KAAA,EACL;AACD,SAAM,QAAQ,IACZ,QAAQ,YAAY,IAAI,OAAO,YAAY,UAAU;IACnD,MAAM,cAAc,qBAAqB;AACzC,gBAAY,eAAe;AAC3B,WAAO,MAAM,OAAO,QAAQ,cAAc,cAAc,WAAW;KACnE,CACH;AACD,eAAY,QAAQ,aAAa,EAAE;;AAGrC,SAAO;GAAE;GAAa;GAAW;;;;;CAOnC,qBAA0C;AACxC,SAAO,EAAE;;CAQX,aAAqB;AACnB,SAAO;;;;;;;;;;AAWX,IAAsB,MAAtB,cAEU,QAAqB;CAU7B,MAAM,UACJ,SACA,SACA,YACoB;AAQpB,SAAO,EAAE,aAP2B,MAAM,QAAQ,IAChD,QAAQ,KAAK,QAAQ,gBACnB,KAAK,MAAM,Q
AAQ;GAAE,GAAG;GAAS;GAAa,EAAE,WAAW,CAAC,MACzD,SAAS,CAAC,EAAE,MAAM,CAAC,CACrB,CACF,CACF,EACqB"}
package/dist/language_models/utils.cjs
CHANGED
@@ -11,8 +11,12 @@ function castStandardMessageContent(message) {
 }
 });
 }
+function parseMetadataInvocationParams(invocationParams) {
+return Object.fromEntries(Object.entries(invocationParams).filter(([k, v]) => k !== "tools" && v !== null && v !== void 0));
+}
 //#endregion
 exports.castStandardMessageContent = castStandardMessageContent;
 exports.iife = iife;
+exports.parseMetadataInvocationParams = parseMetadataInvocationParams;
 
 //# sourceMappingURL=utils.cjs.map
package/dist/language_models/utils.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"utils.cjs","names":[],"sources":["../../src/language_models/utils.ts"],"sourcesContent":["import { BaseMessage } from \"../messages/base.js\";\n\ntype Constructor<T> = new (...args: unknown[]) => T;\n\nexport const iife = <T>(fn: () => T): T => fn();\n\nfunction castStandardMessageContent<T extends BaseMessage>(message: T) {\n const Cls = message.constructor as Constructor<T>;\n return new Cls({\n ...message,\n content: message.contentBlocks,\n response_metadata: {\n ...message.response_metadata,\n output_version: \"v1\",\n },\n });\n}\n\nexport { castStandardMessageContent };\n"],"mappings":";AAIA,MAAa,QAAW,OAAmB,IAAI;AAE/C,SAAS,2BAAkD,SAAY;CACrE,MAAM,MAAM,QAAQ;AACpB,QAAO,IAAI,IAAI;EACb,GAAG;EACH,SAAS,QAAQ;EACjB,mBAAmB;GACjB,GAAG,QAAQ;GACX,gBAAgB;GACjB;EACF,CAAC"}
+
{"version":3,"file":"utils.cjs","names":[],"sources":["../../src/language_models/utils.ts"],"sourcesContent":["import { BaseMessage } from \"../messages/base.js\";\n\ntype Constructor<T> = new (...args: unknown[]) => T;\n\nexport const iife = <T>(fn: () => T): T => fn();\n\nfunction castStandardMessageContent<T extends BaseMessage>(message: T) {\n const Cls = message.constructor as Constructor<T>;\n return new Cls({\n ...message,\n content: message.contentBlocks,\n response_metadata: {\n ...message.response_metadata,\n output_version: \"v1\",\n },\n });\n}\n\nexport function parseMetadataInvocationParams(\n invocationParams: Record<string, unknown>\n) {\n return Object.fromEntries(\n Object.entries(invocationParams).filter(\n ([k, v]) => k !== \"tools\" && v !== null && v !== undefined\n )\n );\n}\n\nexport { castStandardMessageContent };\n"],"mappings":";AAIA,MAAa,QAAW,OAAmB,IAAI;AAE/C,SAAS,2BAAkD,SAAY;CACrE,MAAM,MAAM,QAAQ;AACpB,QAAO,IAAI,IAAI;EACb,GAAG;EACH,SAAS,QAAQ;EACjB,mBAAmB;GACjB,GAAG,QAAQ;GACX,gBAAgB;GACjB;EACF,CAAC;;AAGJ,SAAgB,8BACd,kBACA;AACA,QAAO,OAAO,YACZ,OAAO,QAAQ,iBAAiB,CAAC,QAC9B,CAAC,GAAG,OAAO,MAAM,WAAW,MAAM,QAAQ,MAAM,KAAA,EAClD,CACF"}
package/dist/language_models/utils.js
CHANGED
@@ -11,7 +11,10 @@ function castStandardMessageContent(message) {
 }
 });
 }
+function parseMetadataInvocationParams(invocationParams) {
+return Object.fromEntries(Object.entries(invocationParams).filter(([k, v]) => k !== "tools" && v !== null && v !== void 0));
+}
 //#endregion
-export { castStandardMessageContent, iife };
+export { castStandardMessageContent, iife, parseMetadataInvocationParams };
 
 //# sourceMappingURL=utils.js.map
package/dist/language_models/utils.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"utils.js","names":[],"sources":["../../src/language_models/utils.ts"],"sourcesContent":["import { BaseMessage } from \"../messages/base.js\";\n\ntype Constructor<T> = new (...args: unknown[]) => T;\n\nexport const iife = <T>(fn: () => T): T => fn();\n\nfunction castStandardMessageContent<T extends BaseMessage>(message: T) {\n const Cls = message.constructor as Constructor<T>;\n return new Cls({\n ...message,\n content: message.contentBlocks,\n response_metadata: {\n ...message.response_metadata,\n output_version: \"v1\",\n },\n });\n}\n\nexport { castStandardMessageContent };\n"],"mappings":";AAIA,MAAa,QAAW,OAAmB,IAAI;AAE/C,SAAS,2BAAkD,SAAY;CACrE,MAAM,MAAM,QAAQ;AACpB,QAAO,IAAI,IAAI;EACb,GAAG;EACH,SAAS,QAAQ;EACjB,mBAAmB;GACjB,GAAG,QAAQ;GACX,gBAAgB;GACjB;EACF,CAAC"}
+
{"version":3,"file":"utils.js","names":[],"sources":["../../src/language_models/utils.ts"],"sourcesContent":["import { BaseMessage } from \"../messages/base.js\";\n\ntype Constructor<T> = new (...args: unknown[]) => T;\n\nexport const iife = <T>(fn: () => T): T => fn();\n\nfunction castStandardMessageContent<T extends BaseMessage>(message: T) {\n const Cls = message.constructor as Constructor<T>;\n return new Cls({\n ...message,\n content: message.contentBlocks,\n response_metadata: {\n ...message.response_metadata,\n output_version: \"v1\",\n },\n });\n}\n\nexport function parseMetadataInvocationParams(\n invocationParams: Record<string, unknown>\n) {\n return Object.fromEntries(\n Object.entries(invocationParams).filter(\n ([k, v]) => k !== \"tools\" && v !== null && v !== undefined\n )\n );\n}\n\nexport { castStandardMessageContent };\n"],"mappings":";AAIA,MAAa,QAAW,OAAmB,IAAI;AAE/C,SAAS,2BAAkD,SAAY;CACrE,MAAM,MAAM,QAAQ;AACpB,QAAO,IAAI,IAAI;EACb,GAAG;EACH,SAAS,QAAQ;EACjB,mBAAmB;GACjB,GAAG,QAAQ;GACX,gBAAgB;GACjB;EACF,CAAC;;AAGJ,SAAgB,8BACd,kBACA;AACA,QAAO,OAAO,YACZ,OAAO,QAAQ,iBAAiB,CAAC,QAC9B,CAAC,GAAG,OAAO,MAAM,WAAW,MAAM,QAAQ,MAAM,KAAA,EAClD,CACF"}
package/dist/messages/utils.cjs
CHANGED
@@ -94,6 +94,34 @@ function coerceMessageLikeToMessage(messageLike) {
 } else return _constructMessageFromParams(messageLike);
 }
 /**
+* Renders a single content block to a compact string representation.
+* Text blocks are returned as-is; multimodal blocks (image, audio, video, file)
+* become short placeholders like `[image]` so their existence is preserved
+* without inflating token counts with base64 data or metadata.
+*/
+function _contentBlockToString(block) {
+if (typeof block === "string") return block;
+switch (block.type) {
+case "text": return block.text ?? "";
+case "text-plain": return block.text ?? "[text-plain file]";
+case "image":
+case "image_url": return "[image]";
+case "audio":
+case "input_audio": return "[audio]";
+case "video": return "[video]";
+case "file": return "[file]";
+case "reasoning":
+case "tool_call":
+case "tool_call_chunk":
+case "invalid_tool_call":
+case "server_tool_call":
+case "server_tool_call_chunk":
+case "server_tool_call_result":
+case "non_standard": return "";
+default: return block.type ? `[${block.type}]` : "";
+}
+}
+/**
 * This function is used by memory classes to get a string representation
 * of the chat message history, based on the message content and role.
 *
@@ -117,7 +145,7 @@ function getBufferString(messages, humanPrefix = "Human", aiPrefix = "AI") {
 else if (m.type === "generic") role = m.role;
 else throw new Error(`Got unsupported message type: ${m.type}`);
 const nameStr = m.name ? `${m.name}, ` : "";
-const readableContent = m.
+const readableContent = typeof m.content === "string" ? m.content : Array.isArray(m.content) ? m.content.map(_contentBlockToString).filter(Boolean).join("") : "";
 let message = `${role}: ${nameStr}${readableContent}`;
 if (m.type === "ai") {
 const aiMessage = m;