@langchain/google-common 1.0.0 → 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
  # @langchain/google-common
 
+ ## 1.0.1
+
+ ### Patch Changes
+
+ - [#9387](https://github.com/langchain-ai/langchainjs/pull/9387) [`ac0d4fe`](https://github.com/langchain-ai/langchainjs/commit/ac0d4fe3807e05eb2185ae8a36da69498e6163d4) Thanks [@hntrl](https://github.com/hntrl)! - Add `ModelProfile` and `.profile` properties to ChatModel
+
  ## 1.0.0
 
  This release updates the package for compatibility with LangChain v1.0. See the v1.0 [release notes](https://docs.langchain.com/oss/javascript/releases/langchain-v1) for details on what's new.
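For context on the changelog entry above, here is a minimal usage sketch of the new `.profile` getter. It is not part of this diff: it assumes a concrete subclass of `ChatGoogleBase` such as `ChatVertexAI` from `@langchain/google-vertexai`, and the model name is illustrative.

```ts
// Illustrative sketch only — not part of this diff. Assumes a concrete
// subclass of ChatGoogleBase (ChatVertexAI from @langchain/google-vertexai)
// and a @langchain/core version that exports the ModelProfile type.
import { ChatVertexAI } from "@langchain/google-vertexai";
import type { ModelProfile } from "@langchain/core/language_models/profile";

const model = new ChatVertexAI({ model: "gemini-2.0-flash" });

// The new getter returns a capability description for the configured model,
// or an empty object when the model name is not in the built-in table.
const profile: ModelProfile = model.profile;
console.log(profile);
```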
@@ -5,6 +5,7 @@ const require_common = require('./utils/common.cjs');
  const require_failed_handler = require('./utils/failed_handler.cjs');
  const require_connection = require('./connection.cjs');
  const require_auth = require('./auth.cjs');
+ const require_profiles = require('./profiles.cjs');
  const __langchain_core_utils_env = require_rolldown_runtime.__toESM(require("@langchain/core/utils/env"));
  const __langchain_core_language_models_chat_models = require_rolldown_runtime.__toESM(require("@langchain/core/language_models/chat_models"));
  const __langchain_core_outputs = require_rolldown_runtime.__toESM(require("@langchain/core/outputs"));
@@ -198,6 +199,18 @@ var ChatGoogleBase = class extends __langchain_core_language_models_chat_models.
  _combineLLMOutput() {
  return [];
  }
+ /**
+ * Return profiling information for the model.
+ *
+ * Provides information about the model's capabilities and constraints,
+ * including token limits, multimodal support, and advanced features like
+ * tool calling and structured output.
+ *
+ * @returns {ModelProfile} An object describing the model's capabilities and constraints
+ */
+ get profile() {
+ return require_profiles.default[this.model] ?? {};
+ }
  withStructuredOutput(outputSchema, config) {
  const schema = outputSchema;
  const name = config?.name;
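The compiled getter above resolves `require_profiles.default[this.model] ?? {}`. The contents of `profiles.cjs` are not included in this diff, so the sketch below only illustrates the lookup-with-fallback pattern; the table entries are hypothetical.

```ts
// Sketch of the lookup pattern used by the new getter. The entry keys and the
// real contents of the profiles module are assumptions, not taken from this diff.
import type { ModelProfile } from "@langchain/core/language_models/profile";

const PROFILES: Record<string, ModelProfile> = {
  // hypothetical entries for illustration
  "gemini-2.0-flash": {},
  "gemini-1.5-pro": {},
};

function profileFor(modelName: string): ModelProfile {
  // Unknown model names fall back to an empty profile instead of throwing.
  return PROFILES[modelName] ?? {};
}

console.log(profileFor("gemini-2.0-flash")); // {}
console.log(profileFor("unknown-model")); // {}
```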
@@ -1 +1 @@
- {"version":3,"file":"chat_models.cjs","names":["AbstractGoogleLLMConnection","fields: GoogleAIBaseLLMInput<AuthOptions> | undefined","caller: AsyncCaller","client: GoogleAbstractedClient","streaming: boolean","apiConfig: GeminiAPIConfig","geminiConfig: GeminiAPIConfig","getGeminiAPI","BaseChatModel","fields?: ChatGoogleBaseInput<AuthOptions>","ensureParams","copyAndValidateModelParamsInto","DefaultGeminiSafetyHandler","options: this[\"ParsedCallOptions\"]","apiKey: string","ApiKeyGoogleAuth","fields?: GoogleAIBaseLLMInput<AuthOptions>","fields: GoogleBaseLLMInput<AuthOptions>","tools: GoogleAIToolType[]","kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>","convertToGeminiTools","options?: this[\"ParsedCallOptions\"]","copyAIModelParams","messages: BaseMessage[]","runManager: CallbackManagerForLLMRun | undefined","finalChunk: ChatGenerationChunk | null","chunk","_messages: BaseMessage[]","runManager?: CallbackManagerForLLMRun","usageMetadata: UsageMetadata | undefined","ChatGenerationChunk","AIMessageChunk","outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>","config?: StructuredOutputMethodOptions<boolean>","schema: InteropZodType<RunOutput> | Record<string, any>","outputParser: BaseLLMOutputParser<RunOutput>","tools: GeminiTool[]","schemaToGeminiParameters","JsonOutputKeyToolsParser","geminiFunctionDefinition: GeminiFunctionDeclaration","parameters: GeminiJsonSchema","removeAdditionalProperties","RunnablePassthrough","input: any","config","RunnableSequence"],"sources":["../src/chat_models.ts"],"sourcesContent":["import { getEnvironmentVariable } from \"@langchain/core/utils/env\";\nimport { UsageMetadata, type BaseMessage } from \"@langchain/core/messages\";\nimport { CallbackManagerForLLMRun } from \"@langchain/core/callbacks/manager\";\n\nimport {\n BaseChatModel,\n LangSmithParams,\n type BaseChatModelParams,\n} from \"@langchain/core/language_models/chat_models\";\nimport { ChatGenerationChunk, ChatResult } from \"@langchain/core/outputs\";\nimport { AIMessageChunk } from \"@langchain/core/messages\";\nimport {\n BaseLanguageModelInput,\n StructuredOutputMethodOptions,\n} from \"@langchain/core/language_models/base\";\nimport {\n Runnable,\n RunnablePassthrough,\n RunnableSequence,\n} from \"@langchain/core/runnables\";\nimport { JsonOutputKeyToolsParser } from \"@langchain/core/output_parsers/openai_tools\";\nimport { BaseLLMOutputParser } from \"@langchain/core/output_parsers\";\nimport { AsyncCaller } from \"@langchain/core/utils/async_caller\";\nimport { concat } from \"@langchain/core/utils/stream\";\nimport {\n InteropZodType,\n isInteropZodSchema,\n} from \"@langchain/core/utils/types\";\nimport {\n GoogleAIBaseLLMInput,\n GoogleAIModelParams,\n GoogleAISafetySetting,\n GoogleConnectionParams,\n GooglePlatformType,\n GeminiTool,\n GoogleAIBaseLanguageModelCallOptions,\n GoogleAIAPI,\n GoogleAIAPIParams,\n GoogleSearchToolSetting,\n GoogleSpeechConfig,\n GeminiJsonSchema,\n} from \"./types.js\";\nimport {\n convertToGeminiTools,\n copyAIModelParams,\n copyAndValidateModelParamsInto,\n} from \"./utils/common.js\";\nimport { AbstractGoogleLLMConnection } from \"./connection.js\";\nimport { DefaultGeminiSafetyHandler, getGeminiAPI } from \"./utils/gemini.js\";\nimport { ApiKeyGoogleAuth, GoogleAbstractedClient } from \"./auth.js\";\nimport { JsonStream } from \"./utils/stream.js\";\nimport { ensureParams } from \"./utils/failed_handler.js\";\nimport type {\n GoogleBaseLLMInput,\n 
GoogleAISafetyHandler,\n GoogleAISafetyParams,\n GeminiFunctionDeclaration,\n GeminiFunctionSchema,\n GoogleAIToolType,\n GeminiAPIConfig,\n GoogleAIModelModality,\n} from \"./types.js\";\nimport {\n removeAdditionalProperties,\n schemaToGeminiParameters,\n} from \"./utils/zod_to_gemini_parameters.js\";\n\nexport class ChatConnection<AuthOptions> extends AbstractGoogleLLMConnection<\n BaseMessage[],\n AuthOptions\n> {\n convertSystemMessageToHumanContent: boolean | undefined;\n\n constructor(\n fields: GoogleAIBaseLLMInput<AuthOptions> | undefined,\n caller: AsyncCaller,\n client: GoogleAbstractedClient,\n streaming: boolean\n ) {\n super(fields, caller, client, streaming);\n this.convertSystemMessageToHumanContent =\n fields?.convertSystemMessageToHumanContent;\n }\n\n get useSystemInstruction(): boolean {\n return typeof this.convertSystemMessageToHumanContent === \"boolean\"\n ? !this.convertSystemMessageToHumanContent\n : this.computeUseSystemInstruction;\n }\n\n get computeUseSystemInstruction(): boolean {\n // This works on models from April 2024 and later\n // Vertex AI: gemini-1.5-pro and gemini-1.0-002 and later\n // AI Studio: gemini-1.5-pro-latest\n if (this.modelFamily === \"palm\") {\n return false;\n } else if (this.modelName === \"gemini-1.0-pro-001\") {\n return false;\n } else if (this.modelName.startsWith(\"gemini-pro-vision\")) {\n return false;\n } else if (this.modelName.startsWith(\"gemini-1.0-pro-vision\")) {\n return false;\n } else if (this.modelName === \"gemini-pro\" && this.platform === \"gai\") {\n // on AI Studio gemini-pro is still pointing at gemini-1.0-pro-001\n return false;\n } else if (this.modelFamily === \"gemma\") {\n // At least as of 12 Mar 2025 gemma 3 on AIS, trying to use system instructions yields an error:\n // \"Developer instruction is not enabled for models/gemma-3-27b-it\"\n return false;\n }\n return true;\n }\n\n computeGoogleSearchToolAdjustmentFromModel(): Exclude<\n GoogleSearchToolSetting,\n boolean\n > {\n if (this.modelName.startsWith(\"gemini-1.0\")) {\n return \"googleSearchRetrieval\";\n } else if (this.modelName.startsWith(\"gemini-1.5\")) {\n return \"googleSearchRetrieval\";\n } else {\n return \"googleSearch\";\n }\n }\n\n computeGoogleSearchToolAdjustment(\n apiConfig: GeminiAPIConfig\n ): Exclude<GoogleSearchToolSetting, true> {\n const adj = apiConfig.googleSearchToolAdjustment;\n if (adj === undefined || adj === true) {\n return this.computeGoogleSearchToolAdjustmentFromModel();\n } else {\n return adj;\n }\n }\n\n buildGeminiAPI(): GoogleAIAPI {\n const apiConfig: GeminiAPIConfig =\n (this.apiConfig as GeminiAPIConfig) ?? 
{};\n const googleSearchToolAdjustment =\n this.computeGoogleSearchToolAdjustment(apiConfig);\n const geminiConfig: GeminiAPIConfig = {\n useSystemInstruction: this.useSystemInstruction,\n googleSearchToolAdjustment,\n ...apiConfig,\n };\n return getGeminiAPI(geminiConfig);\n }\n\n get api(): GoogleAIAPI {\n switch (this.apiName) {\n case \"google\":\n return this.buildGeminiAPI();\n default:\n return super.api;\n }\n }\n}\n\n/**\n * Input to chat model class.\n */\nexport interface ChatGoogleBaseInput<AuthOptions>\n extends BaseChatModelParams,\n GoogleConnectionParams<AuthOptions>,\n GoogleAIModelParams,\n GoogleAISafetyParams,\n GoogleAIAPIParams,\n Pick<GoogleAIBaseLanguageModelCallOptions, \"streamUsage\"> {}\n\n/**\n * Integration with a Google chat model.\n */\nexport abstract class ChatGoogleBase<AuthOptions>\n extends BaseChatModel<GoogleAIBaseLanguageModelCallOptions, AIMessageChunk>\n implements ChatGoogleBaseInput<AuthOptions>\n{\n // Used for tracing, replace with the same name as your class\n static lc_name() {\n return \"ChatGoogle\";\n }\n\n get lc_secrets(): { [key: string]: string } | undefined {\n return {\n authOptions: \"GOOGLE_AUTH_OPTIONS\",\n };\n }\n\n lc_serializable = true;\n\n // Set based on modelName\n model: string;\n\n modelName = \"gemini-pro\";\n\n temperature: number;\n\n maxOutputTokens: number;\n\n maxReasoningTokens: number;\n\n topP: number;\n\n topK: number;\n\n seed: number;\n\n presencePenalty: number;\n\n frequencyPenalty: number;\n\n stopSequences: string[] = [];\n\n logprobs: boolean;\n\n topLogprobs: number = 0;\n\n safetySettings: GoogleAISafetySetting[] = [];\n\n responseModalities?: GoogleAIModelModality[];\n\n // May intentionally be undefined, meaning to compute this.\n convertSystemMessageToHumanContent: boolean | undefined;\n\n safetyHandler: GoogleAISafetyHandler;\n\n speechConfig: GoogleSpeechConfig;\n\n streamUsage = true;\n\n streaming = false;\n\n labels?: Record<string, string>;\n\n protected connection: ChatConnection<AuthOptions>;\n\n protected streamedConnection: ChatConnection<AuthOptions>;\n\n constructor(fields?: ChatGoogleBaseInput<AuthOptions>) {\n super(ensureParams(fields));\n\n copyAndValidateModelParamsInto(fields, this);\n this.safetyHandler =\n fields?.safetyHandler ?? new DefaultGeminiSafetyHandler();\n this.streamUsage = fields?.streamUsage ?? this.streamUsage;\n const client = this.buildClient(fields);\n this.buildConnection(fields ?? {}, client);\n }\n\n getLsParams(options: this[\"ParsedCallOptions\"]): LangSmithParams {\n const params = this.invocationParams(options);\n return {\n ls_provider: \"google_vertexai\",\n ls_model_name: this.model,\n ls_model_type: \"chat\",\n ls_temperature: params.temperature ?? undefined,\n ls_max_tokens: params.maxOutputTokens ?? undefined,\n ls_stop: options.stop,\n };\n }\n\n abstract buildAbstractedClient(\n fields?: GoogleAIBaseLLMInput<AuthOptions>\n ): GoogleAbstractedClient;\n\n buildApiKeyClient(apiKey: string): GoogleAbstractedClient {\n return new ApiKeyGoogleAuth(apiKey);\n }\n\n buildApiKey(fields?: GoogleAIBaseLLMInput<AuthOptions>): string | undefined {\n return fields?.apiKey ?? 
getEnvironmentVariable(\"GOOGLE_API_KEY\");\n }\n\n buildClient(\n fields?: GoogleAIBaseLLMInput<AuthOptions>\n ): GoogleAbstractedClient {\n const apiKey = this.buildApiKey(fields);\n if (apiKey) {\n return this.buildApiKeyClient(apiKey);\n } else {\n return this.buildAbstractedClient(fields);\n }\n }\n\n buildConnection(\n fields: GoogleBaseLLMInput<AuthOptions>,\n client: GoogleAbstractedClient\n ) {\n this.connection = new ChatConnection(\n { ...fields, ...this },\n this.caller,\n client,\n false\n );\n\n this.streamedConnection = new ChatConnection(\n { ...fields, ...this },\n this.caller,\n client,\n true\n );\n }\n\n get platform(): GooglePlatformType {\n return this.connection.platform;\n }\n\n override bindTools(\n tools: GoogleAIToolType[],\n kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>\n ): Runnable<\n BaseLanguageModelInput,\n AIMessageChunk,\n GoogleAIBaseLanguageModelCallOptions\n > {\n return this.withConfig({ tools: convertToGeminiTools(tools), ...kwargs });\n }\n\n // Replace\n _llmType() {\n return \"chat_integration\";\n }\n\n /**\n * Get the parameters used to invoke the model\n */\n override invocationParams(options?: this[\"ParsedCallOptions\"]) {\n return copyAIModelParams(this, options);\n }\n\n async _generate(\n messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager: CallbackManagerForLLMRun | undefined\n ): Promise<ChatResult> {\n const parameters = this.invocationParams(options);\n if (this.streaming) {\n const stream = this._streamResponseChunks(messages, options, runManager);\n let finalChunk: ChatGenerationChunk | null = null;\n for await (const chunk of stream) {\n finalChunk = !finalChunk ? chunk : concat(finalChunk, chunk);\n }\n if (!finalChunk) {\n throw new Error(\"No chunks were returned from the stream.\");\n }\n return {\n generations: [finalChunk],\n };\n }\n\n const response = await this.connection.request(\n messages,\n parameters,\n options,\n runManager\n );\n const ret = this.connection.api.responseToChatResult(response);\n const chunk = ret?.generations?.[0];\n if (chunk) {\n await runManager?.handleLLMNewToken(chunk.text || \"\");\n }\n return ret;\n }\n\n async *_streamResponseChunks(\n _messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n // Make the call as a streaming request\n const parameters = this.invocationParams(options);\n const response = await this.streamedConnection.request(\n _messages,\n parameters,\n options,\n runManager\n );\n\n // Get the streaming parser of the response\n const stream = response.data as JsonStream;\n let usageMetadata: UsageMetadata | undefined;\n // Loop until the end of the stream\n // During the loop, yield each time we get a chunk from the streaming parser\n // that is either available or added to the queue\n while (!stream.streamDone) {\n const output = await stream.nextChunk();\n await runManager?.handleCustomEvent(\n `google-chunk-${this.constructor.name}`,\n {\n output,\n }\n );\n if (\n output &&\n output.usageMetadata &&\n this.streamUsage !== false &&\n options.streamUsage !== false\n ) {\n usageMetadata = {\n input_tokens: output.usageMetadata.promptTokenCount,\n output_tokens: output.usageMetadata.candidatesTokenCount,\n total_tokens: output.usageMetadata.totalTokenCount,\n };\n }\n const chunk =\n output !== null\n ? 
this.connection.api.responseToChatGeneration({ data: output })\n : new ChatGenerationChunk({\n text: \"\",\n generationInfo: { finishReason: \"stop\" },\n message: new AIMessageChunk({\n content: \"\",\n usage_metadata: usageMetadata,\n }),\n });\n if (chunk) {\n yield chunk;\n await runManager?.handleLLMNewToken(\n chunk.text ?? \"\",\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk }\n );\n }\n }\n }\n\n /** @ignore */\n _combineLLMOutput() {\n return [];\n }\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<false>\n ): Runnable<BaseLanguageModelInput, RunOutput>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<true>\n ): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<boolean>\n ):\n | Runnable<BaseLanguageModelInput, RunOutput>\n | Runnable<\n BaseLanguageModelInput,\n { raw: BaseMessage; parsed: RunOutput }\n > {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const schema: InteropZodType<RunOutput> | Record<string, any> =\n outputSchema;\n const name = config?.name;\n const method = config?.method;\n const includeRaw = config?.includeRaw;\n if (method === \"jsonMode\") {\n throw new Error(`Google only supports \"functionCalling\" as a method.`);\n }\n\n let functionName = name ?? \"extract\";\n let outputParser: BaseLLMOutputParser<RunOutput>;\n let tools: GeminiTool[];\n if (isInteropZodSchema(schema)) {\n const jsonSchema = schemaToGeminiParameters(schema);\n tools = [\n {\n functionDeclarations: [\n {\n name: functionName,\n description:\n jsonSchema.description ?? \"A function available to call.\",\n parameters: jsonSchema as GeminiFunctionSchema,\n },\n ],\n },\n ];\n outputParser = new JsonOutputKeyToolsParser({\n returnSingle: true,\n keyName: functionName,\n zodSchema: schema,\n });\n } else {\n let geminiFunctionDefinition: GeminiFunctionDeclaration;\n if (\n typeof schema.name === \"string\" &&\n typeof schema.parameters === \"object\" &&\n schema.parameters != null\n ) {\n geminiFunctionDefinition = schema as GeminiFunctionDeclaration;\n functionName = schema.name;\n } else {\n // We are providing the schema for *just* the parameters, probably\n const parameters: GeminiJsonSchema = removeAdditionalProperties(schema);\n geminiFunctionDefinition = {\n name: functionName,\n description: schema.description ?? 
\"\",\n parameters,\n };\n }\n tools = [\n {\n functionDeclarations: [geminiFunctionDefinition],\n },\n ];\n outputParser = new JsonOutputKeyToolsParser<RunOutput>({\n returnSingle: true,\n keyName: functionName,\n });\n }\n const llm = this.bindTools(tools).withConfig({ tool_choice: functionName });\n\n if (!includeRaw) {\n return llm.pipe(outputParser).withConfig({\n runName: \"ChatGoogleStructuredOutput\",\n }) as Runnable<BaseLanguageModelInput, RunOutput>;\n }\n\n const parserAssign = RunnablePassthrough.assign({\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsed: (input: any, config) => outputParser.invoke(input.raw, config),\n });\n const parserNone = RunnablePassthrough.assign({\n parsed: () => null,\n });\n const parsedWithFallback = parserAssign.withFallbacks({\n fallbacks: [parserNone],\n });\n return RunnableSequence.from<\n BaseLanguageModelInput,\n { raw: BaseMessage; parsed: RunOutput }\n >([\n {\n raw: llm,\n },\n parsedWithFallback,\n ]).withConfig({\n runName: \"StructuredOutputRunnable\",\n });\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AAmEA,IAAa,iBAAb,cAAiDA,+CAG/C;CACA;CAEA,YACEC,QACAC,QACAC,QACAC,WACA;EACA,MAAM,QAAQ,QAAQ,QAAQ,UAAU;EACxC,KAAK,qCACH,QAAQ;CACX;CAED,IAAI,uBAAgC;AAClC,SAAO,OAAO,KAAK,uCAAuC,YACtD,CAAC,KAAK,qCACN,KAAK;CACV;CAED,IAAI,8BAAuC;AAIzC,MAAI,KAAK,gBAAgB,OACvB,QAAO;WACE,KAAK,cAAc,qBAC5B,QAAO;WACE,KAAK,UAAU,WAAW,oBAAoB,CACvD,QAAO;WACE,KAAK,UAAU,WAAW,wBAAwB,CAC3D,QAAO;WACE,KAAK,cAAc,gBAAgB,KAAK,aAAa,MAE9D,QAAO;WACE,KAAK,gBAAgB,QAG9B,QAAO;AAET,SAAO;CACR;CAED,6CAGE;AACA,MAAI,KAAK,UAAU,WAAW,aAAa,CACzC,QAAO;WACE,KAAK,UAAU,WAAW,aAAa,CAChD,QAAO;MAEP,QAAO;CAEV;CAED,kCACEC,WACwC;EACxC,MAAM,MAAM,UAAU;AACtB,MAAI,QAAQ,UAAa,QAAQ,KAC/B,QAAO,KAAK,4CAA4C;MAExD,QAAO;CAEV;CAED,iBAA8B;EAC5B,MAAMA,YACH,KAAK,aAAiC,CAAE;EAC3C,MAAM,6BACJ,KAAK,kCAAkC,UAAU;EACnD,MAAMC,eAAgC;GACpC,sBAAsB,KAAK;GAC3B;GACA,GAAG;EACJ;AACD,SAAOC,4BAAa,aAAa;CAClC;CAED,IAAI,MAAmB;AACrB,UAAQ,KAAK,SAAb;GACE,KAAK,SACH,QAAO,KAAK,gBAAgB;GAC9B,QACE,QAAO,MAAM;EAChB;CACF;AACF;;;;AAgBD,IAAsB,iBAAtB,cACUC,2DAEV;CAEE,OAAO,UAAU;AACf,SAAO;CACR;CAED,IAAI,aAAoD;AACtD,SAAO,EACL,aAAa,sBACd;CACF;CAED,kBAAkB;CAGlB;CAEA,YAAY;CAEZ;CAEA;CAEA;CAEA;CAEA;CAEA;CAEA;CAEA;CAEA,gBAA0B,CAAE;CAE5B;CAEA,cAAsB;CAEtB,iBAA0C,CAAE;CAE5C;CAGA;CAEA;CAEA;CAEA,cAAc;CAEd,YAAY;CAEZ;CAEA,AAAU;CAEV,AAAU;CAEV,YAAYC,QAA2C;EACrD,MAAMC,oCAAa,OAAO,CAAC;EAE3BC,8CAA+B,QAAQ,KAAK;EAC5C,KAAK,gBACH,QAAQ,iBAAiB,IAAIC;EAC/B,KAAK,cAAc,QAAQ,eAAe,KAAK;EAC/C,MAAM,SAAS,KAAK,YAAY,OAAO;EACvC,KAAK,gBAAgB,UAAU,CAAE,GAAE,OAAO;CAC3C;CAED,YAAYC,SAAqD;EAC/D,MAAM,SAAS,KAAK,iBAAiB,QAAQ;AAC7C,SAAO;GACL,aAAa;GACb,eAAe,KAAK;GACpB,eAAe;GACf,gBAAgB,OAAO,eAAe;GACtC,eAAe,OAAO,mBAAmB;GACzC,SAAS,QAAQ;EAClB;CACF;CAMD,kBAAkBC,QAAwC;AACxD,SAAO,IAAIC,8BAAiB;CAC7B;CAED,YAAYC,QAAgE;AAC1E,SAAO,QAAQ,iEAAiC,iBAAiB;CAClE;CAED,YACEA,QACwB;EACxB,MAAM,SAAS,KAAK,YAAY,OAAO;AACvC,MAAI,OACF,QAAO,KAAK,kBAAkB,OAAO;MAErC,QAAO,KAAK,sBAAsB,OAAO;CAE5C;CAED,gBACEC,QACAd,QACA;EACA,KAAK,aAAa,IAAI,eACpB;GAAE,GAAG;GAAQ,GAAG;EAAM,GACtB,KAAK,QACL,QACA;EAGF,KAAK,qBAAqB,IAAI,eAC5B;GAAE,GAAG;GAAQ,GAAG;EAAM,GACtB,KAAK,QACL,QACA;CAEH;CAED,IAAI,WAA+B;AACjC,SAAO,KAAK,WAAW;CACxB;CAED,AAAS,UACPe,OACAC,QAKA;AACA,SAAO,KAAK,WAAW;GAAE,OAAOC,oCAAqB,MAAM;GAAE,GAAG;EAAQ,EAAC;CAC1E;CAGD,WAAW;AACT,SAAO;CACR;;;;CAKD,AAAS,iBAAiBC,SAAqC;AAC7D,SAAOC,iCAAkB,MAAM,QAAQ;CACxC;CAED,MAAM,UACJC,UACAV,SACAW,YACqB;EACrB,MAAM,aAAa,KAAK,iBAAiB,QAAQ;AACjD,MAAI,KAAK,WAAW;GAClB,MAAM,SAAS,KAAK,sBAAsB,UAAU,SAAS,WAAW;GACxE,IAAIC,aAAyC;AAC7C,cAAW,MAAMC,WAAS,QACxB,aAAa,CAAC,aAAaA,oDAAe,YAAYA,QAAM;AAE9D,OAAI,CAAC,WACH,OAAM,IAAI,MA
AM;AAElB,UAAO,EACL,aAAa,CAAC,UAAW,EAC1B;EACF;EAED,MAAM,WAAW,MAAM,KAAK,WAAW,QACrC,UACA,YACA,SACA,WACD;EACD,MAAM,MAAM,KAAK,WAAW,IAAI,qBAAqB,SAAS;EAC9D,MAAM,QAAQ,KAAK,cAAc;AACjC,MAAI,OACF,MAAM,YAAY,kBAAkB,MAAM,QAAQ,GAAG;AAEvD,SAAO;CACR;CAED,OAAO,sBACLC,WACAd,SACAe,YACqC;EAErC,MAAM,aAAa,KAAK,iBAAiB,QAAQ;EACjD,MAAM,WAAW,MAAM,KAAK,mBAAmB,QAC7C,WACA,YACA,SACA,WACD;EAGD,MAAM,SAAS,SAAS;EACxB,IAAIC;AAIJ,SAAO,CAAC,OAAO,YAAY;GACzB,MAAM,SAAS,MAAM,OAAO,WAAW;GACvC,MAAM,YAAY,kBAChB,CAAC,aAAa,EAAE,KAAK,YAAY,MAAM,EACvC,EACE,OACD,EACF;AACD,OACE,UACA,OAAO,iBACP,KAAK,gBAAgB,SACrB,QAAQ,gBAAgB,OAExB,gBAAgB;IACd,cAAc,OAAO,cAAc;IACnC,eAAe,OAAO,cAAc;IACpC,cAAc,OAAO,cAAc;GACpC;GAEH,MAAM,QACJ,WAAW,OACP,KAAK,WAAW,IAAI,yBAAyB,EAAE,MAAM,OAAQ,EAAC,GAC9D,IAAIC,6CAAoB;IACtB,MAAM;IACN,gBAAgB,EAAE,cAAc,OAAQ;IACxC,SAAS,IAAIC,yCAAe;KAC1B,SAAS;KACT,gBAAgB;IACjB;GACF;AACP,OAAI,OAAO;IACT,MAAM;IACN,MAAM,YAAY,kBAChB,MAAM,QAAQ,IACd,QACA,QACA,QACA,QACA,EAAE,MAAO,EACV;GACF;EACF;CACF;;CAGD,oBAAoB;AAClB,SAAO,CAAE;CACV;CAwBD,qBAIEC,cAIAC,QAMI;EAEJ,MAAMC,SACJ;EACF,MAAM,OAAO,QAAQ;EACrB,MAAM,SAAS,QAAQ;EACvB,MAAM,aAAa,QAAQ;AAC3B,MAAI,WAAW,WACb,OAAM,IAAI,MAAM,CAAC,mDAAmD,CAAC;EAGvE,IAAI,eAAe,QAAQ;EAC3B,IAAIC;EACJ,IAAIC;AACJ,2DAAuB,OAAO,EAAE;GAC9B,MAAM,aAAaC,0DAAyB,OAAO;GACnD,QAAQ,CACN,EACE,sBAAsB,CACpB;IACE,MAAM;IACN,aACE,WAAW,eAAe;IAC5B,YAAY;GACb,CACF,EACF,CACF;GACD,eAAe,IAAIC,sEAAyB;IAC1C,cAAc;IACd,SAAS;IACT,WAAW;GACZ;EACF,OAAM;GACL,IAAIC;AACJ,OACE,OAAO,OAAO,SAAS,YACvB,OAAO,OAAO,eAAe,YAC7B,OAAO,cAAc,MACrB;IACA,2BAA2B;IAC3B,eAAe,OAAO;GACvB,OAAM;IAEL,MAAMC,aAA+BC,4DAA2B,OAAO;IACvE,2BAA2B;KACzB,MAAM;KACN,aAAa,OAAO,eAAe;KACnC;IACD;GACF;GACD,QAAQ,CACN,EACE,sBAAsB,CAAC,wBAAyB,EACjD,CACF;GACD,eAAe,IAAIH,sEAAoC;IACrD,cAAc;IACd,SAAS;GACV;EACF;EACD,MAAM,MAAM,KAAK,UAAU,MAAM,CAAC,WAAW,EAAE,aAAa,aAAc,EAAC;AAE3E,MAAI,CAAC,WACH,QAAO,IAAI,KAAK,aAAa,CAAC,WAAW,EACvC,SAAS,6BACV,EAAC;EAGJ,MAAM,eAAeI,+CAAoB,OAAO,EAE9C,QAAQ,CAACC,OAAYC,aAAW,aAAa,OAAO,MAAM,KAAKA,SAAO,CACvE,EAAC;EACF,MAAM,aAAaF,+CAAoB,OAAO,EAC5C,QAAQ,MAAM,KACf,EAAC;EACF,MAAM,qBAAqB,aAAa,cAAc,EACpD,WAAW,CAAC,UAAW,EACxB,EAAC;AACF,SAAOG,4CAAiB,KAGtB,CACA,EACE,KAAK,IACN,GACD,kBACD,EAAC,CAAC,WAAW,EACZ,SAAS,2BACV,EAAC;CACH;AACF"}
+ {"version":3,"file":"chat_models.cjs","names":["AbstractGoogleLLMConnection","fields: GoogleAIBaseLLMInput<AuthOptions> | undefined","caller: AsyncCaller","client: GoogleAbstractedClient","streaming: boolean","apiConfig: GeminiAPIConfig","geminiConfig: GeminiAPIConfig","getGeminiAPI","BaseChatModel","fields?: ChatGoogleBaseInput<AuthOptions>","ensureParams","copyAndValidateModelParamsInto","DefaultGeminiSafetyHandler","options: this[\"ParsedCallOptions\"]","apiKey: string","ApiKeyGoogleAuth","fields?: GoogleAIBaseLLMInput<AuthOptions>","fields: GoogleBaseLLMInput<AuthOptions>","tools: GoogleAIToolType[]","kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>","convertToGeminiTools","options?: this[\"ParsedCallOptions\"]","copyAIModelParams","messages: BaseMessage[]","runManager: CallbackManagerForLLMRun | undefined","finalChunk: ChatGenerationChunk | null","chunk","_messages: BaseMessage[]","runManager?: CallbackManagerForLLMRun","usageMetadata: UsageMetadata | undefined","ChatGenerationChunk","AIMessageChunk","PROFILES","outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>","config?: StructuredOutputMethodOptions<boolean>","schema: InteropZodType<RunOutput> | Record<string, any>","outputParser: BaseLLMOutputParser<RunOutput>","tools: GeminiTool[]","schemaToGeminiParameters","JsonOutputKeyToolsParser","geminiFunctionDefinition: GeminiFunctionDeclaration","parameters: GeminiJsonSchema","removeAdditionalProperties","RunnablePassthrough","input: any","config","RunnableSequence"],"sources":["../src/chat_models.ts"],"sourcesContent":["import { getEnvironmentVariable } from \"@langchain/core/utils/env\";\nimport { UsageMetadata, type BaseMessage } from \"@langchain/core/messages\";\nimport { CallbackManagerForLLMRun } from \"@langchain/core/callbacks/manager\";\n\nimport {\n BaseChatModel,\n LangSmithParams,\n type BaseChatModelParams,\n} from \"@langchain/core/language_models/chat_models\";\nimport { ChatGenerationChunk, ChatResult } from \"@langchain/core/outputs\";\nimport { AIMessageChunk } from \"@langchain/core/messages\";\nimport {\n BaseLanguageModelInput,\n StructuredOutputMethodOptions,\n} from \"@langchain/core/language_models/base\";\nimport { type ModelProfile } from \"@langchain/core/language_models/profile\";\nimport {\n Runnable,\n RunnablePassthrough,\n RunnableSequence,\n} from \"@langchain/core/runnables\";\nimport { JsonOutputKeyToolsParser } from \"@langchain/core/output_parsers/openai_tools\";\nimport { BaseLLMOutputParser } from \"@langchain/core/output_parsers\";\nimport { AsyncCaller } from \"@langchain/core/utils/async_caller\";\nimport { concat } from \"@langchain/core/utils/stream\";\nimport {\n InteropZodType,\n isInteropZodSchema,\n} from \"@langchain/core/utils/types\";\nimport {\n GoogleAIBaseLLMInput,\n GoogleAIModelParams,\n GoogleAISafetySetting,\n GoogleConnectionParams,\n GooglePlatformType,\n GeminiTool,\n GoogleAIBaseLanguageModelCallOptions,\n GoogleAIAPI,\n GoogleAIAPIParams,\n GoogleSearchToolSetting,\n GoogleSpeechConfig,\n GeminiJsonSchema,\n} from \"./types.js\";\nimport {\n convertToGeminiTools,\n copyAIModelParams,\n copyAndValidateModelParamsInto,\n} from \"./utils/common.js\";\nimport { AbstractGoogleLLMConnection } from \"./connection.js\";\nimport { DefaultGeminiSafetyHandler, getGeminiAPI } from \"./utils/gemini.js\";\nimport { ApiKeyGoogleAuth, GoogleAbstractedClient } from \"./auth.js\";\nimport { JsonStream } from \"./utils/stream.js\";\nimport { ensureParams 
} from \"./utils/failed_handler.js\";\nimport type {\n GoogleBaseLLMInput,\n GoogleAISafetyHandler,\n GoogleAISafetyParams,\n GeminiFunctionDeclaration,\n GeminiFunctionSchema,\n GoogleAIToolType,\n GeminiAPIConfig,\n GoogleAIModelModality,\n} from \"./types.js\";\nimport {\n removeAdditionalProperties,\n schemaToGeminiParameters,\n} from \"./utils/zod_to_gemini_parameters.js\";\nimport PROFILES from \"./profiles.js\";\n\nexport class ChatConnection<AuthOptions> extends AbstractGoogleLLMConnection<\n BaseMessage[],\n AuthOptions\n> {\n convertSystemMessageToHumanContent: boolean | undefined;\n\n constructor(\n fields: GoogleAIBaseLLMInput<AuthOptions> | undefined,\n caller: AsyncCaller,\n client: GoogleAbstractedClient,\n streaming: boolean\n ) {\n super(fields, caller, client, streaming);\n this.convertSystemMessageToHumanContent =\n fields?.convertSystemMessageToHumanContent;\n }\n\n get useSystemInstruction(): boolean {\n return typeof this.convertSystemMessageToHumanContent === \"boolean\"\n ? !this.convertSystemMessageToHumanContent\n : this.computeUseSystemInstruction;\n }\n\n get computeUseSystemInstruction(): boolean {\n // This works on models from April 2024 and later\n // Vertex AI: gemini-1.5-pro and gemini-1.0-002 and later\n // AI Studio: gemini-1.5-pro-latest\n if (this.modelFamily === \"palm\") {\n return false;\n } else if (this.modelName === \"gemini-1.0-pro-001\") {\n return false;\n } else if (this.modelName.startsWith(\"gemini-pro-vision\")) {\n return false;\n } else if (this.modelName.startsWith(\"gemini-1.0-pro-vision\")) {\n return false;\n } else if (this.modelName === \"gemini-pro\" && this.platform === \"gai\") {\n // on AI Studio gemini-pro is still pointing at gemini-1.0-pro-001\n return false;\n } else if (this.modelFamily === \"gemma\") {\n // At least as of 12 Mar 2025 gemma 3 on AIS, trying to use system instructions yields an error:\n // \"Developer instruction is not enabled for models/gemma-3-27b-it\"\n return false;\n }\n return true;\n }\n\n computeGoogleSearchToolAdjustmentFromModel(): Exclude<\n GoogleSearchToolSetting,\n boolean\n > {\n if (this.modelName.startsWith(\"gemini-1.0\")) {\n return \"googleSearchRetrieval\";\n } else if (this.modelName.startsWith(\"gemini-1.5\")) {\n return \"googleSearchRetrieval\";\n } else {\n return \"googleSearch\";\n }\n }\n\n computeGoogleSearchToolAdjustment(\n apiConfig: GeminiAPIConfig\n ): Exclude<GoogleSearchToolSetting, true> {\n const adj = apiConfig.googleSearchToolAdjustment;\n if (adj === undefined || adj === true) {\n return this.computeGoogleSearchToolAdjustmentFromModel();\n } else {\n return adj;\n }\n }\n\n buildGeminiAPI(): GoogleAIAPI {\n const apiConfig: GeminiAPIConfig =\n (this.apiConfig as GeminiAPIConfig) ?? 
{};\n const googleSearchToolAdjustment =\n this.computeGoogleSearchToolAdjustment(apiConfig);\n const geminiConfig: GeminiAPIConfig = {\n useSystemInstruction: this.useSystemInstruction,\n googleSearchToolAdjustment,\n ...apiConfig,\n };\n return getGeminiAPI(geminiConfig);\n }\n\n get api(): GoogleAIAPI {\n switch (this.apiName) {\n case \"google\":\n return this.buildGeminiAPI();\n default:\n return super.api;\n }\n }\n}\n\n/**\n * Input to chat model class.\n */\nexport interface ChatGoogleBaseInput<AuthOptions>\n extends BaseChatModelParams,\n GoogleConnectionParams<AuthOptions>,\n GoogleAIModelParams,\n GoogleAISafetyParams,\n GoogleAIAPIParams,\n Pick<GoogleAIBaseLanguageModelCallOptions, \"streamUsage\"> {}\n\n/**\n * Integration with a Google chat model.\n */\nexport abstract class ChatGoogleBase<AuthOptions>\n extends BaseChatModel<GoogleAIBaseLanguageModelCallOptions, AIMessageChunk>\n implements ChatGoogleBaseInput<AuthOptions>\n{\n // Used for tracing, replace with the same name as your class\n static lc_name() {\n return \"ChatGoogle\";\n }\n\n get lc_secrets(): { [key: string]: string } | undefined {\n return {\n authOptions: \"GOOGLE_AUTH_OPTIONS\",\n };\n }\n\n lc_serializable = true;\n\n // Set based on modelName\n model: string;\n\n modelName = \"gemini-pro\";\n\n temperature: number;\n\n maxOutputTokens: number;\n\n maxReasoningTokens: number;\n\n topP: number;\n\n topK: number;\n\n seed: number;\n\n presencePenalty: number;\n\n frequencyPenalty: number;\n\n stopSequences: string[] = [];\n\n logprobs: boolean;\n\n topLogprobs: number = 0;\n\n safetySettings: GoogleAISafetySetting[] = [];\n\n responseModalities?: GoogleAIModelModality[];\n\n // May intentionally be undefined, meaning to compute this.\n convertSystemMessageToHumanContent: boolean | undefined;\n\n safetyHandler: GoogleAISafetyHandler;\n\n speechConfig: GoogleSpeechConfig;\n\n streamUsage = true;\n\n streaming = false;\n\n labels?: Record<string, string>;\n\n protected connection: ChatConnection<AuthOptions>;\n\n protected streamedConnection: ChatConnection<AuthOptions>;\n\n constructor(fields?: ChatGoogleBaseInput<AuthOptions>) {\n super(ensureParams(fields));\n\n copyAndValidateModelParamsInto(fields, this);\n this.safetyHandler =\n fields?.safetyHandler ?? new DefaultGeminiSafetyHandler();\n this.streamUsage = fields?.streamUsage ?? this.streamUsage;\n const client = this.buildClient(fields);\n this.buildConnection(fields ?? {}, client);\n }\n\n getLsParams(options: this[\"ParsedCallOptions\"]): LangSmithParams {\n const params = this.invocationParams(options);\n return {\n ls_provider: \"google_vertexai\",\n ls_model_name: this.model,\n ls_model_type: \"chat\",\n ls_temperature: params.temperature ?? undefined,\n ls_max_tokens: params.maxOutputTokens ?? undefined,\n ls_stop: options.stop,\n };\n }\n\n abstract buildAbstractedClient(\n fields?: GoogleAIBaseLLMInput<AuthOptions>\n ): GoogleAbstractedClient;\n\n buildApiKeyClient(apiKey: string): GoogleAbstractedClient {\n return new ApiKeyGoogleAuth(apiKey);\n }\n\n buildApiKey(fields?: GoogleAIBaseLLMInput<AuthOptions>): string | undefined {\n return fields?.apiKey ?? 
getEnvironmentVariable(\"GOOGLE_API_KEY\");\n }\n\n buildClient(\n fields?: GoogleAIBaseLLMInput<AuthOptions>\n ): GoogleAbstractedClient {\n const apiKey = this.buildApiKey(fields);\n if (apiKey) {\n return this.buildApiKeyClient(apiKey);\n } else {\n return this.buildAbstractedClient(fields);\n }\n }\n\n buildConnection(\n fields: GoogleBaseLLMInput<AuthOptions>,\n client: GoogleAbstractedClient\n ) {\n this.connection = new ChatConnection(\n { ...fields, ...this },\n this.caller,\n client,\n false\n );\n\n this.streamedConnection = new ChatConnection(\n { ...fields, ...this },\n this.caller,\n client,\n true\n );\n }\n\n get platform(): GooglePlatformType {\n return this.connection.platform;\n }\n\n override bindTools(\n tools: GoogleAIToolType[],\n kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>\n ): Runnable<\n BaseLanguageModelInput,\n AIMessageChunk,\n GoogleAIBaseLanguageModelCallOptions\n > {\n return this.withConfig({ tools: convertToGeminiTools(tools), ...kwargs });\n }\n\n // Replace\n _llmType() {\n return \"chat_integration\";\n }\n\n /**\n * Get the parameters used to invoke the model\n */\n override invocationParams(options?: this[\"ParsedCallOptions\"]) {\n return copyAIModelParams(this, options);\n }\n\n async _generate(\n messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager: CallbackManagerForLLMRun | undefined\n ): Promise<ChatResult> {\n const parameters = this.invocationParams(options);\n if (this.streaming) {\n const stream = this._streamResponseChunks(messages, options, runManager);\n let finalChunk: ChatGenerationChunk | null = null;\n for await (const chunk of stream) {\n finalChunk = !finalChunk ? chunk : concat(finalChunk, chunk);\n }\n if (!finalChunk) {\n throw new Error(\"No chunks were returned from the stream.\");\n }\n return {\n generations: [finalChunk],\n };\n }\n\n const response = await this.connection.request(\n messages,\n parameters,\n options,\n runManager\n );\n const ret = this.connection.api.responseToChatResult(response);\n const chunk = ret?.generations?.[0];\n if (chunk) {\n await runManager?.handleLLMNewToken(chunk.text || \"\");\n }\n return ret;\n }\n\n async *_streamResponseChunks(\n _messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n // Make the call as a streaming request\n const parameters = this.invocationParams(options);\n const response = await this.streamedConnection.request(\n _messages,\n parameters,\n options,\n runManager\n );\n\n // Get the streaming parser of the response\n const stream = response.data as JsonStream;\n let usageMetadata: UsageMetadata | undefined;\n // Loop until the end of the stream\n // During the loop, yield each time we get a chunk from the streaming parser\n // that is either available or added to the queue\n while (!stream.streamDone) {\n const output = await stream.nextChunk();\n await runManager?.handleCustomEvent(\n `google-chunk-${this.constructor.name}`,\n {\n output,\n }\n );\n if (\n output &&\n output.usageMetadata &&\n this.streamUsage !== false &&\n options.streamUsage !== false\n ) {\n usageMetadata = {\n input_tokens: output.usageMetadata.promptTokenCount,\n output_tokens: output.usageMetadata.candidatesTokenCount,\n total_tokens: output.usageMetadata.totalTokenCount,\n };\n }\n const chunk =\n output !== null\n ? 
this.connection.api.responseToChatGeneration({ data: output })\n : new ChatGenerationChunk({\n text: \"\",\n generationInfo: { finishReason: \"stop\" },\n message: new AIMessageChunk({\n content: \"\",\n usage_metadata: usageMetadata,\n }),\n });\n if (chunk) {\n yield chunk;\n await runManager?.handleLLMNewToken(\n chunk.text ?? \"\",\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk }\n );\n }\n }\n }\n\n /** @ignore */\n _combineLLMOutput() {\n return [];\n }\n\n /**\n * Return profiling information for the model.\n *\n * Provides information about the model's capabilities and constraints,\n * including token limits, multimodal support, and advanced features like\n * tool calling and structured output.\n *\n * @returns {ModelProfile} An object describing the model's capabilities and constraints\n */\n get profile(): ModelProfile {\n return PROFILES[this.model] ?? {};\n }\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<false>\n ): Runnable<BaseLanguageModelInput, RunOutput>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<true>\n ): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<boolean>\n ):\n | Runnable<BaseLanguageModelInput, RunOutput>\n | Runnable<\n BaseLanguageModelInput,\n { raw: BaseMessage; parsed: RunOutput }\n > {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const schema: InteropZodType<RunOutput> | Record<string, any> =\n outputSchema;\n const name = config?.name;\n const method = config?.method;\n const includeRaw = config?.includeRaw;\n if (method === \"jsonMode\") {\n throw new Error(`Google only supports \"functionCalling\" as a method.`);\n }\n\n let functionName = name ?? \"extract\";\n let outputParser: BaseLLMOutputParser<RunOutput>;\n let tools: GeminiTool[];\n if (isInteropZodSchema(schema)) {\n const jsonSchema = schemaToGeminiParameters(schema);\n tools = [\n {\n functionDeclarations: [\n {\n name: functionName,\n description:\n jsonSchema.description ?? 
\"A function available to call.\",\n parameters: jsonSchema as GeminiFunctionSchema,\n },\n ],\n },\n ];\n outputParser = new JsonOutputKeyToolsParser({\n returnSingle: true,\n keyName: functionName,\n zodSchema: schema,\n });\n } else {\n let geminiFunctionDefinition: GeminiFunctionDeclaration;\n if (\n typeof schema.name === \"string\" &&\n typeof schema.parameters === \"object\" &&\n schema.parameters != null\n ) {\n geminiFunctionDefinition = schema as GeminiFunctionDeclaration;\n functionName = schema.name;\n } else {\n // We are providing the schema for *just* the parameters, probably\n const parameters: GeminiJsonSchema = removeAdditionalProperties(schema);\n geminiFunctionDefinition = {\n name: functionName,\n description: schema.description ?? \"\",\n parameters,\n };\n }\n tools = [\n {\n functionDeclarations: [geminiFunctionDefinition],\n },\n ];\n outputParser = new JsonOutputKeyToolsParser<RunOutput>({\n returnSingle: true,\n keyName: functionName,\n });\n }\n const llm = this.bindTools(tools).withConfig({ tool_choice: functionName });\n\n if (!includeRaw) {\n return llm.pipe(outputParser).withConfig({\n runName: \"ChatGoogleStructuredOutput\",\n }) as Runnable<BaseLanguageModelInput, RunOutput>;\n }\n\n const parserAssign = RunnablePassthrough.assign({\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsed: (input: any, config) => outputParser.invoke(input.raw, config),\n });\n const parserNone = RunnablePassthrough.assign({\n parsed: () => null,\n });\n const parsedWithFallback = parserAssign.withFallbacks({\n fallbacks: [parserNone],\n });\n return RunnableSequence.from<\n BaseLanguageModelInput,\n { raw: BaseMessage; parsed: RunOutput }\n >([\n {\n raw: llm,\n },\n parsedWithFallback,\n ]).withConfig({\n runName: \"StructuredOutputRunnable\",\n });\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAqEA,IAAa,iBAAb,cAAiDA,+CAG/C;CACA;CAEA,YACEC,QACAC,QACAC,QACAC,WACA;EACA,MAAM,QAAQ,QAAQ,QAAQ,UAAU;EACxC,KAAK,qCACH,QAAQ;CACX;CAED,IAAI,uBAAgC;AAClC,SAAO,OAAO,KAAK,uCAAuC,YACtD,CAAC,KAAK,qCACN,KAAK;CACV;CAED,IAAI,8BAAuC;AAIzC,MAAI,KAAK,gBAAgB,OACvB,QAAO;WACE,KAAK,cAAc,qBAC5B,QAAO;WACE,KAAK,UAAU,WAAW,oBAAoB,CACvD,QAAO;WACE,KAAK,UAAU,WAAW,wBAAwB,CAC3D,QAAO;WACE,KAAK,cAAc,gBAAgB,KAAK,aAAa,MAE9D,QAAO;WACE,KAAK,gBAAgB,QAG9B,QAAO;AAET,SAAO;CACR;CAED,6CAGE;AACA,MAAI,KAAK,UAAU,WAAW,aAAa,CACzC,QAAO;WACE,KAAK,UAAU,WAAW,aAAa,CAChD,QAAO;MAEP,QAAO;CAEV;CAED,kCACEC,WACwC;EACxC,MAAM,MAAM,UAAU;AACtB,MAAI,QAAQ,UAAa,QAAQ,KAC/B,QAAO,KAAK,4CAA4C;MAExD,QAAO;CAEV;CAED,iBAA8B;EAC5B,MAAMA,YACH,KAAK,aAAiC,CAAE;EAC3C,MAAM,6BACJ,KAAK,kCAAkC,UAAU;EACnD,MAAMC,eAAgC;GACpC,sBAAsB,KAAK;GAC3B;GACA,GAAG;EACJ;AACD,SAAOC,4BAAa,aAAa;CAClC;CAED,IAAI,MAAmB;AACrB,UAAQ,KAAK,SAAb;GACE,KAAK,SACH,QAAO,KAAK,gBAAgB;GAC9B,QACE,QAAO,MAAM;EAChB;CACF;AACF;;;;AAgBD,IAAsB,iBAAtB,cACUC,2DAEV;CAEE,OAAO,UAAU;AACf,SAAO;CACR;CAED,IAAI,aAAoD;AACtD,SAAO,EACL,aAAa,sBACd;CACF;CAED,kBAAkB;CAGlB;CAEA,YAAY;CAEZ;CAEA;CAEA;CAEA;CAEA;CAEA;CAEA;CAEA;CAEA,gBAA0B,CAAE;CAE5B;CAEA,cAAsB;CAEtB,iBAA0C,CAAE;CAE5C;CAGA;CAEA;CAEA;CAEA,cAAc;CAEd,YAAY;CAEZ;CAEA,AAAU;CAEV,AAAU;CAEV,YAAYC,QAA2C;EACrD,MAAMC,oCAAa,OAAO,CAAC;EAE3BC,8CAA+B,QAAQ,KAAK;EAC5C,KAAK,gBACH,QAAQ,iBAAiB,IAAIC;EAC/B,KAAK,cAAc,QAAQ,eAAe,KAAK;EAC/C,MAAM,SAAS,KAAK,YAAY,OAAO;EACvC,KAAK,gBAAgB,UAAU,CAAE,GAAE,OAAO;CAC3C;CAED,YAAYC,SAAqD;EAC/D,MAAM,SAAS,KAAK,iBAAiB,QAAQ;AAC7C,SAAO;GACL,aAAa;GACb,eAAe,KAAK;GACpB,eAAe;GACf,gBAAgB,OAAO,eAAe;GACtC,eAAe,OAAO,mBAAmB;GACzC,SAAS,QAAQ;EAClB;CACF;CAMD,kBAAkBC,QAAwC;AACxD,SAAO,IAAIC,8BAAiB;CAC7B;CAED,YAAYC,QAAgE;AAC1E,SAAO,QAAQ,iEAAiC,iBAAiB;CAClE;CAED,YACEA,QACwB;EACxB,MAAM,SAAS,KAAK,YAAY,OAAO;AACvC,MAAI,OACF,QAAO,KAAK,kBAAkB,OAAO;MAErC,QAAO,KAAK,sBAAsB,OAAO;CAE5C;CAED,gBACEC,QACAd,QACA;EACA,KAAK,aAAa,IAAI,eACpB;GAAE,GAAG;GAAQ,GAAG;EAAM,GACtB,KAAK,QACL,QACA;EAGF,KAAK,qBAAqB,IAAI,eAC5B;GAAE,GAAG;GAAQ,GAAG;EAAM,GACtB,KAAK,QACL,QACA;CAEH;CAED,IAAI,WAA+B;AACjC,SAAO,KAAK,WAAW;CACxB;CAED,AAAS,UACPe,OACAC,QAKA;AACA,SAAO,KAAK,WAAW;GAAE,OAAOC,oCAAqB,MAAM;GAAE,GAAG;EAAQ,EAAC;CAC1E;CAGD,WAAW;AACT,SAAO;CACR;;;;CAKD,AAAS,iBAAiBC,SAAqC;AAC7D,SAAOC,iCAAkB,MAAM,QAAQ;CACxC;CAED,MAAM,UACJC,UACAV,SACAW,YACqB;EACrB,MAAM,aAAa,KAAK,iBAAiB,QAAQ;AACjD,MAAI,KAAK,WAAW;GAClB,MAAM,SAAS,KAAK,sBAAsB,UAAU,SAAS,WAAW;GACxE,IAAIC,aAAyC;AAC7C,cAAW,MAAMC,WAAS,QACxB,aAAa,CAAC,aAAaA,oDAAe,YAAYA,QAAM;AAE9D,OAAI,CAAC,WACH,OAAM,IAAI,MAAM;AAElB,UAAO,EACL,aAAa,CAAC,UAAW,EAC1B;EACF;EAED,MAAM,WAAW,MAAM,KAAK,WAAW,QACrC,UACA,YACA,SACA,WACD;EACD,MAAM,MAAM,KAAK,WAAW,IAAI,qBAAqB,SAAS;EAC9D,MAAM,QAAQ,KAAK,cAAc;AACjC,MAAI,OACF,MAAM,YAAY,kBAAkB,MAAM,QAAQ,GAAG;AAEvD,SAAO;CACR;CAED,OAAO,sBACLC,WACAd,SACAe,YACqC;EAErC,MAAM,aAAa,KAAK,iBAAiB,QAAQ;EACjD,MAAM,WAAW,MAAM,KAAK,mBAAmB,QAC7C,WACA,YACA,SACA,WACD;EAGD,MAAM,SAAS,SAAS;EACxB,IAAIC;AAIJ,SAAO,CAAC,OAAO,YAAY;GACzB,MAAM,SAAS,MAAM,OAAO,WAAW;GACvC,MAAM,YAAY,kBAChB,CAAC,aAAa,EAAE,KAAK,YAAY,MAAM,EACvC,EACE,OACD,EACF;AACD,OACE,UACA,OAAO,iBACP,KAAK,gBAAgB,SACrB,QAAQ,gBAAgB,OAExB,gBAAgB;IACd,cAAc,OAAO,cAAc;IACnC,eAAe,OAAO,cAAc;IACpC,cAAc,OAAO,cAAc;GACpC;GAEH,MAAM,QACJ,WAAW,OACP,KAAK,WAAW,IAAI,yBAAyB,EAAE,MAAM,OAAQ,EAAC,GAC9D,IAAIC,6CAAoB;IACtB,MAAM;IACN,gBAAgB,EAAE,cAAc,OAAQ;IACxC,SAAS,IAAIC,yCAAe;KAC1B,SAAS;KACT,gBAAgB;IACjB;GACF;AACP,OAAI,OAAO;IACT,MAAM;IACN,MAAM,YAAY,kBAChB,MAAM,QAAQ,IACd,QACA,QACA,QACA,QACA,EAAE,MAAO,EACV;GACF;EACF;CACF;;CAGD,oBAAoB;AAClB,SAAO,CAAE;CACV;;;;;;;;;;CAWD,IAAI,UAAwB;AAC1B,SAAOC,yBAAS,KAAK,UAAU,CAAE;CAClC;CAw
BD,qBAIEC,cAIAC,QAMI;EAEJ,MAAMC,SACJ;EACF,MAAM,OAAO,QAAQ;EACrB,MAAM,SAAS,QAAQ;EACvB,MAAM,aAAa,QAAQ;AAC3B,MAAI,WAAW,WACb,OAAM,IAAI,MAAM,CAAC,mDAAmD,CAAC;EAGvE,IAAI,eAAe,QAAQ;EAC3B,IAAIC;EACJ,IAAIC;AACJ,2DAAuB,OAAO,EAAE;GAC9B,MAAM,aAAaC,0DAAyB,OAAO;GACnD,QAAQ,CACN,EACE,sBAAsB,CACpB;IACE,MAAM;IACN,aACE,WAAW,eAAe;IAC5B,YAAY;GACb,CACF,EACF,CACF;GACD,eAAe,IAAIC,sEAAyB;IAC1C,cAAc;IACd,SAAS;IACT,WAAW;GACZ;EACF,OAAM;GACL,IAAIC;AACJ,OACE,OAAO,OAAO,SAAS,YACvB,OAAO,OAAO,eAAe,YAC7B,OAAO,cAAc,MACrB;IACA,2BAA2B;IAC3B,eAAe,OAAO;GACvB,OAAM;IAEL,MAAMC,aAA+BC,4DAA2B,OAAO;IACvE,2BAA2B;KACzB,MAAM;KACN,aAAa,OAAO,eAAe;KACnC;IACD;GACF;GACD,QAAQ,CACN,EACE,sBAAsB,CAAC,wBAAyB,EACjD,CACF;GACD,eAAe,IAAIH,sEAAoC;IACrD,cAAc;IACd,SAAS;GACV;EACF;EACD,MAAM,MAAM,KAAK,UAAU,MAAM,CAAC,WAAW,EAAE,aAAa,aAAc,EAAC;AAE3E,MAAI,CAAC,WACH,QAAO,IAAI,KAAK,aAAa,CAAC,WAAW,EACvC,SAAS,6BACV,EAAC;EAGJ,MAAM,eAAeI,+CAAoB,OAAO,EAE9C,QAAQ,CAACC,OAAYC,aAAW,aAAa,OAAO,MAAM,KAAKA,SAAO,CACvE,EAAC;EACF,MAAM,aAAaF,+CAAoB,OAAO,EAC5C,QAAQ,MAAM,KACf,EAAC;EACF,MAAM,qBAAqB,aAAa,cAAc,EACpD,WAAW,CAAC,UAAW,EACxB,EAAC;AACF,SAAOG,4CAAiB,KAGtB,CACA,EACE,KAAK,IACN,GACD,kBACD,EAAC,CAAC,WAAW,EACZ,SAAS,2BACV,EAAC;CACH;AACF"}
@@ -7,6 +7,7 @@ import { AIMessageChunk, BaseMessage } from "@langchain/core/messages";
  import { ChatGenerationChunk, ChatResult } from "@langchain/core/outputs";
  import { BaseLanguageModelInput, StructuredOutputMethodOptions } from "@langchain/core/language_models/base";
  import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
+ import { ModelProfile } from "@langchain/core/language_models/profile";
  import { Runnable } from "@langchain/core/runnables";
  import { InteropZodType } from "@langchain/core/utils/types";
 
@@ -79,6 +80,16 @@ declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<GoogleA
  _streamResponseChunks(_messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
  /** @ignore */
  _combineLLMOutput(): never[];
+ /**
+ * Return profiling information for the model.
+ *
+ * Provides information about the model's capabilities and constraints,
+ * including token limits, multimodal support, and advanced features like
+ * tool calling and structured output.
+ *
+ * @returns {ModelProfile} An object describing the model's capabilities and constraints
+ */
+ get profile(): ModelProfile;
  withStructuredOutput<
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput>
@@ -1 +1 @@
- {"version":3,"file":"chat_models.d.cts","names":["BaseMessage","CallbackManagerForLLMRun","BaseChatModel","LangSmithParams","BaseChatModelParams","ChatGenerationChunk","ChatResult","AIMessageChunk","BaseLanguageModelInput","StructuredOutputMethodOptions","Runnable","AsyncCaller","InteropZodType","GoogleAIBaseLLMInput","GoogleAIModelParams","GoogleAISafetySetting","GoogleConnectionParams","GooglePlatformType","GoogleAIBaseLanguageModelCallOptions","GoogleAIAPI","GoogleAIAPIParams","GoogleSearchToolSetting","GoogleSpeechConfig","AbstractGoogleLLMConnection","GoogleAbstractedClient","GoogleBaseLLMInput","GoogleAISafetyHandler","GoogleAISafetyParams","GoogleAIToolType","GeminiAPIConfig","GoogleAIModelModality","ChatConnection","AuthOptions","Exclude","ChatGoogleBaseInput","Pick","ChatGoogleBase","Record","Partial","__types_js0","GoogleAIModelRequestParams","Promise","AsyncGenerator","RunOutput"],"sources":["../src/chat_models.d.ts"],"sourcesContent":["import { type BaseMessage } from \"@langchain/core/messages\";\nimport { CallbackManagerForLLMRun } from \"@langchain/core/callbacks/manager\";\nimport { BaseChatModel, LangSmithParams, type BaseChatModelParams } from \"@langchain/core/language_models/chat_models\";\nimport { ChatGenerationChunk, ChatResult } from \"@langchain/core/outputs\";\nimport { AIMessageChunk } from \"@langchain/core/messages\";\nimport { BaseLanguageModelInput, StructuredOutputMethodOptions } from \"@langchain/core/language_models/base\";\nimport { Runnable } from \"@langchain/core/runnables\";\nimport { AsyncCaller } from \"@langchain/core/utils/async_caller\";\nimport { InteropZodType } from \"@langchain/core/utils/types\";\nimport { GoogleAIBaseLLMInput, GoogleAIModelParams, GoogleAISafetySetting, GoogleConnectionParams, GooglePlatformType, GoogleAIBaseLanguageModelCallOptions, GoogleAIAPI, GoogleAIAPIParams, GoogleSearchToolSetting, GoogleSpeechConfig } from \"./types.js\";\nimport { AbstractGoogleLLMConnection } from \"./connection.js\";\nimport { GoogleAbstractedClient } from \"./auth.js\";\nimport type { GoogleBaseLLMInput, GoogleAISafetyHandler, GoogleAISafetyParams, GoogleAIToolType, GeminiAPIConfig, GoogleAIModelModality } from \"./types.js\";\nexport declare class ChatConnection<AuthOptions> extends AbstractGoogleLLMConnection<BaseMessage[], AuthOptions> {\n convertSystemMessageToHumanContent: boolean | undefined;\n constructor(fields: GoogleAIBaseLLMInput<AuthOptions> | undefined, caller: AsyncCaller, client: GoogleAbstractedClient, streaming: boolean);\n get useSystemInstruction(): boolean;\n get computeUseSystemInstruction(): boolean;\n computeGoogleSearchToolAdjustmentFromModel(): Exclude<GoogleSearchToolSetting, boolean>;\n computeGoogleSearchToolAdjustment(apiConfig: GeminiAPIConfig): Exclude<GoogleSearchToolSetting, true>;\n buildGeminiAPI(): GoogleAIAPI;\n get api(): GoogleAIAPI;\n}\n/**\n * Input to chat model class.\n */\nexport interface ChatGoogleBaseInput<AuthOptions> extends BaseChatModelParams, GoogleConnectionParams<AuthOptions>, GoogleAIModelParams, GoogleAISafetyParams, GoogleAIAPIParams, Pick<GoogleAIBaseLanguageModelCallOptions, \"streamUsage\"> {\n}\n/**\n * Integration with a Google chat model.\n */\nexport declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<GoogleAIBaseLanguageModelCallOptions, AIMessageChunk> implements ChatGoogleBaseInput<AuthOptions> {\n // Used for tracing, replace with the same name as your class\n static lc_name(): string;\n get lc_secrets(): {\n [key: string]: string;\n } | undefined;\n 
lc_serializable: boolean;\n // Set based on modelName\n model: string;\n modelName: string;\n temperature: number;\n maxOutputTokens: number;\n maxReasoningTokens: number;\n topP: number;\n topK: number;\n seed: number;\n presencePenalty: number;\n frequencyPenalty: number;\n stopSequences: string[];\n logprobs: boolean;\n topLogprobs: number;\n safetySettings: GoogleAISafetySetting[];\n responseModalities?: GoogleAIModelModality[];\n // May intentionally be undefined, meaning to compute this.\n convertSystemMessageToHumanContent: boolean | undefined;\n safetyHandler: GoogleAISafetyHandler;\n speechConfig: GoogleSpeechConfig;\n streamUsage: boolean;\n streaming: boolean;\n labels?: Record<string, string>;\n protected connection: ChatConnection<AuthOptions>;\n protected streamedConnection: ChatConnection<AuthOptions>;\n constructor(fields?: ChatGoogleBaseInput<AuthOptions>);\n getLsParams(options: this[\"ParsedCallOptions\"]): LangSmithParams;\n abstract buildAbstractedClient(fields?: GoogleAIBaseLLMInput<AuthOptions>): GoogleAbstractedClient;\n buildApiKeyClient(apiKey: string): GoogleAbstractedClient;\n buildApiKey(fields?: GoogleAIBaseLLMInput<AuthOptions>): string | undefined;\n buildClient(fields?: GoogleAIBaseLLMInput<AuthOptions>): GoogleAbstractedClient;\n buildConnection(fields: GoogleBaseLLMInput<AuthOptions>, client: GoogleAbstractedClient): void;\n get platform(): GooglePlatformType;\n bindTools(tools: GoogleAIToolType[], kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, GoogleAIBaseLanguageModelCallOptions>;\n // Replace\n _llmType(): string;\n /**\n * Get the parameters used to invoke the model\n */\n invocationParams(options?: this[\"ParsedCallOptions\"]): import(\"./types.js\").GoogleAIModelRequestParams;\n _generate(messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager: CallbackManagerForLLMRun | undefined): Promise<ChatResult>;\n _streamResponseChunks(_messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;\n /** @ignore */\n _combineLLMOutput(): never[];\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>, config?: StructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {\n raw: BaseMessage;\n parsed: RunOutput;\n 
}>;\n}\n"],"mappings":";;;;;;;;;;;;;AAaqB+B,cAAAA,cAAc,CAAA,WAAA,CAAA,SAAsBR,2BAAtB,CAAkDvB,WAAlD,EAAA,EAAiEgC,WAAjE,CAAA,CAAA;EAAA,kCAAA,EAAA,OAAA,GAAA,SAAA;EAAA,WAAkDhC,CAAAA,MAAAA,EAE7Da,oBAF6Db,CAExCgC,WAFwChC,CAAAA,GAAAA,SAAAA,EAAAA,MAAAA,EAENW,WAFMX,EAAAA,MAAAA,EAEewB,sBAFfxB,EAAAA,SAAAA,EAAAA,OAAAA;EAAW,IAAIgC,oBAAAA,CAAAA,CAAAA,EAAAA,OAAAA;EAAW,IAElEA,2BAAAA,CAAAA,CAAAA,EAAAA,OAAAA;EAAW,0CAAhCnB,CAAAA,CAAAA,EAG0BoB,OAH1BpB,CAGkCQ,uBAHlCR,EAAAA,OAAAA,CAAAA;EAAoB,iCAAmCF,CAAAA,SAAAA,EAI9BkB,eAJ8BlB,CAAAA,EAIZsB,OAJYtB,CAIJU,uBAJIV,EAAAA,IAAAA,CAAAA;EAAW,cAAUa,CAAAA,CAAAA,EAK9EL,WAL8EK;EAAsB,IAGhEH,GAAAA,CAAAA,CAAAA,EAG3CF,WAH2CE;;;;;AAEpCF,UAMLe,mBANKf,CAAAA,WAAAA,CAAAA,SAMoCf,mBANpCe,EAMyDH,sBANzDG,CAMgFa,WANhFb,CAAAA,EAM8FL,mBAN9FK,EAMmHQ,oBANnHR,EAMyIC,iBANzID,EAM4JgB,IAN5JhB,CAMiKD,oCANjKC,EAAAA,aAAAA,CAAAA,CAAAA;;AAP8D;AAapF;AAAoC,uBAKNiB,cALM,CAAA,WAAA,CAAA,SAK8BlC,aAL9B,CAK4CgB,oCAL5C,EAKkFX,cALlF,CAAA,YAK6G2B,mBAL7G,CAKiIF,WALjI,CAAA,CAAA;EAAA;EAA6E,OAAsEd,OAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAAoC,IAAjKd,UAAAA,CAAAA,CAAAA,EAAAA;IAAqBY,CAAAA,GAAAA,EAAAA,MAAAA,CAAAA,EAAAA,MAAAA;EAAsB,CAAA,GAAeF,SAAAA;EAAmB,eAAEa,EAAAA,OAAAA;EAAoB;EAAmB,KAAEQ,EAAAA,MAAAA;EAAI,SAAA,EAAA,MAAA;EAKxJC,WAAAA,EAAAA,MAAc;EAAA,eAAA,EAAA,MAAA;EAAA,kBAAoClB,EAAAA,MAAAA;EAAoC,IAAEX,EAAAA,MAAAA;EAAc,IAAiCyB,EAAAA,MAAAA;EAAW,IAqB5JjB,EAAAA,MAAAA;EAAqB,eAChBe,EAAAA,MAAAA;EAAqB,gBAG3BJ,EAAAA,MAAAA;EAAqB,aACtBJ,EAAAA,MAAAA,EAAAA;EAAkB,QAGvBe,EAAAA,OAAAA;EAAM,WACsBL,EAAAA,MAAAA;EAAW,cAA1BD,EATNhB,qBASMgB,EAAAA;EAAc,kBACSC,CAAAA,EATxBF,qBASwBE,EAAAA;EAAW;EAAZ,kCACHA,EAAAA,OAAAA,GAAAA,SAAAA;EAAW,aAA/BE,EAPNR,qBAOMQ;EAAmB,YACS/B,EAPnCmB,kBAOmCnB;EAAe,WACH6B,EAAAA,OAAAA;EAAW,SAAhCnB,EAAAA,OAAAA;EAAoB,MAAgBW,CAAAA,EALnEa,MAKmEb,CAAAA,MAAAA,EAAAA,MAAAA,CAAAA;EAAsB,UAC/DA,UAAAA,EALbO,cAKaP,CALEQ,WAKFR,CAAAA;EAAsB,UACfQ,kBAAAA,EALZD,cAKYC,CALGA,WAKHA,CAAAA;EAAW,WAAhCnB,CAAAA,MAAAA,CAAAA,EAJAqB,mBAIArB,CAJoBmB,WAIpBnB,CAAAA;EAAoB,WACCmB,CAAAA,OAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,CAAAA,EAJO7B,eAIP6B;EAAW,SAAhCnB,qBAAAA,CAAAA,MAAAA,CAAAA,EAHmBA,oBAGnBA,CAHwCmB,WAGxCnB,CAAAA,CAAAA,EAHuDW,sBAGvDX;EAAoB,iBAAgBW,CAAAA,MAAAA,EAAAA,MAAAA,CAAAA,EAFtBA,sBAEsBA;EAAsB,WACpCQ,CAAAA,MAAAA,CAAAA,EAFtBnB,oBAEsBmB,CAFDA,WAECA,CAAAA,CAAAA,EAAAA,MAAAA,GAAAA,SAAAA;EAAW,WAA9BP,CAAAA,MAAAA,CAAAA,EADHZ,oBACGY,CADkBO,WAClBP,CAAAA,CAAAA,EADiCD,sBACjCC;EAAkB,eAAuBD,CAAAA,MAAAA,EAAzCC,kBAAyCD,CAAtBQ,WAAsBR,CAAAA,EAAAA,MAAAA,EAAAA,sBAAAA,CAAAA,EAAAA,IAAAA;EAAsB,IACvEP,QAAAA,CAAAA,CAAAA,EAAAA,kBAAAA;EAAkB,SACjBW,CAAAA,KAAAA,EAAAA,gBAAAA,EAAAA,EAAAA,MAAAA,CAAAA,EAA6BU,OAA7BV,CAAqCV,oCAArCU,CAAAA,CAAAA,EAA6ElB,QAA7EkB,CAAsFpB,sBAAtFoB,EAA8GrB,cAA9GqB,EAA8HV,oCAA9HU,CAAAA;EAAgB;EAAyD,QAA5CU,CAAAA,CAAAA,EAAAA,MAAAA;EAAO;;;EAA8H,gBAArF5B,CAAAA,OAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,CAAAA,EAAQ,0BAARA;EAAQ,SAAA,CAAA,QAAA,EAOlFV,WAPkF,EAAA,EAAA,OAAA,EAAA,IAAA,CAAA,mBAAA,CAAA,EAAA,UAAA,EAOnBC,wBAPmB,GAAA,SAAA,CAAA,EAOoBwC,OAPpB,CAO4BnC,UAP5B,CAAA;EAMA,qBAClFN,CAAAA,SAAAA,EACaA,WADbA,EAAAA,EAAAA,OAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAC6EC,wBAD7ED,CAAAA,EACwG0C,cADxG1C,CACuHK,mBADvHL,CAAAA;EAAW;EAA4E,iBAAuBM,CAAAA,CAAAA,EAAAA,KAAAA,EAAAA;EAAU,oBAAlBmC;EAAO;EACrF,kBAK1BJ,MAL+EpC,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,GAKzDoC,MALyDpC,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,CAAAA,CAAAA,YAAAA,EAKtBW,cALsBX,CAKP0C,SALO1C;EAAwB;EAAA,EAOtHoC,MAPyHK,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,EAAAA,MAAAA,CAAAA,EAO3FjC,6BAP2FiC,CAAAA,KAAAA,CAAAA,CAAAA,EAOpDhC,QAPoDgC,CAO3ClC,sBAP2CkC,EAOnBC,SAPmBD,CAAAA;EAAc,oBAKxHL;EAAM;EAAsB,kBAK5BA,MALwEM,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,GAKlDN,MALkDM,C
AAAA,MAAAA,EAAAA,GAAAA,CAAAA,CAAAA,CAAAA,YAAAA,EAKf/B,cALe+B,CAKAA,SALAA;EAAS;EAAA,EAOhGN,MALAA,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,EAAAA,MAAAA,CAAAA,EAK8B5B,6BAL9B4B,CAAAA,IAAAA,CAAAA,CAAAA,EAKoE3B,QALpE2B,CAK6E7B,sBAL7E6B,EAAAA;IAA8B5B,GAAAA,EAMxBT,WANwBS;IAAgDD,MAAAA,EAOrEmC,SAPqEnC;EAAsB,CAAA,CAAA"}
1
+ {"version":3,"file":"chat_models.d.cts","names":["BaseMessage","CallbackManagerForLLMRun","BaseChatModel","LangSmithParams","BaseChatModelParams","ChatGenerationChunk","ChatResult","AIMessageChunk","BaseLanguageModelInput","StructuredOutputMethodOptions","ModelProfile","Runnable","AsyncCaller","InteropZodType","GoogleAIBaseLLMInput","GoogleAIModelParams","GoogleAISafetySetting","GoogleConnectionParams","GooglePlatformType","GoogleAIBaseLanguageModelCallOptions","GoogleAIAPI","GoogleAIAPIParams","GoogleSearchToolSetting","GoogleSpeechConfig","AbstractGoogleLLMConnection","GoogleAbstractedClient","GoogleBaseLLMInput","GoogleAISafetyHandler","GoogleAISafetyParams","GoogleAIToolType","GeminiAPIConfig","GoogleAIModelModality","ChatConnection","AuthOptions","Exclude","ChatGoogleBaseInput","Pick","ChatGoogleBase","Record","Partial","__types_js0","GoogleAIModelRequestParams","Promise","AsyncGenerator","RunOutput"],"sources":["../src/chat_models.d.ts"],"sourcesContent":["import { type BaseMessage } from \"@langchain/core/messages\";\nimport { CallbackManagerForLLMRun } from \"@langchain/core/callbacks/manager\";\nimport { BaseChatModel, LangSmithParams, type BaseChatModelParams } from \"@langchain/core/language_models/chat_models\";\nimport { ChatGenerationChunk, ChatResult } from \"@langchain/core/outputs\";\nimport { AIMessageChunk } from \"@langchain/core/messages\";\nimport { BaseLanguageModelInput, StructuredOutputMethodOptions } from \"@langchain/core/language_models/base\";\nimport { type ModelProfile } from \"@langchain/core/language_models/profile\";\nimport { Runnable } from \"@langchain/core/runnables\";\nimport { AsyncCaller } from \"@langchain/core/utils/async_caller\";\nimport { InteropZodType } from \"@langchain/core/utils/types\";\nimport { GoogleAIBaseLLMInput, GoogleAIModelParams, GoogleAISafetySetting, GoogleConnectionParams, GooglePlatformType, GoogleAIBaseLanguageModelCallOptions, GoogleAIAPI, GoogleAIAPIParams, GoogleSearchToolSetting, GoogleSpeechConfig } from \"./types.js\";\nimport { AbstractGoogleLLMConnection } from \"./connection.js\";\nimport { GoogleAbstractedClient } from \"./auth.js\";\nimport type { GoogleBaseLLMInput, GoogleAISafetyHandler, GoogleAISafetyParams, GoogleAIToolType, GeminiAPIConfig, GoogleAIModelModality } from \"./types.js\";\nexport declare class ChatConnection<AuthOptions> extends AbstractGoogleLLMConnection<BaseMessage[], AuthOptions> {\n convertSystemMessageToHumanContent: boolean | undefined;\n constructor(fields: GoogleAIBaseLLMInput<AuthOptions> | undefined, caller: AsyncCaller, client: GoogleAbstractedClient, streaming: boolean);\n get useSystemInstruction(): boolean;\n get computeUseSystemInstruction(): boolean;\n computeGoogleSearchToolAdjustmentFromModel(): Exclude<GoogleSearchToolSetting, boolean>;\n computeGoogleSearchToolAdjustment(apiConfig: GeminiAPIConfig): Exclude<GoogleSearchToolSetting, true>;\n buildGeminiAPI(): GoogleAIAPI;\n get api(): GoogleAIAPI;\n}\n/**\n * Input to chat model class.\n */\nexport interface ChatGoogleBaseInput<AuthOptions> extends BaseChatModelParams, GoogleConnectionParams<AuthOptions>, GoogleAIModelParams, GoogleAISafetyParams, GoogleAIAPIParams, Pick<GoogleAIBaseLanguageModelCallOptions, \"streamUsage\"> {\n}\n/**\n * Integration with a Google chat model.\n */\nexport declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<GoogleAIBaseLanguageModelCallOptions, AIMessageChunk> implements ChatGoogleBaseInput<AuthOptions> {\n // Used for tracing, replace with the same name as your class\n 
static lc_name(): string;\n get lc_secrets(): {\n [key: string]: string;\n } | undefined;\n lc_serializable: boolean;\n // Set based on modelName\n model: string;\n modelName: string;\n temperature: number;\n maxOutputTokens: number;\n maxReasoningTokens: number;\n topP: number;\n topK: number;\n seed: number;\n presencePenalty: number;\n frequencyPenalty: number;\n stopSequences: string[];\n logprobs: boolean;\n topLogprobs: number;\n safetySettings: GoogleAISafetySetting[];\n responseModalities?: GoogleAIModelModality[];\n // May intentionally be undefined, meaning to compute this.\n convertSystemMessageToHumanContent: boolean | undefined;\n safetyHandler: GoogleAISafetyHandler;\n speechConfig: GoogleSpeechConfig;\n streamUsage: boolean;\n streaming: boolean;\n labels?: Record<string, string>;\n protected connection: ChatConnection<AuthOptions>;\n protected streamedConnection: ChatConnection<AuthOptions>;\n constructor(fields?: ChatGoogleBaseInput<AuthOptions>);\n getLsParams(options: this[\"ParsedCallOptions\"]): LangSmithParams;\n abstract buildAbstractedClient(fields?: GoogleAIBaseLLMInput<AuthOptions>): GoogleAbstractedClient;\n buildApiKeyClient(apiKey: string): GoogleAbstractedClient;\n buildApiKey(fields?: GoogleAIBaseLLMInput<AuthOptions>): string | undefined;\n buildClient(fields?: GoogleAIBaseLLMInput<AuthOptions>): GoogleAbstractedClient;\n buildConnection(fields: GoogleBaseLLMInput<AuthOptions>, client: GoogleAbstractedClient): void;\n get platform(): GooglePlatformType;\n bindTools(tools: GoogleAIToolType[], kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, GoogleAIBaseLanguageModelCallOptions>;\n // Replace\n _llmType(): string;\n /**\n * Get the parameters used to invoke the model\n */\n invocationParams(options?: this[\"ParsedCallOptions\"]): import(\"./types.js\").GoogleAIModelRequestParams;\n _generate(messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager: CallbackManagerForLLMRun | undefined): Promise<ChatResult>;\n _streamResponseChunks(_messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;\n /** @ignore */\n _combineLLMOutput(): never[];\n /**\n * Return profiling information for the model.\n *\n * Provides information about the model's capabilities and constraints,\n * including token limits, multimodal support, and advanced features like\n * tool calling and structured output.\n *\n * @returns {ModelProfile} An object describing the model's capabilities and constraints\n */\n get profile(): ModelProfile;\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>, config?: StructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {\n raw: BaseMessage;\n parsed: RunOutput;\n 
}>;\n}\n"],"mappings":";;;;;;;;;;;;;;AAcqBgC,cAAAA,cAAc,CAAA,WAAA,CAAA,SAAsBR,2BAAtB,CAAkDxB,WAAlD,EAAA,EAAiEiC,WAAjE,CAAA,CAAA;EAAA,kCAAA,EAAA,OAAA,GAAA,SAAA;EAAA,WAAkDjC,CAAAA,MAAAA,EAE7Dc,oBAF6Dd,CAExCiC,WAFwCjC,CAAAA,GAAAA,SAAAA,EAAAA,MAAAA,EAENY,WAFMZ,EAAAA,MAAAA,EAEeyB,sBAFfzB,EAAAA,SAAAA,EAAAA,OAAAA;EAAW,IAAIiC,oBAAAA,CAAAA,CAAAA,EAAAA,OAAAA;EAAW,IAElEA,2BAAAA,CAAAA,CAAAA,EAAAA,OAAAA;EAAW,0CAAhCnB,CAAAA,CAAAA,EAG0BoB,OAH1BpB,CAGkCQ,uBAHlCR,EAAAA,OAAAA,CAAAA;EAAoB,iCAAmCF,CAAAA,SAAAA,EAI9BkB,eAJ8BlB,CAAAA,EAIZsB,OAJYtB,CAIJU,uBAJIV,EAAAA,IAAAA,CAAAA;EAAW,cAAUa,CAAAA,CAAAA,EAK9EL,WAL8EK;EAAsB,IAGhEH,GAAAA,CAAAA,CAAAA,EAG3CF,WAH2CE;;;;;AAEpCF,UAMLe,mBANKf,CAAAA,WAAAA,CAAAA,SAMoChB,mBANpCgB,EAMyDH,sBANzDG,CAMgFa,WANhFb,CAAAA,EAM8FL,mBAN9FK,EAMmHQ,oBANnHR,EAMyIC,iBANzID,EAM4JgB,IAN5JhB,CAMiKD,oCANjKC,EAAAA,aAAAA,CAAAA,CAAAA;;AAP8D;AAapF;AAAoC,uBAKNiB,cALM,CAAA,WAAA,CAAA,SAK8BnC,aAL9B,CAK4CiB,oCAL5C,EAKkFZ,cALlF,CAAA,YAK6G4B,mBAL7G,CAKiIF,WALjI,CAAA,CAAA;EAAA;EAA6E,OAAsEd,OAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAAoC,IAAjKf,UAAAA,CAAAA,CAAAA,EAAAA;IAAqBa,CAAAA,GAAAA,EAAAA,MAAAA,CAAAA,EAAAA,MAAAA;EAAsB,CAAA,GAAeF,SAAAA;EAAmB,eAAEa,EAAAA,OAAAA;EAAoB;EAAmB,KAAEQ,EAAAA,MAAAA;EAAI,SAAA,EAAA,MAAA;EAKxJC,WAAAA,EAAAA,MAAc;EAAA,eAAA,EAAA,MAAA;EAAA,kBAAoClB,EAAAA,MAAAA;EAAoC,IAAEZ,EAAAA,MAAAA;EAAc,IAAiC0B,EAAAA,MAAAA;EAAW,IAqB5JjB,EAAAA,MAAAA;EAAqB,eAChBe,EAAAA,MAAAA;EAAqB,gBAG3BJ,EAAAA,MAAAA;EAAqB,aACtBJ,EAAAA,MAAAA,EAAAA;EAAkB,QAGvBe,EAAAA,OAAAA;EAAM,WACsBL,EAAAA,MAAAA;EAAW,cAA1BD,EATNhB,qBASMgB,EAAAA;EAAc,kBACSC,CAAAA,EATxBF,qBASwBE,EAAAA;EAAW;EAAZ,kCACHA,EAAAA,OAAAA,GAAAA,SAAAA;EAAW,aAA/BE,EAPNR,qBAOMQ;EAAmB,YACShC,EAPnCoB,kBAOmCpB;EAAe,WACH8B,EAAAA,OAAAA;EAAW,SAAhCnB,EAAAA,OAAAA;EAAoB,MAAgBW,CAAAA,EALnEa,MAKmEb,CAAAA,MAAAA,EAAAA,MAAAA,CAAAA;EAAsB,UAC/DA,UAAAA,EALbO,cAKaP,CALEQ,WAKFR,CAAAA;EAAsB,UACfQ,kBAAAA,EALZD,cAKYC,CALGA,WAKHA,CAAAA;EAAW,WAAhCnB,CAAAA,MAAAA,CAAAA,EAJAqB,mBAIArB,CAJoBmB,WAIpBnB,CAAAA;EAAoB,WACCmB,CAAAA,OAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,CAAAA,EAJO9B,eAIP8B;EAAW,SAAhCnB,qBAAAA,CAAAA,MAAAA,CAAAA,EAHmBA,oBAGnBA,CAHwCmB,WAGxCnB,CAAAA,CAAAA,EAHuDW,sBAGvDX;EAAoB,iBAAgBW,CAAAA,MAAAA,EAAAA,MAAAA,CAAAA,EAFtBA,sBAEsBA;EAAsB,WACpCQ,CAAAA,MAAAA,CAAAA,EAFtBnB,oBAEsBmB,CAFDA,WAECA,CAAAA,CAAAA,EAAAA,MAAAA,GAAAA,SAAAA;EAAW,WAA9BP,CAAAA,MAAAA,CAAAA,EADHZ,oBACGY,CADkBO,WAClBP,CAAAA,CAAAA,EADiCD,sBACjCC;EAAkB,eAAuBD,CAAAA,MAAAA,EAAzCC,kBAAyCD,CAAtBQ,WAAsBR,CAAAA,EAAAA,MAAAA,EAAAA,sBAAAA,CAAAA,EAAAA,IAAAA;EAAsB,IACvEP,QAAAA,CAAAA,CAAAA,EAAAA,kBAAAA;EAAkB,SACjBW,CAAAA,KAAAA,EAAAA,gBAAAA,EAAAA,EAAAA,MAAAA,CAAAA,EAA6BU,OAA7BV,CAAqCV,oCAArCU,CAAAA,CAAAA,EAA6ElB,QAA7EkB,CAAsFrB,sBAAtFqB,EAA8GtB,cAA9GsB,EAA8HV,oCAA9HU,CAAAA;EAAgB;EAAyD,QAA5CU,CAAAA,CAAAA,EAAAA,MAAAA;EAAO;;;EAA8H,gBAArF5B,CAAAA,OAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,CAAAA,EAAQ,0BAARA;EAAQ,SAAA,CAAA,QAAA,EAOlFX,WAPkF,EAAA,EAAA,OAAA,EAAA,IAAA,CAAA,mBAAA,CAAA,EAAA,UAAA,EAOnBC,wBAPmB,GAAA,SAAA,CAAA,EAOoByC,OAPpB,CAO4BpC,UAP5B,CAAA;EAMA,qBAClFN,CAAAA,SAAAA,EACaA,WADbA,EAAAA,EAAAA,OAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAC6EC,wBAD7ED,CAAAA,EACwG2C,cADxG3C,CACuHK,mBADvHL,CAAAA;EAAW;EAA4E,iBAAuBM,CAAAA,CAAAA,EAAAA,KAAAA,EAAAA;EAAU;;;;;;;;;EAgBzC,IAAxBO,OAAAA,CAAAA,CAAAA,EAH5DH,YAG4DG;EAAc,oBAEtFyB;EAAM;EAAqD,kBAF5CA,MAE+D9B,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,GAFzC8B,MAEyC9B,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,CAAAA,CAAAA,YAAAA,EAFNK,cAEML,CAFSoC,SAETpC;EAAsB;EAAA,EAApG8B,MAAqE3B,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,EAAAA,MAAAA,CAAAA,EAAvCF,6BAAuCE,CAAAA,KAAAA,CAAAA,CAAAA,EAAAA,QAAAA,CAASH,sBAATG,EAAiCiC,SAAjCjC,CAAAA;EAAQ,oBAG9D2B;EAAM;EAAsB,kBAA5B
A,MAAwEM,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,GAAlDN,MAAkDM,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,CAAAA,CAAAA,YAAAA,EAAf/B,cAAe+B,CAAAA,SAAAA;EAAS;EAAA,EAEhGN,MAAAA,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,EAAAA,MAAAA,CAAAA,EAA8B7B,6BAA9B6B,CAAAA,IAAAA,CAAAA,CAAAA,EAAoE3B,QAApE2B,CAA6E9B,sBAA7E8B,EAAAA;IAA8B7B,GAAAA,EACxBT,WADwBS;IAA+CD,MAAAA,EAEpEoC,SAFoEpC;EAAsB,CAAA,CAAA"}
@@ -9,6 +9,7 @@ import { InteropZodType } from "@langchain/core/utils/types";
9
9
  import { BaseLanguageModelInput, StructuredOutputMethodOptions } from "@langchain/core/language_models/base";
10
10
  import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
11
11
  import { AsyncCaller } from "@langchain/core/utils/async_caller";
12
+ import { ModelProfile } from "@langchain/core/language_models/profile";
12
13
 
13
14
  //#region src/chat_models.d.ts
14
15
  declare class ChatConnection<AuthOptions> extends AbstractGoogleLLMConnection<BaseMessage[], AuthOptions> {
@@ -79,6 +80,16 @@ declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<GoogleA
79
80
  _streamResponseChunks(_messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
80
81
  /** @ignore */
81
82
  _combineLLMOutput(): never[];
83
+ /**
84
+ * Return profiling information for the model.
85
+ *
86
+ * Provides information about the model's capabilities and constraints,
87
+ * including token limits, multimodal support, and advanced features like
88
+ * tool calling and structured output.
89
+ *
90
+ * @returns {ModelProfile} An object describing the model's capabilities and constraints
91
+ */
92
+ get profile(): ModelProfile;
82
93
  withStructuredOutput<
83
94
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
84
95
  RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput>
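The `profile` getter added to the declaration above surfaces a `ModelProfile` describing the active model's capabilities. A minimal usage sketch, assuming a concrete subclass such as `ChatVertexAI` from `@langchain/google-vertexai` (not part of this package) and whatever fields `@langchain/core` defines on `ModelProfile`:

```ts
import { ChatVertexAI } from "@langchain/google-vertexai";
import type { ModelProfile } from "@langchain/core/language_models/profile";

// Any concrete subclass of ChatGoogleBase inherits the `.profile` getter.
const model = new ChatVertexAI({ model: "gemini-2.0-flash" });

// The profile is resolved from the model name; its exact fields (token
// limits, multimodal support, tool calling, structured output) are defined
// by @langchain/core's ModelProfile type.
const profile: ModelProfile = model.profile;
console.log(profile);
```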
@@ -1 +1 @@
1
- {"version":3,"file":"chat_models.d.ts","names":["BaseMessage","CallbackManagerForLLMRun","BaseChatModel","LangSmithParams","BaseChatModelParams","ChatGenerationChunk","ChatResult","AIMessageChunk","BaseLanguageModelInput","StructuredOutputMethodOptions","Runnable","AsyncCaller","InteropZodType","GoogleAIBaseLLMInput","GoogleAIModelParams","GoogleAISafetySetting","GoogleConnectionParams","GooglePlatformType","GoogleAIBaseLanguageModelCallOptions","GoogleAIAPI","GoogleAIAPIParams","GoogleSearchToolSetting","GoogleSpeechConfig","AbstractGoogleLLMConnection","GoogleAbstractedClient","GoogleBaseLLMInput","GoogleAISafetyHandler","GoogleAISafetyParams","GoogleAIToolType","GeminiAPIConfig","GoogleAIModelModality","ChatConnection","AuthOptions","Exclude","ChatGoogleBaseInput","Pick","ChatGoogleBase","Record","Partial","__types_js0","GoogleAIModelRequestParams","Promise","AsyncGenerator","RunOutput"],"sources":["../src/chat_models.d.ts"],"sourcesContent":["import { type BaseMessage } from \"@langchain/core/messages\";\nimport { CallbackManagerForLLMRun } from \"@langchain/core/callbacks/manager\";\nimport { BaseChatModel, LangSmithParams, type BaseChatModelParams } from \"@langchain/core/language_models/chat_models\";\nimport { ChatGenerationChunk, ChatResult } from \"@langchain/core/outputs\";\nimport { AIMessageChunk } from \"@langchain/core/messages\";\nimport { BaseLanguageModelInput, StructuredOutputMethodOptions } from \"@langchain/core/language_models/base\";\nimport { Runnable } from \"@langchain/core/runnables\";\nimport { AsyncCaller } from \"@langchain/core/utils/async_caller\";\nimport { InteropZodType } from \"@langchain/core/utils/types\";\nimport { GoogleAIBaseLLMInput, GoogleAIModelParams, GoogleAISafetySetting, GoogleConnectionParams, GooglePlatformType, GoogleAIBaseLanguageModelCallOptions, GoogleAIAPI, GoogleAIAPIParams, GoogleSearchToolSetting, GoogleSpeechConfig } from \"./types.js\";\nimport { AbstractGoogleLLMConnection } from \"./connection.js\";\nimport { GoogleAbstractedClient } from \"./auth.js\";\nimport type { GoogleBaseLLMInput, GoogleAISafetyHandler, GoogleAISafetyParams, GoogleAIToolType, GeminiAPIConfig, GoogleAIModelModality } from \"./types.js\";\nexport declare class ChatConnection<AuthOptions> extends AbstractGoogleLLMConnection<BaseMessage[], AuthOptions> {\n convertSystemMessageToHumanContent: boolean | undefined;\n constructor(fields: GoogleAIBaseLLMInput<AuthOptions> | undefined, caller: AsyncCaller, client: GoogleAbstractedClient, streaming: boolean);\n get useSystemInstruction(): boolean;\n get computeUseSystemInstruction(): boolean;\n computeGoogleSearchToolAdjustmentFromModel(): Exclude<GoogleSearchToolSetting, boolean>;\n computeGoogleSearchToolAdjustment(apiConfig: GeminiAPIConfig): Exclude<GoogleSearchToolSetting, true>;\n buildGeminiAPI(): GoogleAIAPI;\n get api(): GoogleAIAPI;\n}\n/**\n * Input to chat model class.\n */\nexport interface ChatGoogleBaseInput<AuthOptions> extends BaseChatModelParams, GoogleConnectionParams<AuthOptions>, GoogleAIModelParams, GoogleAISafetyParams, GoogleAIAPIParams, Pick<GoogleAIBaseLanguageModelCallOptions, \"streamUsage\"> {\n}\n/**\n * Integration with a Google chat model.\n */\nexport declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<GoogleAIBaseLanguageModelCallOptions, AIMessageChunk> implements ChatGoogleBaseInput<AuthOptions> {\n // Used for tracing, replace with the same name as your class\n static lc_name(): string;\n get lc_secrets(): {\n [key: string]: string;\n } | undefined;\n 
lc_serializable: boolean;\n // Set based on modelName\n model: string;\n modelName: string;\n temperature: number;\n maxOutputTokens: number;\n maxReasoningTokens: number;\n topP: number;\n topK: number;\n seed: number;\n presencePenalty: number;\n frequencyPenalty: number;\n stopSequences: string[];\n logprobs: boolean;\n topLogprobs: number;\n safetySettings: GoogleAISafetySetting[];\n responseModalities?: GoogleAIModelModality[];\n // May intentionally be undefined, meaning to compute this.\n convertSystemMessageToHumanContent: boolean | undefined;\n safetyHandler: GoogleAISafetyHandler;\n speechConfig: GoogleSpeechConfig;\n streamUsage: boolean;\n streaming: boolean;\n labels?: Record<string, string>;\n protected connection: ChatConnection<AuthOptions>;\n protected streamedConnection: ChatConnection<AuthOptions>;\n constructor(fields?: ChatGoogleBaseInput<AuthOptions>);\n getLsParams(options: this[\"ParsedCallOptions\"]): LangSmithParams;\n abstract buildAbstractedClient(fields?: GoogleAIBaseLLMInput<AuthOptions>): GoogleAbstractedClient;\n buildApiKeyClient(apiKey: string): GoogleAbstractedClient;\n buildApiKey(fields?: GoogleAIBaseLLMInput<AuthOptions>): string | undefined;\n buildClient(fields?: GoogleAIBaseLLMInput<AuthOptions>): GoogleAbstractedClient;\n buildConnection(fields: GoogleBaseLLMInput<AuthOptions>, client: GoogleAbstractedClient): void;\n get platform(): GooglePlatformType;\n bindTools(tools: GoogleAIToolType[], kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, GoogleAIBaseLanguageModelCallOptions>;\n // Replace\n _llmType(): string;\n /**\n * Get the parameters used to invoke the model\n */\n invocationParams(options?: this[\"ParsedCallOptions\"]): import(\"./types.js\").GoogleAIModelRequestParams;\n _generate(messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager: CallbackManagerForLLMRun | undefined): Promise<ChatResult>;\n _streamResponseChunks(_messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;\n /** @ignore */\n _combineLLMOutput(): never[];\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>, config?: StructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {\n raw: BaseMessage;\n parsed: RunOutput;\n 
}>;\n}\n"],"mappings":";;;;;;;;;;;;;AAaqB+B,cAAAA,cAAc,CAAA,WAAA,CAAA,SAAsBR,2BAAtB,CAAkDvB,WAAlD,EAAA,EAAiEgC,WAAjE,CAAA,CAAA;EAAA,kCAAA,EAAA,OAAA,GAAA,SAAA;EAAA,WAAkDhC,CAAAA,MAAAA,EAE7Da,oBAF6Db,CAExCgC,WAFwChC,CAAAA,GAAAA,SAAAA,EAAAA,MAAAA,EAENW,WAFMX,EAAAA,MAAAA,EAEewB,sBAFfxB,EAAAA,SAAAA,EAAAA,OAAAA;EAAW,IAAIgC,oBAAAA,CAAAA,CAAAA,EAAAA,OAAAA;EAAW,IAElEA,2BAAAA,CAAAA,CAAAA,EAAAA,OAAAA;EAAW,0CAAhCnB,CAAAA,CAAAA,EAG0BoB,OAH1BpB,CAGkCQ,uBAHlCR,EAAAA,OAAAA,CAAAA;EAAoB,iCAAmCF,CAAAA,SAAAA,EAI9BkB,eAJ8BlB,CAAAA,EAIZsB,OAJYtB,CAIJU,uBAJIV,EAAAA,IAAAA,CAAAA;EAAW,cAAUa,CAAAA,CAAAA,EAK9EL,WAL8EK;EAAsB,IAGhEH,GAAAA,CAAAA,CAAAA,EAG3CF,WAH2CE;;;;;AAEpCF,UAMLe,mBANKf,CAAAA,WAAAA,CAAAA,SAMoCf,mBANpCe,EAMyDH,sBANzDG,CAMgFa,WANhFb,CAAAA,EAM8FL,mBAN9FK,EAMmHQ,oBANnHR,EAMyIC,iBANzID,EAM4JgB,IAN5JhB,CAMiKD,oCANjKC,EAAAA,aAAAA,CAAAA,CAAAA;;AAP8D;AAapF;AAAoC,uBAKNiB,cALM,CAAA,WAAA,CAAA,SAK8BlC,aAL9B,CAK4CgB,oCAL5C,EAKkFX,cALlF,CAAA,YAK6G2B,mBAL7G,CAKiIF,WALjI,CAAA,CAAA;EAAA;EAA6E,OAAsEd,OAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAAoC,IAAjKd,UAAAA,CAAAA,CAAAA,EAAAA;IAAqBY,CAAAA,GAAAA,EAAAA,MAAAA,CAAAA,EAAAA,MAAAA;EAAsB,CAAA,GAAeF,SAAAA;EAAmB,eAAEa,EAAAA,OAAAA;EAAoB;EAAmB,KAAEQ,EAAAA,MAAAA;EAAI,SAAA,EAAA,MAAA;EAKxJC,WAAAA,EAAAA,MAAc;EAAA,eAAA,EAAA,MAAA;EAAA,kBAAoClB,EAAAA,MAAAA;EAAoC,IAAEX,EAAAA,MAAAA;EAAc,IAAiCyB,EAAAA,MAAAA;EAAW,IAqB5JjB,EAAAA,MAAAA;EAAqB,eAChBe,EAAAA,MAAAA;EAAqB,gBAG3BJ,EAAAA,MAAAA;EAAqB,aACtBJ,EAAAA,MAAAA,EAAAA;EAAkB,QAGvBe,EAAAA,OAAAA;EAAM,WACsBL,EAAAA,MAAAA;EAAW,cAA1BD,EATNhB,qBASMgB,EAAAA;EAAc,kBACSC,CAAAA,EATxBF,qBASwBE,EAAAA;EAAW;EAAZ,kCACHA,EAAAA,OAAAA,GAAAA,SAAAA;EAAW,aAA/BE,EAPNR,qBAOMQ;EAAmB,YACS/B,EAPnCmB,kBAOmCnB;EAAe,WACH6B,EAAAA,OAAAA;EAAW,SAAhCnB,EAAAA,OAAAA;EAAoB,MAAgBW,CAAAA,EALnEa,MAKmEb,CAAAA,MAAAA,EAAAA,MAAAA,CAAAA;EAAsB,UAC/DA,UAAAA,EALbO,cAKaP,CALEQ,WAKFR,CAAAA;EAAsB,UACfQ,kBAAAA,EALZD,cAKYC,CALGA,WAKHA,CAAAA;EAAW,WAAhCnB,CAAAA,MAAAA,CAAAA,EAJAqB,mBAIArB,CAJoBmB,WAIpBnB,CAAAA;EAAoB,WACCmB,CAAAA,OAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,CAAAA,EAJO7B,eAIP6B;EAAW,SAAhCnB,qBAAAA,CAAAA,MAAAA,CAAAA,EAHmBA,oBAGnBA,CAHwCmB,WAGxCnB,CAAAA,CAAAA,EAHuDW,sBAGvDX;EAAoB,iBAAgBW,CAAAA,MAAAA,EAAAA,MAAAA,CAAAA,EAFtBA,sBAEsBA;EAAsB,WACpCQ,CAAAA,MAAAA,CAAAA,EAFtBnB,oBAEsBmB,CAFDA,WAECA,CAAAA,CAAAA,EAAAA,MAAAA,GAAAA,SAAAA;EAAW,WAA9BP,CAAAA,MAAAA,CAAAA,EADHZ,oBACGY,CADkBO,WAClBP,CAAAA,CAAAA,EADiCD,sBACjCC;EAAkB,eAAuBD,CAAAA,MAAAA,EAAzCC,kBAAyCD,CAAtBQ,WAAsBR,CAAAA,EAAAA,MAAAA,EAAAA,sBAAAA,CAAAA,EAAAA,IAAAA;EAAsB,IACvEP,QAAAA,CAAAA,CAAAA,EAAAA,kBAAAA;EAAkB,SACjBW,CAAAA,KAAAA,EAAAA,gBAAAA,EAAAA,EAAAA,MAAAA,CAAAA,EAA6BU,OAA7BV,CAAqCV,oCAArCU,CAAAA,CAAAA,EAA6ElB,QAA7EkB,CAAsFpB,sBAAtFoB,EAA8GrB,cAA9GqB,EAA8HV,oCAA9HU,CAAAA;EAAgB;EAAyD,QAA5CU,CAAAA,CAAAA,EAAAA,MAAAA;EAAO;;;EAA8H,gBAArF5B,CAAAA,OAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,CAAAA,EAAQ,0BAARA;EAAQ,SAAA,CAAA,QAAA,EAOlFV,WAPkF,EAAA,EAAA,OAAA,EAAA,IAAA,CAAA,mBAAA,CAAA,EAAA,UAAA,EAOnBC,wBAPmB,GAAA,SAAA,CAAA,EAOoBwC,OAPpB,CAO4BnC,UAP5B,CAAA;EAMA,qBAClFN,CAAAA,SAAAA,EACaA,WADbA,EAAAA,EAAAA,OAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAC6EC,wBAD7ED,CAAAA,EACwG0C,cADxG1C,CACuHK,mBADvHL,CAAAA;EAAW;EAA4E,iBAAuBM,CAAAA,CAAAA,EAAAA,KAAAA,EAAAA;EAAU,oBAAlBmC;EAAO;EACrF,kBAK1BJ,MAL+EpC,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,GAKzDoC,MALyDpC,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,CAAAA,CAAAA,YAAAA,EAKtBW,cALsBX,CAKP0C,SALO1C;EAAwB;EAAA,EAOtHoC,MAPyHK,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,EAAAA,MAAAA,CAAAA,EAO3FjC,6BAP2FiC,CAAAA,KAAAA,CAAAA,CAAAA,EAOpDhC,QAPoDgC,CAO3ClC,sBAP2CkC,EAOnBC,SAPmBD,CAAAA;EAAc,oBAKxHL;EAAM;EAAsB,kBAK5BA,MALwEM,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,GAKlDN,MALkDM,C
AAAA,MAAAA,EAAAA,GAAAA,CAAAA,CAAAA,CAAAA,YAAAA,EAKf/B,cALe+B,CAKAA,SALAA;EAAS;EAAA,EAOhGN,MALAA,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,EAAAA,MAAAA,CAAAA,EAK8B5B,6BAL9B4B,CAAAA,IAAAA,CAAAA,CAAAA,EAKoE3B,QALpE2B,CAK6E7B,sBAL7E6B,EAAAA;IAA8B5B,GAAAA,EAMxBT,WANwBS;IAAgDD,MAAAA,EAOrEmC,SAPqEnC;EAAsB,CAAA,CAAA"}
1
+ {"version":3,"file":"chat_models.d.ts","names":["BaseMessage","CallbackManagerForLLMRun","BaseChatModel","LangSmithParams","BaseChatModelParams","ChatGenerationChunk","ChatResult","AIMessageChunk","BaseLanguageModelInput","StructuredOutputMethodOptions","ModelProfile","Runnable","AsyncCaller","InteropZodType","GoogleAIBaseLLMInput","GoogleAIModelParams","GoogleAISafetySetting","GoogleConnectionParams","GooglePlatformType","GoogleAIBaseLanguageModelCallOptions","GoogleAIAPI","GoogleAIAPIParams","GoogleSearchToolSetting","GoogleSpeechConfig","AbstractGoogleLLMConnection","GoogleAbstractedClient","GoogleBaseLLMInput","GoogleAISafetyHandler","GoogleAISafetyParams","GoogleAIToolType","GeminiAPIConfig","GoogleAIModelModality","ChatConnection","AuthOptions","Exclude","ChatGoogleBaseInput","Pick","ChatGoogleBase","Record","Partial","__types_js0","GoogleAIModelRequestParams","Promise","AsyncGenerator","RunOutput"],"sources":["../src/chat_models.d.ts"],"sourcesContent":["import { type BaseMessage } from \"@langchain/core/messages\";\nimport { CallbackManagerForLLMRun } from \"@langchain/core/callbacks/manager\";\nimport { BaseChatModel, LangSmithParams, type BaseChatModelParams } from \"@langchain/core/language_models/chat_models\";\nimport { ChatGenerationChunk, ChatResult } from \"@langchain/core/outputs\";\nimport { AIMessageChunk } from \"@langchain/core/messages\";\nimport { BaseLanguageModelInput, StructuredOutputMethodOptions } from \"@langchain/core/language_models/base\";\nimport { type ModelProfile } from \"@langchain/core/language_models/profile\";\nimport { Runnable } from \"@langchain/core/runnables\";\nimport { AsyncCaller } from \"@langchain/core/utils/async_caller\";\nimport { InteropZodType } from \"@langchain/core/utils/types\";\nimport { GoogleAIBaseLLMInput, GoogleAIModelParams, GoogleAISafetySetting, GoogleConnectionParams, GooglePlatformType, GoogleAIBaseLanguageModelCallOptions, GoogleAIAPI, GoogleAIAPIParams, GoogleSearchToolSetting, GoogleSpeechConfig } from \"./types.js\";\nimport { AbstractGoogleLLMConnection } from \"./connection.js\";\nimport { GoogleAbstractedClient } from \"./auth.js\";\nimport type { GoogleBaseLLMInput, GoogleAISafetyHandler, GoogleAISafetyParams, GoogleAIToolType, GeminiAPIConfig, GoogleAIModelModality } from \"./types.js\";\nexport declare class ChatConnection<AuthOptions> extends AbstractGoogleLLMConnection<BaseMessage[], AuthOptions> {\n convertSystemMessageToHumanContent: boolean | undefined;\n constructor(fields: GoogleAIBaseLLMInput<AuthOptions> | undefined, caller: AsyncCaller, client: GoogleAbstractedClient, streaming: boolean);\n get useSystemInstruction(): boolean;\n get computeUseSystemInstruction(): boolean;\n computeGoogleSearchToolAdjustmentFromModel(): Exclude<GoogleSearchToolSetting, boolean>;\n computeGoogleSearchToolAdjustment(apiConfig: GeminiAPIConfig): Exclude<GoogleSearchToolSetting, true>;\n buildGeminiAPI(): GoogleAIAPI;\n get api(): GoogleAIAPI;\n}\n/**\n * Input to chat model class.\n */\nexport interface ChatGoogleBaseInput<AuthOptions> extends BaseChatModelParams, GoogleConnectionParams<AuthOptions>, GoogleAIModelParams, GoogleAISafetyParams, GoogleAIAPIParams, Pick<GoogleAIBaseLanguageModelCallOptions, \"streamUsage\"> {\n}\n/**\n * Integration with a Google chat model.\n */\nexport declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<GoogleAIBaseLanguageModelCallOptions, AIMessageChunk> implements ChatGoogleBaseInput<AuthOptions> {\n // Used for tracing, replace with the same name as your class\n 
static lc_name(): string;\n get lc_secrets(): {\n [key: string]: string;\n } | undefined;\n lc_serializable: boolean;\n // Set based on modelName\n model: string;\n modelName: string;\n temperature: number;\n maxOutputTokens: number;\n maxReasoningTokens: number;\n topP: number;\n topK: number;\n seed: number;\n presencePenalty: number;\n frequencyPenalty: number;\n stopSequences: string[];\n logprobs: boolean;\n topLogprobs: number;\n safetySettings: GoogleAISafetySetting[];\n responseModalities?: GoogleAIModelModality[];\n // May intentionally be undefined, meaning to compute this.\n convertSystemMessageToHumanContent: boolean | undefined;\n safetyHandler: GoogleAISafetyHandler;\n speechConfig: GoogleSpeechConfig;\n streamUsage: boolean;\n streaming: boolean;\n labels?: Record<string, string>;\n protected connection: ChatConnection<AuthOptions>;\n protected streamedConnection: ChatConnection<AuthOptions>;\n constructor(fields?: ChatGoogleBaseInput<AuthOptions>);\n getLsParams(options: this[\"ParsedCallOptions\"]): LangSmithParams;\n abstract buildAbstractedClient(fields?: GoogleAIBaseLLMInput<AuthOptions>): GoogleAbstractedClient;\n buildApiKeyClient(apiKey: string): GoogleAbstractedClient;\n buildApiKey(fields?: GoogleAIBaseLLMInput<AuthOptions>): string | undefined;\n buildClient(fields?: GoogleAIBaseLLMInput<AuthOptions>): GoogleAbstractedClient;\n buildConnection(fields: GoogleBaseLLMInput<AuthOptions>, client: GoogleAbstractedClient): void;\n get platform(): GooglePlatformType;\n bindTools(tools: GoogleAIToolType[], kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, GoogleAIBaseLanguageModelCallOptions>;\n // Replace\n _llmType(): string;\n /**\n * Get the parameters used to invoke the model\n */\n invocationParams(options?: this[\"ParsedCallOptions\"]): import(\"./types.js\").GoogleAIModelRequestParams;\n _generate(messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager: CallbackManagerForLLMRun | undefined): Promise<ChatResult>;\n _streamResponseChunks(_messages: BaseMessage[], options: this[\"ParsedCallOptions\"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;\n /** @ignore */\n _combineLLMOutput(): never[];\n /**\n * Return profiling information for the model.\n *\n * Provides information about the model's capabilities and constraints,\n * including token limits, multimodal support, and advanced features like\n * tool calling and structured output.\n *\n * @returns {ModelProfile} An object describing the model's capabilities and constraints\n */\n get profile(): ModelProfile;\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>, config?: StructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {\n raw: BaseMessage;\n parsed: RunOutput;\n 
}>;\n}\n"],"mappings":";;;;;;;;;;;;;;AAcqBgC,cAAAA,cAAc,CAAA,WAAA,CAAA,SAAsBR,2BAAtB,CAAkDxB,WAAlD,EAAA,EAAiEiC,WAAjE,CAAA,CAAA;EAAA,kCAAA,EAAA,OAAA,GAAA,SAAA;EAAA,WAAkDjC,CAAAA,MAAAA,EAE7Dc,oBAF6Dd,CAExCiC,WAFwCjC,CAAAA,GAAAA,SAAAA,EAAAA,MAAAA,EAENY,WAFMZ,EAAAA,MAAAA,EAEeyB,sBAFfzB,EAAAA,SAAAA,EAAAA,OAAAA;EAAW,IAAIiC,oBAAAA,CAAAA,CAAAA,EAAAA,OAAAA;EAAW,IAElEA,2BAAAA,CAAAA,CAAAA,EAAAA,OAAAA;EAAW,0CAAhCnB,CAAAA,CAAAA,EAG0BoB,OAH1BpB,CAGkCQ,uBAHlCR,EAAAA,OAAAA,CAAAA;EAAoB,iCAAmCF,CAAAA,SAAAA,EAI9BkB,eAJ8BlB,CAAAA,EAIZsB,OAJYtB,CAIJU,uBAJIV,EAAAA,IAAAA,CAAAA;EAAW,cAAUa,CAAAA,CAAAA,EAK9EL,WAL8EK;EAAsB,IAGhEH,GAAAA,CAAAA,CAAAA,EAG3CF,WAH2CE;;;;;AAEpCF,UAMLe,mBANKf,CAAAA,WAAAA,CAAAA,SAMoChB,mBANpCgB,EAMyDH,sBANzDG,CAMgFa,WANhFb,CAAAA,EAM8FL,mBAN9FK,EAMmHQ,oBANnHR,EAMyIC,iBANzID,EAM4JgB,IAN5JhB,CAMiKD,oCANjKC,EAAAA,aAAAA,CAAAA,CAAAA;;AAP8D;AAapF;AAAoC,uBAKNiB,cALM,CAAA,WAAA,CAAA,SAK8BnC,aAL9B,CAK4CiB,oCAL5C,EAKkFZ,cALlF,CAAA,YAK6G4B,mBAL7G,CAKiIF,WALjI,CAAA,CAAA;EAAA;EAA6E,OAAsEd,OAAAA,CAAAA,CAAAA,EAAAA,MAAAA;EAAoC,IAAjKf,UAAAA,CAAAA,CAAAA,EAAAA;IAAqBa,CAAAA,GAAAA,EAAAA,MAAAA,CAAAA,EAAAA,MAAAA;EAAsB,CAAA,GAAeF,SAAAA;EAAmB,eAAEa,EAAAA,OAAAA;EAAoB;EAAmB,KAAEQ,EAAAA,MAAAA;EAAI,SAAA,EAAA,MAAA;EAKxJC,WAAAA,EAAAA,MAAc;EAAA,eAAA,EAAA,MAAA;EAAA,kBAAoClB,EAAAA,MAAAA;EAAoC,IAAEZ,EAAAA,MAAAA;EAAc,IAAiC0B,EAAAA,MAAAA;EAAW,IAqB5JjB,EAAAA,MAAAA;EAAqB,eAChBe,EAAAA,MAAAA;EAAqB,gBAG3BJ,EAAAA,MAAAA;EAAqB,aACtBJ,EAAAA,MAAAA,EAAAA;EAAkB,QAGvBe,EAAAA,OAAAA;EAAM,WACsBL,EAAAA,MAAAA;EAAW,cAA1BD,EATNhB,qBASMgB,EAAAA;EAAc,kBACSC,CAAAA,EATxBF,qBASwBE,EAAAA;EAAW;EAAZ,kCACHA,EAAAA,OAAAA,GAAAA,SAAAA;EAAW,aAA/BE,EAPNR,qBAOMQ;EAAmB,YACShC,EAPnCoB,kBAOmCpB;EAAe,WACH8B,EAAAA,OAAAA;EAAW,SAAhCnB,EAAAA,OAAAA;EAAoB,MAAgBW,CAAAA,EALnEa,MAKmEb,CAAAA,MAAAA,EAAAA,MAAAA,CAAAA;EAAsB,UAC/DA,UAAAA,EALbO,cAKaP,CALEQ,WAKFR,CAAAA;EAAsB,UACfQ,kBAAAA,EALZD,cAKYC,CALGA,WAKHA,CAAAA;EAAW,WAAhCnB,CAAAA,MAAAA,CAAAA,EAJAqB,mBAIArB,CAJoBmB,WAIpBnB,CAAAA;EAAoB,WACCmB,CAAAA,OAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,CAAAA,EAJO9B,eAIP8B;EAAW,SAAhCnB,qBAAAA,CAAAA,MAAAA,CAAAA,EAHmBA,oBAGnBA,CAHwCmB,WAGxCnB,CAAAA,CAAAA,EAHuDW,sBAGvDX;EAAoB,iBAAgBW,CAAAA,MAAAA,EAAAA,MAAAA,CAAAA,EAFtBA,sBAEsBA;EAAsB,WACpCQ,CAAAA,MAAAA,CAAAA,EAFtBnB,oBAEsBmB,CAFDA,WAECA,CAAAA,CAAAA,EAAAA,MAAAA,GAAAA,SAAAA;EAAW,WAA9BP,CAAAA,MAAAA,CAAAA,EADHZ,oBACGY,CADkBO,WAClBP,CAAAA,CAAAA,EADiCD,sBACjCC;EAAkB,eAAuBD,CAAAA,MAAAA,EAAzCC,kBAAyCD,CAAtBQ,WAAsBR,CAAAA,EAAAA,MAAAA,EAAAA,sBAAAA,CAAAA,EAAAA,IAAAA;EAAsB,IACvEP,QAAAA,CAAAA,CAAAA,EAAAA,kBAAAA;EAAkB,SACjBW,CAAAA,KAAAA,EAAAA,gBAAAA,EAAAA,EAAAA,MAAAA,CAAAA,EAA6BU,OAA7BV,CAAqCV,oCAArCU,CAAAA,CAAAA,EAA6ElB,QAA7EkB,CAAsFrB,sBAAtFqB,EAA8GtB,cAA9GsB,EAA8HV,oCAA9HU,CAAAA;EAAgB;EAAyD,QAA5CU,CAAAA,CAAAA,EAAAA,MAAAA;EAAO;;;EAA8H,gBAArF5B,CAAAA,OAAAA,CAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,CAAAA,EAAQ,0BAARA;EAAQ,SAAA,CAAA,QAAA,EAOlFX,WAPkF,EAAA,EAAA,OAAA,EAAA,IAAA,CAAA,mBAAA,CAAA,EAAA,UAAA,EAOnBC,wBAPmB,GAAA,SAAA,CAAA,EAOoByC,OAPpB,CAO4BpC,UAP5B,CAAA;EAMA,qBAClFN,CAAAA,SAAAA,EACaA,WADbA,EAAAA,EAAAA,OAAAA,EAAAA,IAAAA,CAAAA,mBAAAA,CAAAA,EAAAA,UAAAA,CAAAA,EAC6EC,wBAD7ED,CAAAA,EACwG2C,cADxG3C,CACuHK,mBADvHL,CAAAA;EAAW;EAA4E,iBAAuBM,CAAAA,CAAAA,EAAAA,KAAAA,EAAAA;EAAU;;;;;;;;;EAgBzC,IAAxBO,OAAAA,CAAAA,CAAAA,EAH5DH,YAG4DG;EAAc,oBAEtFyB;EAAM;EAAqD,kBAF5CA,MAE+D9B,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,GAFzC8B,MAEyC9B,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,CAAAA,CAAAA,YAAAA,EAFNK,cAEML,CAFSoC,SAETpC;EAAsB;EAAA,EAApG8B,MAAqE3B,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,EAAAA,MAAAA,CAAAA,EAAvCF,6BAAuCE,CAAAA,KAAAA,CAAAA,CAAAA,EAAAA,QAAAA,CAASH,sBAATG,EAAiCiC,SAAjCjC,CAAAA;EAAQ,oBAG9D2B;EAAM;EAAsB,kBAA5B
A,MAAwEM,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,GAAlDN,MAAkDM,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,CAAAA,CAAAA,YAAAA,EAAf/B,cAAe+B,CAAAA,SAAAA;EAAS;EAAA,EAEhGN,MAAAA,CAAAA,MAAAA,EAAAA,GAAAA,CAAAA,EAAAA,MAAAA,CAAAA,EAA8B7B,6BAA9B6B,CAAAA,IAAAA,CAAAA,CAAAA,EAAoE3B,QAApE2B,CAA6E9B,sBAA7E8B,EAAAA;IAA8B7B,GAAAA,EACxBT,WADwBS;IAA+CD,MAAAA,EAEpEoC,SAFoEpC;EAAsB,CAAA,CAAA"}
@@ -4,6 +4,7 @@ import { convertToGeminiTools, copyAIModelParams, copyAndValidateModelParamsInto
4
4
  import { ensureParams } from "./utils/failed_handler.js";
5
5
  import { AbstractGoogleLLMConnection } from "./connection.js";
6
6
  import { ApiKeyGoogleAuth } from "./auth.js";
7
+ import profiles_default from "./profiles.js";
7
8
  import { getEnvironmentVariable } from "@langchain/core/utils/env";
8
9
  import { BaseChatModel } from "@langchain/core/language_models/chat_models";
9
10
  import { ChatGenerationChunk } from "@langchain/core/outputs";
@@ -197,6 +198,18 @@ var ChatGoogleBase = class extends BaseChatModel {
197
198
  _combineLLMOutput() {
198
199
  return [];
199
200
  }
201
+ /**
202
+ * Return profiling information for the model.
203
+ *
204
+ * Provides information about the model's capabilities and constraints,
205
+ * including token limits, multimodal support, and advanced features like
206
+ * tool calling and structured output.
207
+ *
208
+ * @returns {ModelProfile} An object describing the model's capabilities and constraints
209
+ */
210
+ get profile() {
211
+ return profiles_default[this.model] ?? {};
212
+ }
200
213
  withStructuredOutput(outputSchema, config) {
201
214
  const schema = outputSchema;
202
215
  const name = config?.name;
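The compiled getter above resolves the profile with a plain object lookup keyed by `this.model`, falling back to an empty profile for unrecognized model names rather than throwing. A sketch of that lookup pattern, with a hypothetical table entry (the real entries ship in the package's generated `profiles.js`):

```ts
import type { ModelProfile } from "@langchain/core/language_models/profile";

// Hypothetical table; the shipped ./profiles.js maps known Gemini model
// names to their ModelProfile entries.
const PROFILES: Record<string, ModelProfile> = {
  "gemini-2.0-flash": {
    // capability flags and limits for this model would go here
  },
};

// Same fallback behavior as the getter: unknown models yield an empty profile.
function profileFor(model: string): ModelProfile {
  return PROFILES[model] ?? {};
}
```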
@@ -1 +1 @@
1
- {"version":3,"file":"chat_models.js","names":["fields: GoogleAIBaseLLMInput<AuthOptions> | undefined","caller: AsyncCaller","client: GoogleAbstractedClient","streaming: boolean","apiConfig: GeminiAPIConfig","geminiConfig: GeminiAPIConfig","fields?: ChatGoogleBaseInput<AuthOptions>","options: this[\"ParsedCallOptions\"]","apiKey: string","fields?: GoogleAIBaseLLMInput<AuthOptions>","fields: GoogleBaseLLMInput<AuthOptions>","tools: GoogleAIToolType[]","kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>","options?: this[\"ParsedCallOptions\"]","messages: BaseMessage[]","runManager: CallbackManagerForLLMRun | undefined","finalChunk: ChatGenerationChunk | null","chunk","_messages: BaseMessage[]","runManager?: CallbackManagerForLLMRun","usageMetadata: UsageMetadata | undefined","outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>","config?: StructuredOutputMethodOptions<boolean>","schema: InteropZodType<RunOutput> | Record<string, any>","outputParser: BaseLLMOutputParser<RunOutput>","tools: GeminiTool[]","geminiFunctionDefinition: GeminiFunctionDeclaration","parameters: GeminiJsonSchema","input: any","config"],"sources":["../src/chat_models.ts"],"sourcesContent":["import { getEnvironmentVariable } from \"@langchain/core/utils/env\";\nimport { UsageMetadata, type BaseMessage } from \"@langchain/core/messages\";\nimport { CallbackManagerForLLMRun } from \"@langchain/core/callbacks/manager\";\n\nimport {\n BaseChatModel,\n LangSmithParams,\n type BaseChatModelParams,\n} from \"@langchain/core/language_models/chat_models\";\nimport { ChatGenerationChunk, ChatResult } from \"@langchain/core/outputs\";\nimport { AIMessageChunk } from \"@langchain/core/messages\";\nimport {\n BaseLanguageModelInput,\n StructuredOutputMethodOptions,\n} from \"@langchain/core/language_models/base\";\nimport {\n Runnable,\n RunnablePassthrough,\n RunnableSequence,\n} from \"@langchain/core/runnables\";\nimport { JsonOutputKeyToolsParser } from \"@langchain/core/output_parsers/openai_tools\";\nimport { BaseLLMOutputParser } from \"@langchain/core/output_parsers\";\nimport { AsyncCaller } from \"@langchain/core/utils/async_caller\";\nimport { concat } from \"@langchain/core/utils/stream\";\nimport {\n InteropZodType,\n isInteropZodSchema,\n} from \"@langchain/core/utils/types\";\nimport {\n GoogleAIBaseLLMInput,\n GoogleAIModelParams,\n GoogleAISafetySetting,\n GoogleConnectionParams,\n GooglePlatformType,\n GeminiTool,\n GoogleAIBaseLanguageModelCallOptions,\n GoogleAIAPI,\n GoogleAIAPIParams,\n GoogleSearchToolSetting,\n GoogleSpeechConfig,\n GeminiJsonSchema,\n} from \"./types.js\";\nimport {\n convertToGeminiTools,\n copyAIModelParams,\n copyAndValidateModelParamsInto,\n} from \"./utils/common.js\";\nimport { AbstractGoogleLLMConnection } from \"./connection.js\";\nimport { DefaultGeminiSafetyHandler, getGeminiAPI } from \"./utils/gemini.js\";\nimport { ApiKeyGoogleAuth, GoogleAbstractedClient } from \"./auth.js\";\nimport { JsonStream } from \"./utils/stream.js\";\nimport { ensureParams } from \"./utils/failed_handler.js\";\nimport type {\n GoogleBaseLLMInput,\n GoogleAISafetyHandler,\n GoogleAISafetyParams,\n GeminiFunctionDeclaration,\n GeminiFunctionSchema,\n GoogleAIToolType,\n GeminiAPIConfig,\n GoogleAIModelModality,\n} from \"./types.js\";\nimport {\n removeAdditionalProperties,\n schemaToGeminiParameters,\n} from \"./utils/zod_to_gemini_parameters.js\";\n\nexport class ChatConnection<AuthOptions> extends 
AbstractGoogleLLMConnection<\n BaseMessage[],\n AuthOptions\n> {\n convertSystemMessageToHumanContent: boolean | undefined;\n\n constructor(\n fields: GoogleAIBaseLLMInput<AuthOptions> | undefined,\n caller: AsyncCaller,\n client: GoogleAbstractedClient,\n streaming: boolean\n ) {\n super(fields, caller, client, streaming);\n this.convertSystemMessageToHumanContent =\n fields?.convertSystemMessageToHumanContent;\n }\n\n get useSystemInstruction(): boolean {\n return typeof this.convertSystemMessageToHumanContent === \"boolean\"\n ? !this.convertSystemMessageToHumanContent\n : this.computeUseSystemInstruction;\n }\n\n get computeUseSystemInstruction(): boolean {\n // This works on models from April 2024 and later\n // Vertex AI: gemini-1.5-pro and gemini-1.0-002 and later\n // AI Studio: gemini-1.5-pro-latest\n if (this.modelFamily === \"palm\") {\n return false;\n } else if (this.modelName === \"gemini-1.0-pro-001\") {\n return false;\n } else if (this.modelName.startsWith(\"gemini-pro-vision\")) {\n return false;\n } else if (this.modelName.startsWith(\"gemini-1.0-pro-vision\")) {\n return false;\n } else if (this.modelName === \"gemini-pro\" && this.platform === \"gai\") {\n // on AI Studio gemini-pro is still pointing at gemini-1.0-pro-001\n return false;\n } else if (this.modelFamily === \"gemma\") {\n // At least as of 12 Mar 2025 gemma 3 on AIS, trying to use system instructions yields an error:\n // \"Developer instruction is not enabled for models/gemma-3-27b-it\"\n return false;\n }\n return true;\n }\n\n computeGoogleSearchToolAdjustmentFromModel(): Exclude<\n GoogleSearchToolSetting,\n boolean\n > {\n if (this.modelName.startsWith(\"gemini-1.0\")) {\n return \"googleSearchRetrieval\";\n } else if (this.modelName.startsWith(\"gemini-1.5\")) {\n return \"googleSearchRetrieval\";\n } else {\n return \"googleSearch\";\n }\n }\n\n computeGoogleSearchToolAdjustment(\n apiConfig: GeminiAPIConfig\n ): Exclude<GoogleSearchToolSetting, true> {\n const adj = apiConfig.googleSearchToolAdjustment;\n if (adj === undefined || adj === true) {\n return this.computeGoogleSearchToolAdjustmentFromModel();\n } else {\n return adj;\n }\n }\n\n buildGeminiAPI(): GoogleAIAPI {\n const apiConfig: GeminiAPIConfig =\n (this.apiConfig as GeminiAPIConfig) ?? 
{};\n const googleSearchToolAdjustment =\n this.computeGoogleSearchToolAdjustment(apiConfig);\n const geminiConfig: GeminiAPIConfig = {\n useSystemInstruction: this.useSystemInstruction,\n googleSearchToolAdjustment,\n ...apiConfig,\n };\n return getGeminiAPI(geminiConfig);\n }\n\n get api(): GoogleAIAPI {\n switch (this.apiName) {\n case \"google\":\n return this.buildGeminiAPI();\n default:\n return super.api;\n }\n }\n}\n\n/**\n * Input to chat model class.\n */\nexport interface ChatGoogleBaseInput<AuthOptions>\n extends BaseChatModelParams,\n GoogleConnectionParams<AuthOptions>,\n GoogleAIModelParams,\n GoogleAISafetyParams,\n GoogleAIAPIParams,\n Pick<GoogleAIBaseLanguageModelCallOptions, \"streamUsage\"> {}\n\n/**\n * Integration with a Google chat model.\n */\nexport abstract class ChatGoogleBase<AuthOptions>\n extends BaseChatModel<GoogleAIBaseLanguageModelCallOptions, AIMessageChunk>\n implements ChatGoogleBaseInput<AuthOptions>\n{\n // Used for tracing, replace with the same name as your class\n static lc_name() {\n return \"ChatGoogle\";\n }\n\n get lc_secrets(): { [key: string]: string } | undefined {\n return {\n authOptions: \"GOOGLE_AUTH_OPTIONS\",\n };\n }\n\n lc_serializable = true;\n\n // Set based on modelName\n model: string;\n\n modelName = \"gemini-pro\";\n\n temperature: number;\n\n maxOutputTokens: number;\n\n maxReasoningTokens: number;\n\n topP: number;\n\n topK: number;\n\n seed: number;\n\n presencePenalty: number;\n\n frequencyPenalty: number;\n\n stopSequences: string[] = [];\n\n logprobs: boolean;\n\n topLogprobs: number = 0;\n\n safetySettings: GoogleAISafetySetting[] = [];\n\n responseModalities?: GoogleAIModelModality[];\n\n // May intentionally be undefined, meaning to compute this.\n convertSystemMessageToHumanContent: boolean | undefined;\n\n safetyHandler: GoogleAISafetyHandler;\n\n speechConfig: GoogleSpeechConfig;\n\n streamUsage = true;\n\n streaming = false;\n\n labels?: Record<string, string>;\n\n protected connection: ChatConnection<AuthOptions>;\n\n protected streamedConnection: ChatConnection<AuthOptions>;\n\n constructor(fields?: ChatGoogleBaseInput<AuthOptions>) {\n super(ensureParams(fields));\n\n copyAndValidateModelParamsInto(fields, this);\n this.safetyHandler =\n fields?.safetyHandler ?? new DefaultGeminiSafetyHandler();\n this.streamUsage = fields?.streamUsage ?? this.streamUsage;\n const client = this.buildClient(fields);\n this.buildConnection(fields ?? {}, client);\n }\n\n getLsParams(options: this[\"ParsedCallOptions\"]): LangSmithParams {\n const params = this.invocationParams(options);\n return {\n ls_provider: \"google_vertexai\",\n ls_model_name: this.model,\n ls_model_type: \"chat\",\n ls_temperature: params.temperature ?? undefined,\n ls_max_tokens: params.maxOutputTokens ?? undefined,\n ls_stop: options.stop,\n };\n }\n\n abstract buildAbstractedClient(\n fields?: GoogleAIBaseLLMInput<AuthOptions>\n ): GoogleAbstractedClient;\n\n buildApiKeyClient(apiKey: string): GoogleAbstractedClient {\n return new ApiKeyGoogleAuth(apiKey);\n }\n\n buildApiKey(fields?: GoogleAIBaseLLMInput<AuthOptions>): string | undefined {\n return fields?.apiKey ?? 
getEnvironmentVariable(\"GOOGLE_API_KEY\");\n }\n\n buildClient(\n fields?: GoogleAIBaseLLMInput<AuthOptions>\n ): GoogleAbstractedClient {\n const apiKey = this.buildApiKey(fields);\n if (apiKey) {\n return this.buildApiKeyClient(apiKey);\n } else {\n return this.buildAbstractedClient(fields);\n }\n }\n\n buildConnection(\n fields: GoogleBaseLLMInput<AuthOptions>,\n client: GoogleAbstractedClient\n ) {\n this.connection = new ChatConnection(\n { ...fields, ...this },\n this.caller,\n client,\n false\n );\n\n this.streamedConnection = new ChatConnection(\n { ...fields, ...this },\n this.caller,\n client,\n true\n );\n }\n\n get platform(): GooglePlatformType {\n return this.connection.platform;\n }\n\n override bindTools(\n tools: GoogleAIToolType[],\n kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>\n ): Runnable<\n BaseLanguageModelInput,\n AIMessageChunk,\n GoogleAIBaseLanguageModelCallOptions\n > {\n return this.withConfig({ tools: convertToGeminiTools(tools), ...kwargs });\n }\n\n // Replace\n _llmType() {\n return \"chat_integration\";\n }\n\n /**\n * Get the parameters used to invoke the model\n */\n override invocationParams(options?: this[\"ParsedCallOptions\"]) {\n return copyAIModelParams(this, options);\n }\n\n async _generate(\n messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager: CallbackManagerForLLMRun | undefined\n ): Promise<ChatResult> {\n const parameters = this.invocationParams(options);\n if (this.streaming) {\n const stream = this._streamResponseChunks(messages, options, runManager);\n let finalChunk: ChatGenerationChunk | null = null;\n for await (const chunk of stream) {\n finalChunk = !finalChunk ? chunk : concat(finalChunk, chunk);\n }\n if (!finalChunk) {\n throw new Error(\"No chunks were returned from the stream.\");\n }\n return {\n generations: [finalChunk],\n };\n }\n\n const response = await this.connection.request(\n messages,\n parameters,\n options,\n runManager\n );\n const ret = this.connection.api.responseToChatResult(response);\n const chunk = ret?.generations?.[0];\n if (chunk) {\n await runManager?.handleLLMNewToken(chunk.text || \"\");\n }\n return ret;\n }\n\n async *_streamResponseChunks(\n _messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n // Make the call as a streaming request\n const parameters = this.invocationParams(options);\n const response = await this.streamedConnection.request(\n _messages,\n parameters,\n options,\n runManager\n );\n\n // Get the streaming parser of the response\n const stream = response.data as JsonStream;\n let usageMetadata: UsageMetadata | undefined;\n // Loop until the end of the stream\n // During the loop, yield each time we get a chunk from the streaming parser\n // that is either available or added to the queue\n while (!stream.streamDone) {\n const output = await stream.nextChunk();\n await runManager?.handleCustomEvent(\n `google-chunk-${this.constructor.name}`,\n {\n output,\n }\n );\n if (\n output &&\n output.usageMetadata &&\n this.streamUsage !== false &&\n options.streamUsage !== false\n ) {\n usageMetadata = {\n input_tokens: output.usageMetadata.promptTokenCount,\n output_tokens: output.usageMetadata.candidatesTokenCount,\n total_tokens: output.usageMetadata.totalTokenCount,\n };\n }\n const chunk =\n output !== null\n ? 
this.connection.api.responseToChatGeneration({ data: output })\n : new ChatGenerationChunk({\n text: \"\",\n generationInfo: { finishReason: \"stop\" },\n message: new AIMessageChunk({\n content: \"\",\n usage_metadata: usageMetadata,\n }),\n });\n if (chunk) {\n yield chunk;\n await runManager?.handleLLMNewToken(\n chunk.text ?? \"\",\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk }\n );\n }\n }\n }\n\n /** @ignore */\n _combineLLMOutput() {\n return [];\n }\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<false>\n ): Runnable<BaseLanguageModelInput, RunOutput>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<true>\n ): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<boolean>\n ):\n | Runnable<BaseLanguageModelInput, RunOutput>\n | Runnable<\n BaseLanguageModelInput,\n { raw: BaseMessage; parsed: RunOutput }\n > {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const schema: InteropZodType<RunOutput> | Record<string, any> =\n outputSchema;\n const name = config?.name;\n const method = config?.method;\n const includeRaw = config?.includeRaw;\n if (method === \"jsonMode\") {\n throw new Error(`Google only supports \"functionCalling\" as a method.`);\n }\n\n let functionName = name ?? \"extract\";\n let outputParser: BaseLLMOutputParser<RunOutput>;\n let tools: GeminiTool[];\n if (isInteropZodSchema(schema)) {\n const jsonSchema = schemaToGeminiParameters(schema);\n tools = [\n {\n functionDeclarations: [\n {\n name: functionName,\n description:\n jsonSchema.description ?? \"A function available to call.\",\n parameters: jsonSchema as GeminiFunctionSchema,\n },\n ],\n },\n ];\n outputParser = new JsonOutputKeyToolsParser({\n returnSingle: true,\n keyName: functionName,\n zodSchema: schema,\n });\n } else {\n let geminiFunctionDefinition: GeminiFunctionDeclaration;\n if (\n typeof schema.name === \"string\" &&\n typeof schema.parameters === \"object\" &&\n schema.parameters != null\n ) {\n geminiFunctionDefinition = schema as GeminiFunctionDeclaration;\n functionName = schema.name;\n } else {\n // We are providing the schema for *just* the parameters, probably\n const parameters: GeminiJsonSchema = removeAdditionalProperties(schema);\n geminiFunctionDefinition = {\n name: functionName,\n description: schema.description ?? 
\"\",\n parameters,\n };\n }\n tools = [\n {\n functionDeclarations: [geminiFunctionDefinition],\n },\n ];\n outputParser = new JsonOutputKeyToolsParser<RunOutput>({\n returnSingle: true,\n keyName: functionName,\n });\n }\n const llm = this.bindTools(tools).withConfig({ tool_choice: functionName });\n\n if (!includeRaw) {\n return llm.pipe(outputParser).withConfig({\n runName: \"ChatGoogleStructuredOutput\",\n }) as Runnable<BaseLanguageModelInput, RunOutput>;\n }\n\n const parserAssign = RunnablePassthrough.assign({\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsed: (input: any, config) => outputParser.invoke(input.raw, config),\n });\n const parserNone = RunnablePassthrough.assign({\n parsed: () => null,\n });\n const parsedWithFallback = parserAssign.withFallbacks({\n fallbacks: [parserNone],\n });\n return RunnableSequence.from<\n BaseLanguageModelInput,\n { raw: BaseMessage; parsed: RunOutput }\n >([\n {\n raw: llm,\n },\n parsedWithFallback,\n ]).withConfig({\n runName: \"StructuredOutputRunnable\",\n });\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAmEA,IAAa,iBAAb,cAAiD,4BAG/C;CACA;CAEA,YACEA,QACAC,QACAC,QACAC,WACA;EACA,MAAM,QAAQ,QAAQ,QAAQ,UAAU;EACxC,KAAK,qCACH,QAAQ;CACX;CAED,IAAI,uBAAgC;AAClC,SAAO,OAAO,KAAK,uCAAuC,YACtD,CAAC,KAAK,qCACN,KAAK;CACV;CAED,IAAI,8BAAuC;AAIzC,MAAI,KAAK,gBAAgB,OACvB,QAAO;WACE,KAAK,cAAc,qBAC5B,QAAO;WACE,KAAK,UAAU,WAAW,oBAAoB,CACvD,QAAO;WACE,KAAK,UAAU,WAAW,wBAAwB,CAC3D,QAAO;WACE,KAAK,cAAc,gBAAgB,KAAK,aAAa,MAE9D,QAAO;WACE,KAAK,gBAAgB,QAG9B,QAAO;AAET,SAAO;CACR;CAED,6CAGE;AACA,MAAI,KAAK,UAAU,WAAW,aAAa,CACzC,QAAO;WACE,KAAK,UAAU,WAAW,aAAa,CAChD,QAAO;MAEP,QAAO;CAEV;CAED,kCACEC,WACwC;EACxC,MAAM,MAAM,UAAU;AACtB,MAAI,QAAQ,UAAa,QAAQ,KAC/B,QAAO,KAAK,4CAA4C;MAExD,QAAO;CAEV;CAED,iBAA8B;EAC5B,MAAMA,YACH,KAAK,aAAiC,CAAE;EAC3C,MAAM,6BACJ,KAAK,kCAAkC,UAAU;EACnD,MAAMC,eAAgC;GACpC,sBAAsB,KAAK;GAC3B;GACA,GAAG;EACJ;AACD,SAAO,aAAa,aAAa;CAClC;CAED,IAAI,MAAmB;AACrB,UAAQ,KAAK,SAAb;GACE,KAAK,SACH,QAAO,KAAK,gBAAgB;GAC9B,QACE,QAAO,MAAM;EAChB;CACF;AACF;;;;AAgBD,IAAsB,iBAAtB,cACU,cAEV;CAEE,OAAO,UAAU;AACf,SAAO;CACR;CAED,IAAI,aAAoD;AACtD,SAAO,EACL,aAAa,sBACd;CACF;CAED,kBAAkB;CAGlB;CAEA,YAAY;CAEZ;CAEA;CAEA;CAEA;CAEA;CAEA;CAEA;CAEA;CAEA,gBAA0B,CAAE;CAE5B;CAEA,cAAsB;CAEtB,iBAA0C,CAAE;CAE5C;CAGA;CAEA;CAEA;CAEA,cAAc;CAEd,YAAY;CAEZ;CAEA,AAAU;CAEV,AAAU;CAEV,YAAYC,QAA2C;EACrD,MAAM,aAAa,OAAO,CAAC;EAE3B,+BAA+B,QAAQ,KAAK;EAC5C,KAAK,gBACH,QAAQ,iBAAiB,IAAI;EAC/B,KAAK,cAAc,QAAQ,eAAe,KAAK;EAC/C,MAAM,SAAS,KAAK,YAAY,OAAO;EACvC,KAAK,gBAAgB,UAAU,CAAE,GAAE,OAAO;CAC3C;CAED,YAAYC,SAAqD;EAC/D,MAAM,SAAS,KAAK,iBAAiB,QAAQ;AAC7C,SAAO;GACL,aAAa;GACb,eAAe,KAAK;GACpB,eAAe;GACf,gBAAgB,OAAO,eAAe;GACtC,eAAe,OAAO,mBAAmB;GACzC,SAAS,QAAQ;EAClB;CACF;CAMD,kBAAkBC,QAAwC;AACxD,SAAO,IAAI,iBAAiB;CAC7B;CAED,YAAYC,QAAgE;AAC1E,SAAO,QAAQ,UAAU,uBAAuB,iBAAiB;CAClE;CAED,YACEA,QACwB;EACxB,MAAM,SAAS,KAAK,YAAY,OAAO;AACvC,MAAI,OACF,QAAO,KAAK,kBAAkB,OAAO;MAErC,QAAO,KAAK,sBAAsB,OAAO;CAE5C;CAED,gBACEC,QACAR,QACA;EACA,KAAK,aAAa,IAAI,eACpB;GAAE,GAAG;GAAQ,GAAG;EAAM,GACtB,KAAK,QACL,QACA;EAGF,KAAK,qBAAqB,IAAI,eAC5B;GAAE,GAAG;GAAQ,GAAG;EAAM,GACtB,KAAK,QACL,QACA;CAEH;CAED,IAAI,WAA+B;AACjC,SAAO,KAAK,WAAW;CACxB;CAED,AAAS,UACPS,OACAC,QAKA;AACA,SAAO,KAAK,WAAW;GAAE,OAAO,qBAAqB,MAAM;GAAE,GAAG;EAAQ,EAAC;CAC1E;CAGD,WAAW;AACT,SAAO;CACR;;;;CAKD,AAAS,iBAAiBC,SAAqC;AAC7D,SAAO,kBAAkB,MAAM,QAAQ;CACxC;CAED,MAAM,UACJC,UACAP,SACAQ,YACqB;EACrB,MAAM,aAAa,KAAK,iBAAiB,QAAQ;AACjD,MAAI,KAAK,WAAW;GAClB,MAAM,SAAS,KAAK,sBAAsB,UAAU,SAAS,WAAW;GACxE,IAAIC,aAAyC;AAC7C,cAAW,MAAMC,WAAS,QACxB,aAAa,CAAC,aAAaA,UAAQ,OAAO,YAAYA,QAAM;AAE9D,OAAI,CAAC,WACH,OAAM,IAAI,MAAM;A
AElB,UAAO,EACL,aAAa,CAAC,UAAW,EAC1B;EACF;EAED,MAAM,WAAW,MAAM,KAAK,WAAW,QACrC,UACA,YACA,SACA,WACD;EACD,MAAM,MAAM,KAAK,WAAW,IAAI,qBAAqB,SAAS;EAC9D,MAAM,QAAQ,KAAK,cAAc;AACjC,MAAI,OACF,MAAM,YAAY,kBAAkB,MAAM,QAAQ,GAAG;AAEvD,SAAO;CACR;CAED,OAAO,sBACLC,WACAX,SACAY,YACqC;EAErC,MAAM,aAAa,KAAK,iBAAiB,QAAQ;EACjD,MAAM,WAAW,MAAM,KAAK,mBAAmB,QAC7C,WACA,YACA,SACA,WACD;EAGD,MAAM,SAAS,SAAS;EACxB,IAAIC;AAIJ,SAAO,CAAC,OAAO,YAAY;GACzB,MAAM,SAAS,MAAM,OAAO,WAAW;GACvC,MAAM,YAAY,kBAChB,CAAC,aAAa,EAAE,KAAK,YAAY,MAAM,EACvC,EACE,OACD,EACF;AACD,OACE,UACA,OAAO,iBACP,KAAK,gBAAgB,SACrB,QAAQ,gBAAgB,OAExB,gBAAgB;IACd,cAAc,OAAO,cAAc;IACnC,eAAe,OAAO,cAAc;IACpC,cAAc,OAAO,cAAc;GACpC;GAEH,MAAM,QACJ,WAAW,OACP,KAAK,WAAW,IAAI,yBAAyB,EAAE,MAAM,OAAQ,EAAC,GAC9D,IAAI,oBAAoB;IACtB,MAAM;IACN,gBAAgB,EAAE,cAAc,OAAQ;IACxC,SAAS,IAAI,eAAe;KAC1B,SAAS;KACT,gBAAgB;IACjB;GACF;AACP,OAAI,OAAO;IACT,MAAM;IACN,MAAM,YAAY,kBAChB,MAAM,QAAQ,IACd,QACA,QACA,QACA,QACA,EAAE,MAAO,EACV;GACF;EACF;CACF;;CAGD,oBAAoB;AAClB,SAAO,CAAE;CACV;CAwBD,qBAIEC,cAIAC,QAMI;EAEJ,MAAMC,SACJ;EACF,MAAM,OAAO,QAAQ;EACrB,MAAM,SAAS,QAAQ;EACvB,MAAM,aAAa,QAAQ;AAC3B,MAAI,WAAW,WACb,OAAM,IAAI,MAAM,CAAC,mDAAmD,CAAC;EAGvE,IAAI,eAAe,QAAQ;EAC3B,IAAIC;EACJ,IAAIC;AACJ,MAAI,mBAAmB,OAAO,EAAE;GAC9B,MAAM,aAAa,yBAAyB,OAAO;GACnD,QAAQ,CACN,EACE,sBAAsB,CACpB;IACE,MAAM;IACN,aACE,WAAW,eAAe;IAC5B,YAAY;GACb,CACF,EACF,CACF;GACD,eAAe,IAAI,yBAAyB;IAC1C,cAAc;IACd,SAAS;IACT,WAAW;GACZ;EACF,OAAM;GACL,IAAIC;AACJ,OACE,OAAO,OAAO,SAAS,YACvB,OAAO,OAAO,eAAe,YAC7B,OAAO,cAAc,MACrB;IACA,2BAA2B;IAC3B,eAAe,OAAO;GACvB,OAAM;IAEL,MAAMC,aAA+B,2BAA2B,OAAO;IACvE,2BAA2B;KACzB,MAAM;KACN,aAAa,OAAO,eAAe;KACnC;IACD;GACF;GACD,QAAQ,CACN,EACE,sBAAsB,CAAC,wBAAyB,EACjD,CACF;GACD,eAAe,IAAI,yBAAoC;IACrD,cAAc;IACd,SAAS;GACV;EACF;EACD,MAAM,MAAM,KAAK,UAAU,MAAM,CAAC,WAAW,EAAE,aAAa,aAAc,EAAC;AAE3E,MAAI,CAAC,WACH,QAAO,IAAI,KAAK,aAAa,CAAC,WAAW,EACvC,SAAS,6BACV,EAAC;EAGJ,MAAM,eAAe,oBAAoB,OAAO,EAE9C,QAAQ,CAACC,OAAYC,aAAW,aAAa,OAAO,MAAM,KAAKA,SAAO,CACvE,EAAC;EACF,MAAM,aAAa,oBAAoB,OAAO,EAC5C,QAAQ,MAAM,KACf,EAAC;EACF,MAAM,qBAAqB,aAAa,cAAc,EACpD,WAAW,CAAC,UAAW,EACxB,EAAC;AACF,SAAO,iBAAiB,KAGtB,CACA,EACE,KAAK,IACN,GACD,kBACD,EAAC,CAAC,WAAW,EACZ,SAAS,2BACV,EAAC;CACH;AACF"}
1
+ {"version":3,"file":"chat_models.js","names":["fields: GoogleAIBaseLLMInput<AuthOptions> | undefined","caller: AsyncCaller","client: GoogleAbstractedClient","streaming: boolean","apiConfig: GeminiAPIConfig","geminiConfig: GeminiAPIConfig","fields?: ChatGoogleBaseInput<AuthOptions>","options: this[\"ParsedCallOptions\"]","apiKey: string","fields?: GoogleAIBaseLLMInput<AuthOptions>","fields: GoogleBaseLLMInput<AuthOptions>","tools: GoogleAIToolType[]","kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>","options?: this[\"ParsedCallOptions\"]","messages: BaseMessage[]","runManager: CallbackManagerForLLMRun | undefined","finalChunk: ChatGenerationChunk | null","chunk","_messages: BaseMessage[]","runManager?: CallbackManagerForLLMRun","usageMetadata: UsageMetadata | undefined","PROFILES","outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>","config?: StructuredOutputMethodOptions<boolean>","schema: InteropZodType<RunOutput> | Record<string, any>","outputParser: BaseLLMOutputParser<RunOutput>","tools: GeminiTool[]","geminiFunctionDefinition: GeminiFunctionDeclaration","parameters: GeminiJsonSchema","input: any","config"],"sources":["../src/chat_models.ts"],"sourcesContent":["import { getEnvironmentVariable } from \"@langchain/core/utils/env\";\nimport { UsageMetadata, type BaseMessage } from \"@langchain/core/messages\";\nimport { CallbackManagerForLLMRun } from \"@langchain/core/callbacks/manager\";\n\nimport {\n BaseChatModel,\n LangSmithParams,\n type BaseChatModelParams,\n} from \"@langchain/core/language_models/chat_models\";\nimport { ChatGenerationChunk, ChatResult } from \"@langchain/core/outputs\";\nimport { AIMessageChunk } from \"@langchain/core/messages\";\nimport {\n BaseLanguageModelInput,\n StructuredOutputMethodOptions,\n} from \"@langchain/core/language_models/base\";\nimport { type ModelProfile } from \"@langchain/core/language_models/profile\";\nimport {\n Runnable,\n RunnablePassthrough,\n RunnableSequence,\n} from \"@langchain/core/runnables\";\nimport { JsonOutputKeyToolsParser } from \"@langchain/core/output_parsers/openai_tools\";\nimport { BaseLLMOutputParser } from \"@langchain/core/output_parsers\";\nimport { AsyncCaller } from \"@langchain/core/utils/async_caller\";\nimport { concat } from \"@langchain/core/utils/stream\";\nimport {\n InteropZodType,\n isInteropZodSchema,\n} from \"@langchain/core/utils/types\";\nimport {\n GoogleAIBaseLLMInput,\n GoogleAIModelParams,\n GoogleAISafetySetting,\n GoogleConnectionParams,\n GooglePlatformType,\n GeminiTool,\n GoogleAIBaseLanguageModelCallOptions,\n GoogleAIAPI,\n GoogleAIAPIParams,\n GoogleSearchToolSetting,\n GoogleSpeechConfig,\n GeminiJsonSchema,\n} from \"./types.js\";\nimport {\n convertToGeminiTools,\n copyAIModelParams,\n copyAndValidateModelParamsInto,\n} from \"./utils/common.js\";\nimport { AbstractGoogleLLMConnection } from \"./connection.js\";\nimport { DefaultGeminiSafetyHandler, getGeminiAPI } from \"./utils/gemini.js\";\nimport { ApiKeyGoogleAuth, GoogleAbstractedClient } from \"./auth.js\";\nimport { JsonStream } from \"./utils/stream.js\";\nimport { ensureParams } from \"./utils/failed_handler.js\";\nimport type {\n GoogleBaseLLMInput,\n GoogleAISafetyHandler,\n GoogleAISafetyParams,\n GeminiFunctionDeclaration,\n GeminiFunctionSchema,\n GoogleAIToolType,\n GeminiAPIConfig,\n GoogleAIModelModality,\n} from \"./types.js\";\nimport {\n removeAdditionalProperties,\n schemaToGeminiParameters,\n} from 
\"./utils/zod_to_gemini_parameters.js\";\nimport PROFILES from \"./profiles.js\";\n\nexport class ChatConnection<AuthOptions> extends AbstractGoogleLLMConnection<\n BaseMessage[],\n AuthOptions\n> {\n convertSystemMessageToHumanContent: boolean | undefined;\n\n constructor(\n fields: GoogleAIBaseLLMInput<AuthOptions> | undefined,\n caller: AsyncCaller,\n client: GoogleAbstractedClient,\n streaming: boolean\n ) {\n super(fields, caller, client, streaming);\n this.convertSystemMessageToHumanContent =\n fields?.convertSystemMessageToHumanContent;\n }\n\n get useSystemInstruction(): boolean {\n return typeof this.convertSystemMessageToHumanContent === \"boolean\"\n ? !this.convertSystemMessageToHumanContent\n : this.computeUseSystemInstruction;\n }\n\n get computeUseSystemInstruction(): boolean {\n // This works on models from April 2024 and later\n // Vertex AI: gemini-1.5-pro and gemini-1.0-002 and later\n // AI Studio: gemini-1.5-pro-latest\n if (this.modelFamily === \"palm\") {\n return false;\n } else if (this.modelName === \"gemini-1.0-pro-001\") {\n return false;\n } else if (this.modelName.startsWith(\"gemini-pro-vision\")) {\n return false;\n } else if (this.modelName.startsWith(\"gemini-1.0-pro-vision\")) {\n return false;\n } else if (this.modelName === \"gemini-pro\" && this.platform === \"gai\") {\n // on AI Studio gemini-pro is still pointing at gemini-1.0-pro-001\n return false;\n } else if (this.modelFamily === \"gemma\") {\n // At least as of 12 Mar 2025 gemma 3 on AIS, trying to use system instructions yields an error:\n // \"Developer instruction is not enabled for models/gemma-3-27b-it\"\n return false;\n }\n return true;\n }\n\n computeGoogleSearchToolAdjustmentFromModel(): Exclude<\n GoogleSearchToolSetting,\n boolean\n > {\n if (this.modelName.startsWith(\"gemini-1.0\")) {\n return \"googleSearchRetrieval\";\n } else if (this.modelName.startsWith(\"gemini-1.5\")) {\n return \"googleSearchRetrieval\";\n } else {\n return \"googleSearch\";\n }\n }\n\n computeGoogleSearchToolAdjustment(\n apiConfig: GeminiAPIConfig\n ): Exclude<GoogleSearchToolSetting, true> {\n const adj = apiConfig.googleSearchToolAdjustment;\n if (adj === undefined || adj === true) {\n return this.computeGoogleSearchToolAdjustmentFromModel();\n } else {\n return adj;\n }\n }\n\n buildGeminiAPI(): GoogleAIAPI {\n const apiConfig: GeminiAPIConfig =\n (this.apiConfig as GeminiAPIConfig) ?? 
{};\n const googleSearchToolAdjustment =\n this.computeGoogleSearchToolAdjustment(apiConfig);\n const geminiConfig: GeminiAPIConfig = {\n useSystemInstruction: this.useSystemInstruction,\n googleSearchToolAdjustment,\n ...apiConfig,\n };\n return getGeminiAPI(geminiConfig);\n }\n\n get api(): GoogleAIAPI {\n switch (this.apiName) {\n case \"google\":\n return this.buildGeminiAPI();\n default:\n return super.api;\n }\n }\n}\n\n/**\n * Input to chat model class.\n */\nexport interface ChatGoogleBaseInput<AuthOptions>\n extends BaseChatModelParams,\n GoogleConnectionParams<AuthOptions>,\n GoogleAIModelParams,\n GoogleAISafetyParams,\n GoogleAIAPIParams,\n Pick<GoogleAIBaseLanguageModelCallOptions, \"streamUsage\"> {}\n\n/**\n * Integration with a Google chat model.\n */\nexport abstract class ChatGoogleBase<AuthOptions>\n extends BaseChatModel<GoogleAIBaseLanguageModelCallOptions, AIMessageChunk>\n implements ChatGoogleBaseInput<AuthOptions>\n{\n // Used for tracing, replace with the same name as your class\n static lc_name() {\n return \"ChatGoogle\";\n }\n\n get lc_secrets(): { [key: string]: string } | undefined {\n return {\n authOptions: \"GOOGLE_AUTH_OPTIONS\",\n };\n }\n\n lc_serializable = true;\n\n // Set based on modelName\n model: string;\n\n modelName = \"gemini-pro\";\n\n temperature: number;\n\n maxOutputTokens: number;\n\n maxReasoningTokens: number;\n\n topP: number;\n\n topK: number;\n\n seed: number;\n\n presencePenalty: number;\n\n frequencyPenalty: number;\n\n stopSequences: string[] = [];\n\n logprobs: boolean;\n\n topLogprobs: number = 0;\n\n safetySettings: GoogleAISafetySetting[] = [];\n\n responseModalities?: GoogleAIModelModality[];\n\n // May intentionally be undefined, meaning to compute this.\n convertSystemMessageToHumanContent: boolean | undefined;\n\n safetyHandler: GoogleAISafetyHandler;\n\n speechConfig: GoogleSpeechConfig;\n\n streamUsage = true;\n\n streaming = false;\n\n labels?: Record<string, string>;\n\n protected connection: ChatConnection<AuthOptions>;\n\n protected streamedConnection: ChatConnection<AuthOptions>;\n\n constructor(fields?: ChatGoogleBaseInput<AuthOptions>) {\n super(ensureParams(fields));\n\n copyAndValidateModelParamsInto(fields, this);\n this.safetyHandler =\n fields?.safetyHandler ?? new DefaultGeminiSafetyHandler();\n this.streamUsage = fields?.streamUsage ?? this.streamUsage;\n const client = this.buildClient(fields);\n this.buildConnection(fields ?? {}, client);\n }\n\n getLsParams(options: this[\"ParsedCallOptions\"]): LangSmithParams {\n const params = this.invocationParams(options);\n return {\n ls_provider: \"google_vertexai\",\n ls_model_name: this.model,\n ls_model_type: \"chat\",\n ls_temperature: params.temperature ?? undefined,\n ls_max_tokens: params.maxOutputTokens ?? undefined,\n ls_stop: options.stop,\n };\n }\n\n abstract buildAbstractedClient(\n fields?: GoogleAIBaseLLMInput<AuthOptions>\n ): GoogleAbstractedClient;\n\n buildApiKeyClient(apiKey: string): GoogleAbstractedClient {\n return new ApiKeyGoogleAuth(apiKey);\n }\n\n buildApiKey(fields?: GoogleAIBaseLLMInput<AuthOptions>): string | undefined {\n return fields?.apiKey ?? 
getEnvironmentVariable(\"GOOGLE_API_KEY\");\n }\n\n buildClient(\n fields?: GoogleAIBaseLLMInput<AuthOptions>\n ): GoogleAbstractedClient {\n const apiKey = this.buildApiKey(fields);\n if (apiKey) {\n return this.buildApiKeyClient(apiKey);\n } else {\n return this.buildAbstractedClient(fields);\n }\n }\n\n buildConnection(\n fields: GoogleBaseLLMInput<AuthOptions>,\n client: GoogleAbstractedClient\n ) {\n this.connection = new ChatConnection(\n { ...fields, ...this },\n this.caller,\n client,\n false\n );\n\n this.streamedConnection = new ChatConnection(\n { ...fields, ...this },\n this.caller,\n client,\n true\n );\n }\n\n get platform(): GooglePlatformType {\n return this.connection.platform;\n }\n\n override bindTools(\n tools: GoogleAIToolType[],\n kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>\n ): Runnable<\n BaseLanguageModelInput,\n AIMessageChunk,\n GoogleAIBaseLanguageModelCallOptions\n > {\n return this.withConfig({ tools: convertToGeminiTools(tools), ...kwargs });\n }\n\n // Replace\n _llmType() {\n return \"chat_integration\";\n }\n\n /**\n * Get the parameters used to invoke the model\n */\n override invocationParams(options?: this[\"ParsedCallOptions\"]) {\n return copyAIModelParams(this, options);\n }\n\n async _generate(\n messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager: CallbackManagerForLLMRun | undefined\n ): Promise<ChatResult> {\n const parameters = this.invocationParams(options);\n if (this.streaming) {\n const stream = this._streamResponseChunks(messages, options, runManager);\n let finalChunk: ChatGenerationChunk | null = null;\n for await (const chunk of stream) {\n finalChunk = !finalChunk ? chunk : concat(finalChunk, chunk);\n }\n if (!finalChunk) {\n throw new Error(\"No chunks were returned from the stream.\");\n }\n return {\n generations: [finalChunk],\n };\n }\n\n const response = await this.connection.request(\n messages,\n parameters,\n options,\n runManager\n );\n const ret = this.connection.api.responseToChatResult(response);\n const chunk = ret?.generations?.[0];\n if (chunk) {\n await runManager?.handleLLMNewToken(chunk.text || \"\");\n }\n return ret;\n }\n\n async *_streamResponseChunks(\n _messages: BaseMessage[],\n options: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n // Make the call as a streaming request\n const parameters = this.invocationParams(options);\n const response = await this.streamedConnection.request(\n _messages,\n parameters,\n options,\n runManager\n );\n\n // Get the streaming parser of the response\n const stream = response.data as JsonStream;\n let usageMetadata: UsageMetadata | undefined;\n // Loop until the end of the stream\n // During the loop, yield each time we get a chunk from the streaming parser\n // that is either available or added to the queue\n while (!stream.streamDone) {\n const output = await stream.nextChunk();\n await runManager?.handleCustomEvent(\n `google-chunk-${this.constructor.name}`,\n {\n output,\n }\n );\n if (\n output &&\n output.usageMetadata &&\n this.streamUsage !== false &&\n options.streamUsage !== false\n ) {\n usageMetadata = {\n input_tokens: output.usageMetadata.promptTokenCount,\n output_tokens: output.usageMetadata.candidatesTokenCount,\n total_tokens: output.usageMetadata.totalTokenCount,\n };\n }\n const chunk =\n output !== null\n ? 
this.connection.api.responseToChatGeneration({ data: output })\n : new ChatGenerationChunk({\n text: \"\",\n generationInfo: { finishReason: \"stop\" },\n message: new AIMessageChunk({\n content: \"\",\n usage_metadata: usageMetadata,\n }),\n });\n if (chunk) {\n yield chunk;\n await runManager?.handleLLMNewToken(\n chunk.text ?? \"\",\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk }\n );\n }\n }\n }\n\n /** @ignore */\n _combineLLMOutput() {\n return [];\n }\n\n /**\n * Return profiling information for the model.\n *\n * Provides information about the model's capabilities and constraints,\n * including token limits, multimodal support, and advanced features like\n * tool calling and structured output.\n *\n * @returns {ModelProfile} An object describing the model's capabilities and constraints\n */\n get profile(): ModelProfile {\n return PROFILES[this.model] ?? {};\n }\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<false>\n ): Runnable<BaseLanguageModelInput, RunOutput>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<true>\n ): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;\n\n withStructuredOutput<\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n RunOutput extends Record<string, any> = Record<string, any>\n >(\n outputSchema:\n | InteropZodType<RunOutput>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n | Record<string, any>,\n config?: StructuredOutputMethodOptions<boolean>\n ):\n | Runnable<BaseLanguageModelInput, RunOutput>\n | Runnable<\n BaseLanguageModelInput,\n { raw: BaseMessage; parsed: RunOutput }\n > {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const schema: InteropZodType<RunOutput> | Record<string, any> =\n outputSchema;\n const name = config?.name;\n const method = config?.method;\n const includeRaw = config?.includeRaw;\n if (method === \"jsonMode\") {\n throw new Error(`Google only supports \"functionCalling\" as a method.`);\n }\n\n let functionName = name ?? \"extract\";\n let outputParser: BaseLLMOutputParser<RunOutput>;\n let tools: GeminiTool[];\n if (isInteropZodSchema(schema)) {\n const jsonSchema = schemaToGeminiParameters(schema);\n tools = [\n {\n functionDeclarations: [\n {\n name: functionName,\n description:\n jsonSchema.description ?? 
\"A function available to call.\",\n parameters: jsonSchema as GeminiFunctionSchema,\n },\n ],\n },\n ];\n outputParser = new JsonOutputKeyToolsParser({\n returnSingle: true,\n keyName: functionName,\n zodSchema: schema,\n });\n } else {\n let geminiFunctionDefinition: GeminiFunctionDeclaration;\n if (\n typeof schema.name === \"string\" &&\n typeof schema.parameters === \"object\" &&\n schema.parameters != null\n ) {\n geminiFunctionDefinition = schema as GeminiFunctionDeclaration;\n functionName = schema.name;\n } else {\n // We are providing the schema for *just* the parameters, probably\n const parameters: GeminiJsonSchema = removeAdditionalProperties(schema);\n geminiFunctionDefinition = {\n name: functionName,\n description: schema.description ?? \"\",\n parameters,\n };\n }\n tools = [\n {\n functionDeclarations: [geminiFunctionDefinition],\n },\n ];\n outputParser = new JsonOutputKeyToolsParser<RunOutput>({\n returnSingle: true,\n keyName: functionName,\n });\n }\n const llm = this.bindTools(tools).withConfig({ tool_choice: functionName });\n\n if (!includeRaw) {\n return llm.pipe(outputParser).withConfig({\n runName: \"ChatGoogleStructuredOutput\",\n }) as Runnable<BaseLanguageModelInput, RunOutput>;\n }\n\n const parserAssign = RunnablePassthrough.assign({\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parsed: (input: any, config) => outputParser.invoke(input.raw, config),\n });\n const parserNone = RunnablePassthrough.assign({\n parsed: () => null,\n });\n const parsedWithFallback = parserAssign.withFallbacks({\n fallbacks: [parserNone],\n });\n return RunnableSequence.from<\n BaseLanguageModelInput,\n { raw: BaseMessage; parsed: RunOutput }\n >([\n {\n raw: llm,\n },\n parsedWithFallback,\n ]).withConfig({\n runName: \"StructuredOutputRunnable\",\n });\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AAqEA,IAAa,iBAAb,cAAiD,4BAG/C;CACA;CAEA,YACEA,QACAC,QACAC,QACAC,WACA;EACA,MAAM,QAAQ,QAAQ,QAAQ,UAAU;EACxC,KAAK,qCACH,QAAQ;CACX;CAED,IAAI,uBAAgC;AAClC,SAAO,OAAO,KAAK,uCAAuC,YACtD,CAAC,KAAK,qCACN,KAAK;CACV;CAED,IAAI,8BAAuC;AAIzC,MAAI,KAAK,gBAAgB,OACvB,QAAO;WACE,KAAK,cAAc,qBAC5B,QAAO;WACE,KAAK,UAAU,WAAW,oBAAoB,CACvD,QAAO;WACE,KAAK,UAAU,WAAW,wBAAwB,CAC3D,QAAO;WACE,KAAK,cAAc,gBAAgB,KAAK,aAAa,MAE9D,QAAO;WACE,KAAK,gBAAgB,QAG9B,QAAO;AAET,SAAO;CACR;CAED,6CAGE;AACA,MAAI,KAAK,UAAU,WAAW,aAAa,CACzC,QAAO;WACE,KAAK,UAAU,WAAW,aAAa,CAChD,QAAO;MAEP,QAAO;CAEV;CAED,kCACEC,WACwC;EACxC,MAAM,MAAM,UAAU;AACtB,MAAI,QAAQ,UAAa,QAAQ,KAC/B,QAAO,KAAK,4CAA4C;MAExD,QAAO;CAEV;CAED,iBAA8B;EAC5B,MAAMA,YACH,KAAK,aAAiC,CAAE;EAC3C,MAAM,6BACJ,KAAK,kCAAkC,UAAU;EACnD,MAAMC,eAAgC;GACpC,sBAAsB,KAAK;GAC3B;GACA,GAAG;EACJ;AACD,SAAO,aAAa,aAAa;CAClC;CAED,IAAI,MAAmB;AACrB,UAAQ,KAAK,SAAb;GACE,KAAK,SACH,QAAO,KAAK,gBAAgB;GAC9B,QACE,QAAO,MAAM;EAChB;CACF;AACF;;;;AAgBD,IAAsB,iBAAtB,cACU,cAEV;CAEE,OAAO,UAAU;AACf,SAAO;CACR;CAED,IAAI,aAAoD;AACtD,SAAO,EACL,aAAa,sBACd;CACF;CAED,kBAAkB;CAGlB;CAEA,YAAY;CAEZ;CAEA;CAEA;CAEA;CAEA;CAEA;CAEA;CAEA;CAEA,gBAA0B,CAAE;CAE5B;CAEA,cAAsB;CAEtB,iBAA0C,CAAE;CAE5C;CAGA;CAEA;CAEA;CAEA,cAAc;CAEd,YAAY;CAEZ;CAEA,AAAU;CAEV,AAAU;CAEV,YAAYC,QAA2C;EACrD,MAAM,aAAa,OAAO,CAAC;EAE3B,+BAA+B,QAAQ,KAAK;EAC5C,KAAK,gBACH,QAAQ,iBAAiB,IAAI;EAC/B,KAAK,cAAc,QAAQ,eAAe,KAAK;EAC/C,MAAM,SAAS,KAAK,YAAY,OAAO;EACvC,KAAK,gBAAgB,UAAU,CAAE,GAAE,OAAO;CAC3C;CAED,YAAYC,SAAqD;EAC/D,MAAM,SAAS,KAAK,iBAAiB,QAAQ;AAC7C,SAAO;GACL,aAAa;GACb,eAAe,KAAK;GACpB,eAAe;GACf,gBAAgB,OAAO,eAAe;GACtC,eAAe,OAAO,mBAAmB;GACzC,SAAS,QAAQ;EAClB;CACF;CAMD,kBAAkBC,QAAwC;AACxD,SAAO,IAAI,iBAAiB;CAC7B;CAED,YAAYC,QAAgE;AAC1E,SAAO,QAAQ,UAAU,uBAAuB,iBAAiB;CAClE;CAED,YACEA,QACwB;EACxB,MAAM,SAAS,KAAK,YAAY,OAAO;AACvC,MAAI,OACF,QAAO,KAAK,kBAAkB,OAAO;MAErC,QAAO,KAAK,sBAAsB,OAAO;CAE5C;CAED,gBACEC,QACAR,QACA;EACA,KAAK,aAAa,IAAI,eACpB;GAAE,GAAG;GAAQ,GAAG;EAAM,GACtB,KAAK,QACL,QACA;EAGF,KAAK,qBAAqB,IAAI,eAC5B;GAAE,GAAG;GAAQ,GAAG;EAAM,GACtB,KAAK,QACL,QACA;CAEH;CAED,IAAI,WAA+B;AACjC,SAAO,KAAK,WAAW;CACxB;CAED,AAAS,UACPS,OACAC,QAKA;AACA,SAAO,KAAK,WAAW;GAAE,OAAO,qBAAqB,MAAM;GAAE,GAAG;EAAQ,EAAC;CAC1E;CAGD,WAAW;AACT,SAAO;CACR;;;;CAKD,AAAS,iBAAiBC,SAAqC;AAC7D,SAAO,kBAAkB,MAAM,QAAQ;CACxC;CAED,MAAM,UACJC,UACAP,SACAQ,YACqB;EACrB,MAAM,aAAa,KAAK,iBAAiB,QAAQ;AACjD,MAAI,KAAK,WAAW;GAClB,MAAM,SAAS,KAAK,sBAAsB,UAAU,SAAS,WAAW;GACxE,IAAIC,aAAyC;AAC7C,cAAW,MAAMC,WAAS,QACxB,aAAa,CAAC,aAAaA,UAAQ,OAAO,YAAYA,QAAM;AAE9D,OAAI,CAAC,WACH,OAAM,IAAI,MAAM;AAElB,UAAO,EACL,aAAa,CAAC,UAAW,EAC1B;EACF;EAED,MAAM,WAAW,MAAM,KAAK,WAAW,QACrC,UACA,YACA,SACA,WACD;EACD,MAAM,MAAM,KAAK,WAAW,IAAI,qBAAqB,SAAS;EAC9D,MAAM,QAAQ,KAAK,cAAc;AACjC,MAAI,OACF,MAAM,YAAY,kBAAkB,MAAM,QAAQ,GAAG;AAEvD,SAAO;CACR;CAED,OAAO,sBACLC,WACAX,SACAY,YACqC;EAErC,MAAM,aAAa,KAAK,iBAAiB,QAAQ;EACjD,MAAM,WAAW,MAAM,KAAK,mBAAmB,QAC7C,WACA,YACA,SACA,WACD;EAGD,MAAM,SAAS,SAAS;EACxB,IAAIC;AAIJ,SAAO,CAAC,OAAO,YAAY;GACzB,MAAM,SAAS,MAAM,OAAO,WAAW;GACvC,MAAM,YAAY,kBAChB,CAAC,aAAa,EAAE,KAAK,YAAY,MAAM,EACvC,EACE,OACD,EACF;AACD,OACE,UACA,OAAO,iBACP,KAAK,gBAAgB,SACrB,QAAQ,gBAAgB,OAExB,gBAAgB;IACd,cAAc,OAAO,cAAc;IACnC,eAAe,OAAO,cAAc;IACpC,cAAc,OAAO,cAAc;GACpC;GAEH,MAAM,QACJ,WAAW,OACP,KAAK,WAAW,IAAI,yBAAyB,EAAE,MAAM,OAAQ,EAAC,GAC9D,IAAI,oBAAoB;IACtB,MAAM;IACN,gBAAgB,EAAE,cAAc,OAAQ;IACxC,SAAS,IAAI,eAAe;KAC1B,SAAS;KACT,gBAAgB;IACjB;GACF;AACP,OAAI,OAAO;IACT,MAAM;IACN,MAAM,YAAY,kBAChB,MAAM,QAAQ,IACd,QACA,QACA,QACA,QACA,EAAE,MAAO,EACV;GACF;EACF;CACF;;CAGD,oBAAoB;AAClB,SAAO,CAAE;CACV;;;;;;;;;;CAWD,IAAI,UAAwB;AAC1B,SAAOC,iBAAS,KAAK,UAAU,CAAE;CAClC;CAwBD,qBAI
EC,cAIAC,QAMI;EAEJ,MAAMC,SACJ;EACF,MAAM,OAAO,QAAQ;EACrB,MAAM,SAAS,QAAQ;EACvB,MAAM,aAAa,QAAQ;AAC3B,MAAI,WAAW,WACb,OAAM,IAAI,MAAM,CAAC,mDAAmD,CAAC;EAGvE,IAAI,eAAe,QAAQ;EAC3B,IAAIC;EACJ,IAAIC;AACJ,MAAI,mBAAmB,OAAO,EAAE;GAC9B,MAAM,aAAa,yBAAyB,OAAO;GACnD,QAAQ,CACN,EACE,sBAAsB,CACpB;IACE,MAAM;IACN,aACE,WAAW,eAAe;IAC5B,YAAY;GACb,CACF,EACF,CACF;GACD,eAAe,IAAI,yBAAyB;IAC1C,cAAc;IACd,SAAS;IACT,WAAW;GACZ;EACF,OAAM;GACL,IAAIC;AACJ,OACE,OAAO,OAAO,SAAS,YACvB,OAAO,OAAO,eAAe,YAC7B,OAAO,cAAc,MACrB;IACA,2BAA2B;IAC3B,eAAe,OAAO;GACvB,OAAM;IAEL,MAAMC,aAA+B,2BAA2B,OAAO;IACvE,2BAA2B;KACzB,MAAM;KACN,aAAa,OAAO,eAAe;KACnC;IACD;GACF;GACD,QAAQ,CACN,EACE,sBAAsB,CAAC,wBAAyB,EACjD,CACF;GACD,eAAe,IAAI,yBAAoC;IACrD,cAAc;IACd,SAAS;GACV;EACF;EACD,MAAM,MAAM,KAAK,UAAU,MAAM,CAAC,WAAW,EAAE,aAAa,aAAc,EAAC;AAE3E,MAAI,CAAC,WACH,QAAO,IAAI,KAAK,aAAa,CAAC,WAAW,EACvC,SAAS,6BACV,EAAC;EAGJ,MAAM,eAAe,oBAAoB,OAAO,EAE9C,QAAQ,CAACC,OAAYC,aAAW,aAAa,OAAO,MAAM,KAAKA,SAAO,CACvE,EAAC;EACF,MAAM,aAAa,oBAAoB,OAAO,EAC5C,QAAQ,MAAM,KACf,EAAC;EACF,MAAM,qBAAqB,aAAa,cAAc,EACpD,WAAW,CAAC,UAAW,EACxB,EAAC;AACF,SAAO,iBAAiB,KAGtB,CACA,EACE,KAAK,IACN,GACD,kBACD,EAAC,CAAC,WAAW,EACZ,SAAS,2BACV,EAAC;CACH;AACF"}
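Aside from the regenerated source maps above, the behavioural change to `ChatGoogleBase` in this release is small: a new `profile` getter that looks the configured model name up in the generated `PROFILES` table and falls back to an empty object. A minimal sketch distilled from the embedded `chat_models.ts` source (the wrapper class here is illustrative only; the real getter lives on `ChatGoogleBase`):

```ts
import { type ModelProfile } from "@langchain/core/language_models/profile";
import PROFILES from "./profiles.js";

// Illustrative stand-in for ChatGoogleBase, showing how the new getter resolves.
class ProfileAwareModel {
  constructor(public model: string) {}

  // Exact-name lookup into the generated table; model names not present in
  // the table fall back to an empty profile object.
  get profile(): ModelProfile {
    return PROFILES[this.model] ?? {};
  }
}
```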
@@ -0,0 +1,219 @@
1
+
2
+ //#region src/profiles.ts
3
+ const PROFILES = {
4
+ "gemini-embedding-001": {
5
+ maxInputTokens: 2048,
6
+ imageInputs: false,
7
+ audioInputs: false,
8
+ pdfInputs: false,
9
+ videoInputs: false,
10
+ maxOutputTokens: 3072,
11
+ reasoningOutput: false,
12
+ imageOutputs: false,
13
+ audioOutputs: false,
14
+ videoOutputs: false,
15
+ toolCalling: false,
16
+ structuredOutput: false
17
+ },
18
+ "gemini-2.5-flash-preview-05-20": {
19
+ maxInputTokens: 1048576,
20
+ imageInputs: true,
21
+ audioInputs: true,
22
+ pdfInputs: true,
23
+ videoInputs: true,
24
+ maxOutputTokens: 65536,
25
+ reasoningOutput: true,
26
+ imageOutputs: false,
27
+ audioOutputs: false,
28
+ videoOutputs: false,
29
+ toolCalling: true,
30
+ structuredOutput: false
31
+ },
32
+ "gemini-flash-lite-latest": {
33
+ maxInputTokens: 1048576,
34
+ imageInputs: true,
35
+ audioInputs: true,
36
+ pdfInputs: true,
37
+ videoInputs: true,
38
+ maxOutputTokens: 65536,
39
+ reasoningOutput: true,
40
+ imageOutputs: false,
41
+ audioOutputs: false,
42
+ videoOutputs: false,
43
+ toolCalling: true,
44
+ structuredOutput: false
45
+ },
46
+ "gemini-2.5-flash": {
47
+ maxInputTokens: 1048576,
48
+ imageInputs: true,
49
+ audioInputs: true,
50
+ pdfInputs: true,
51
+ videoInputs: true,
52
+ maxOutputTokens: 65536,
53
+ reasoningOutput: true,
54
+ imageOutputs: false,
55
+ audioOutputs: false,
56
+ videoOutputs: false,
57
+ toolCalling: true,
58
+ structuredOutput: false
59
+ },
60
+ "gemini-flash-latest": {
61
+ maxInputTokens: 1048576,
62
+ imageInputs: true,
63
+ audioInputs: true,
64
+ pdfInputs: true,
65
+ videoInputs: true,
66
+ maxOutputTokens: 65536,
67
+ reasoningOutput: true,
68
+ imageOutputs: false,
69
+ audioOutputs: false,
70
+ videoOutputs: false,
71
+ toolCalling: true,
72
+ structuredOutput: false
73
+ },
74
+ "gemini-2.5-pro-preview-05-06": {
75
+ maxInputTokens: 1048576,
76
+ imageInputs: true,
77
+ audioInputs: true,
78
+ pdfInputs: true,
79
+ videoInputs: true,
80
+ maxOutputTokens: 65536,
81
+ reasoningOutput: true,
82
+ imageOutputs: false,
83
+ audioOutputs: false,
84
+ videoOutputs: false,
85
+ toolCalling: true,
86
+ structuredOutput: false
87
+ },
88
+ "gemini-2.0-flash-lite": {
89
+ maxInputTokens: 1048576,
90
+ imageInputs: true,
91
+ audioInputs: true,
92
+ pdfInputs: true,
93
+ videoInputs: true,
94
+ maxOutputTokens: 8192,
95
+ reasoningOutput: false,
96
+ imageOutputs: false,
97
+ audioOutputs: false,
98
+ videoOutputs: false,
99
+ toolCalling: true,
100
+ structuredOutput: false
101
+ },
102
+ "gemini-2.0-flash": {
103
+ maxInputTokens: 1048576,
104
+ imageInputs: true,
105
+ audioInputs: true,
106
+ pdfInputs: true,
107
+ videoInputs: true,
108
+ maxOutputTokens: 8192,
109
+ reasoningOutput: false,
110
+ imageOutputs: false,
111
+ audioOutputs: false,
112
+ videoOutputs: false,
113
+ toolCalling: true,
114
+ structuredOutput: false
115
+ },
116
+ "gemini-2.5-flash-lite": {
117
+ maxInputTokens: 1048576,
118
+ imageInputs: true,
119
+ audioInputs: true,
120
+ pdfInputs: true,
121
+ videoInputs: true,
122
+ maxOutputTokens: 65536,
123
+ reasoningOutput: true,
124
+ imageOutputs: false,
125
+ audioOutputs: false,
126
+ videoOutputs: false,
127
+ toolCalling: true,
128
+ structuredOutput: false
129
+ },
130
+ "gemini-2.5-pro-preview-06-05": {
131
+ maxInputTokens: 1048576,
132
+ imageInputs: true,
133
+ audioInputs: true,
134
+ pdfInputs: true,
135
+ videoInputs: true,
136
+ maxOutputTokens: 65536,
137
+ reasoningOutput: true,
138
+ imageOutputs: false,
139
+ audioOutputs: false,
140
+ videoOutputs: false,
141
+ toolCalling: true,
142
+ structuredOutput: false
143
+ },
144
+ "gemini-2.5-flash-lite-preview-06-17": {
145
+ maxInputTokens: 65536,
146
+ imageInputs: true,
147
+ audioInputs: true,
148
+ pdfInputs: true,
149
+ videoInputs: true,
150
+ maxOutputTokens: 65536,
151
+ reasoningOutput: true,
152
+ imageOutputs: false,
153
+ audioOutputs: false,
154
+ videoOutputs: false,
155
+ toolCalling: true,
156
+ structuredOutput: false
157
+ },
158
+ "gemini-2.5-flash-preview-09-2025": {
159
+ maxInputTokens: 1048576,
160
+ imageInputs: true,
161
+ audioInputs: true,
162
+ pdfInputs: true,
163
+ videoInputs: true,
164
+ maxOutputTokens: 65536,
165
+ reasoningOutput: true,
166
+ imageOutputs: false,
167
+ audioOutputs: false,
168
+ videoOutputs: false,
169
+ toolCalling: true,
170
+ structuredOutput: false
171
+ },
172
+ "gemini-2.5-flash-preview-04-17": {
173
+ maxInputTokens: 1048576,
174
+ imageInputs: true,
175
+ audioInputs: true,
176
+ pdfInputs: true,
177
+ videoInputs: true,
178
+ maxOutputTokens: 65536,
179
+ reasoningOutput: true,
180
+ imageOutputs: false,
181
+ audioOutputs: false,
182
+ videoOutputs: false,
183
+ toolCalling: true,
184
+ structuredOutput: false
185
+ },
186
+ "gemini-2.5-pro": {
187
+ maxInputTokens: 1048576,
188
+ imageInputs: true,
189
+ audioInputs: true,
190
+ pdfInputs: true,
191
+ videoInputs: true,
192
+ maxOutputTokens: 65536,
193
+ reasoningOutput: true,
194
+ imageOutputs: false,
195
+ audioOutputs: false,
196
+ videoOutputs: false,
197
+ toolCalling: true,
198
+ structuredOutput: false
199
+ },
200
+ "gemini-2.5-flash-lite-preview-09-2025": {
201
+ maxInputTokens: 1048576,
202
+ imageInputs: true,
203
+ audioInputs: true,
204
+ pdfInputs: true,
205
+ videoInputs: true,
206
+ maxOutputTokens: 65536,
207
+ reasoningOutput: true,
208
+ imageOutputs: false,
209
+ audioOutputs: false,
210
+ videoOutputs: false,
211
+ toolCalling: true,
212
+ structuredOutput: false
213
+ }
214
+ };
215
+ var profiles_default = PROFILES;
216
+
217
+ //#endregion
218
+ exports.default = profiles_default;
219
+ //# sourceMappingURL=profiles.cjs.map
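For consumers, the practical payoff of the generated table above is that capability checks can be data-driven instead of hard-coded per model name. A minimal sketch of that pattern, assuming only the new `.profile` getter (the `supportsTools` and `maxInput` helpers are hypothetical, not part of the package API):

```ts
import type { ModelProfile } from "@langchain/core/language_models/profile";

// Hypothetical helper: accepts anything exposing the new `.profile` getter
// (e.g. a ChatGoogleBase subclass) and gates a tool-calling code path on it.
function supportsTools(model: { profile: ModelProfile }): boolean {
  // Unknown model names yield an empty profile; defaulting to false here is
  // a conservative choice, not something the package enforces.
  return model.profile.toolCalling ?? false;
}

// Token limits can likewise be read before assembling an oversized prompt.
function maxInput(model: { profile: ModelProfile }): number | undefined {
  return model.profile.maxInputTokens;
}
```

Because the lookup is by exact model name, aliases or versioned names missing from the table resolve to `{}`, so helpers like these should decide deliberately whether to treat absent fields as "unknown" or "unsupported".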
@@ -0,0 +1 @@
1
+ {"version":3,"file":"profiles.cjs","names":["PROFILES: Record<string, ModelProfile>"],"sources":["../src/profiles.ts"],"sourcesContent":["/**\n * This file was automatically generated by an automated script. Do not edit manually.\n */\nimport type { ModelProfile } from \"@langchain/core/language_models/profile\";\nconst PROFILES: Record<string, ModelProfile> = {\n \"gemini-embedding-001\": {\n maxInputTokens: 2048,\n imageInputs: false,\n audioInputs: false,\n pdfInputs: false,\n videoInputs: false,\n maxOutputTokens: 3072,\n reasoningOutput: false,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: false,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-preview-05-20\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-flash-lite-latest\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-flash\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-flash-latest\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-pro-preview-05-06\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.0-flash-lite\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 8192,\n reasoningOutput: false,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.0-flash\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 8192,\n reasoningOutput: false,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-lite\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-pro-preview-06-05\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-lite-preview-06-17\": 
{\n maxInputTokens: 65536,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-preview-09-2025\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-preview-04-17\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-pro\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-lite-preview-09-2025\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n};\nexport default PROFILES;\n"],"mappings":";;AAIA,MAAMA,WAAyC;CAC7C,wBAAwB;EACtB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,kCAAkC;EAChC,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,4BAA4B;EAC1B,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,oBAAoB;EAClB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,uBAAuB;EACrB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,gCAAgC;EAC9B,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,yBAAyB;EACvB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,oBAAoB;EAClB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,yBAAyB;EACvB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,gCAAgC;EAC9B,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,uCAAuC;EACrC,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,oCAAoC;EAClC,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,kCAAkC;EAChC,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd
,aAAa;EACb,kBAAkB;CACnB;CACD,kBAAkB;EAChB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,yCAAyC;EACvC,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;AACF;AACD,uBAAe"}
@@ -0,0 +1,218 @@
1
+ //#region src/profiles.ts
2
+ const PROFILES = {
3
+ "gemini-embedding-001": {
4
+ maxInputTokens: 2048,
5
+ imageInputs: false,
6
+ audioInputs: false,
7
+ pdfInputs: false,
8
+ videoInputs: false,
9
+ maxOutputTokens: 3072,
10
+ reasoningOutput: false,
11
+ imageOutputs: false,
12
+ audioOutputs: false,
13
+ videoOutputs: false,
14
+ toolCalling: false,
15
+ structuredOutput: false
16
+ },
17
+ "gemini-2.5-flash-preview-05-20": {
18
+ maxInputTokens: 1048576,
19
+ imageInputs: true,
20
+ audioInputs: true,
21
+ pdfInputs: true,
22
+ videoInputs: true,
23
+ maxOutputTokens: 65536,
24
+ reasoningOutput: true,
25
+ imageOutputs: false,
26
+ audioOutputs: false,
27
+ videoOutputs: false,
28
+ toolCalling: true,
29
+ structuredOutput: false
30
+ },
31
+ "gemini-flash-lite-latest": {
32
+ maxInputTokens: 1048576,
33
+ imageInputs: true,
34
+ audioInputs: true,
35
+ pdfInputs: true,
36
+ videoInputs: true,
37
+ maxOutputTokens: 65536,
38
+ reasoningOutput: true,
39
+ imageOutputs: false,
40
+ audioOutputs: false,
41
+ videoOutputs: false,
42
+ toolCalling: true,
43
+ structuredOutput: false
44
+ },
45
+ "gemini-2.5-flash": {
46
+ maxInputTokens: 1048576,
47
+ imageInputs: true,
48
+ audioInputs: true,
49
+ pdfInputs: true,
50
+ videoInputs: true,
51
+ maxOutputTokens: 65536,
52
+ reasoningOutput: true,
53
+ imageOutputs: false,
54
+ audioOutputs: false,
55
+ videoOutputs: false,
56
+ toolCalling: true,
57
+ structuredOutput: false
58
+ },
59
+ "gemini-flash-latest": {
60
+ maxInputTokens: 1048576,
61
+ imageInputs: true,
62
+ audioInputs: true,
63
+ pdfInputs: true,
64
+ videoInputs: true,
65
+ maxOutputTokens: 65536,
66
+ reasoningOutput: true,
67
+ imageOutputs: false,
68
+ audioOutputs: false,
69
+ videoOutputs: false,
70
+ toolCalling: true,
71
+ structuredOutput: false
72
+ },
73
+ "gemini-2.5-pro-preview-05-06": {
74
+ maxInputTokens: 1048576,
75
+ imageInputs: true,
76
+ audioInputs: true,
77
+ pdfInputs: true,
78
+ videoInputs: true,
79
+ maxOutputTokens: 65536,
80
+ reasoningOutput: true,
81
+ imageOutputs: false,
82
+ audioOutputs: false,
83
+ videoOutputs: false,
84
+ toolCalling: true,
85
+ structuredOutput: false
86
+ },
87
+ "gemini-2.0-flash-lite": {
88
+ maxInputTokens: 1048576,
89
+ imageInputs: true,
90
+ audioInputs: true,
91
+ pdfInputs: true,
92
+ videoInputs: true,
93
+ maxOutputTokens: 8192,
94
+ reasoningOutput: false,
95
+ imageOutputs: false,
96
+ audioOutputs: false,
97
+ videoOutputs: false,
98
+ toolCalling: true,
99
+ structuredOutput: false
100
+ },
101
+ "gemini-2.0-flash": {
102
+ maxInputTokens: 1048576,
103
+ imageInputs: true,
104
+ audioInputs: true,
105
+ pdfInputs: true,
106
+ videoInputs: true,
107
+ maxOutputTokens: 8192,
108
+ reasoningOutput: false,
109
+ imageOutputs: false,
110
+ audioOutputs: false,
111
+ videoOutputs: false,
112
+ toolCalling: true,
113
+ structuredOutput: false
114
+ },
115
+ "gemini-2.5-flash-lite": {
116
+ maxInputTokens: 1048576,
117
+ imageInputs: true,
118
+ audioInputs: true,
119
+ pdfInputs: true,
120
+ videoInputs: true,
121
+ maxOutputTokens: 65536,
122
+ reasoningOutput: true,
123
+ imageOutputs: false,
124
+ audioOutputs: false,
125
+ videoOutputs: false,
126
+ toolCalling: true,
127
+ structuredOutput: false
128
+ },
129
+ "gemini-2.5-pro-preview-06-05": {
130
+ maxInputTokens: 1048576,
131
+ imageInputs: true,
132
+ audioInputs: true,
133
+ pdfInputs: true,
134
+ videoInputs: true,
135
+ maxOutputTokens: 65536,
136
+ reasoningOutput: true,
137
+ imageOutputs: false,
138
+ audioOutputs: false,
139
+ videoOutputs: false,
140
+ toolCalling: true,
141
+ structuredOutput: false
142
+ },
143
+ "gemini-2.5-flash-lite-preview-06-17": {
144
+ maxInputTokens: 65536,
145
+ imageInputs: true,
146
+ audioInputs: true,
147
+ pdfInputs: true,
148
+ videoInputs: true,
149
+ maxOutputTokens: 65536,
150
+ reasoningOutput: true,
151
+ imageOutputs: false,
152
+ audioOutputs: false,
153
+ videoOutputs: false,
154
+ toolCalling: true,
155
+ structuredOutput: false
156
+ },
157
+ "gemini-2.5-flash-preview-09-2025": {
158
+ maxInputTokens: 1048576,
159
+ imageInputs: true,
160
+ audioInputs: true,
161
+ pdfInputs: true,
162
+ videoInputs: true,
163
+ maxOutputTokens: 65536,
164
+ reasoningOutput: true,
165
+ imageOutputs: false,
166
+ audioOutputs: false,
167
+ videoOutputs: false,
168
+ toolCalling: true,
169
+ structuredOutput: false
170
+ },
171
+ "gemini-2.5-flash-preview-04-17": {
172
+ maxInputTokens: 1048576,
173
+ imageInputs: true,
174
+ audioInputs: true,
175
+ pdfInputs: true,
176
+ videoInputs: true,
177
+ maxOutputTokens: 65536,
178
+ reasoningOutput: true,
179
+ imageOutputs: false,
180
+ audioOutputs: false,
181
+ videoOutputs: false,
182
+ toolCalling: true,
183
+ structuredOutput: false
184
+ },
185
+ "gemini-2.5-pro": {
186
+ maxInputTokens: 1048576,
187
+ imageInputs: true,
188
+ audioInputs: true,
189
+ pdfInputs: true,
190
+ videoInputs: true,
191
+ maxOutputTokens: 65536,
192
+ reasoningOutput: true,
193
+ imageOutputs: false,
194
+ audioOutputs: false,
195
+ videoOutputs: false,
196
+ toolCalling: true,
197
+ structuredOutput: false
198
+ },
199
+ "gemini-2.5-flash-lite-preview-09-2025": {
200
+ maxInputTokens: 1048576,
201
+ imageInputs: true,
202
+ audioInputs: true,
203
+ pdfInputs: true,
204
+ videoInputs: true,
205
+ maxOutputTokens: 65536,
206
+ reasoningOutput: true,
207
+ imageOutputs: false,
208
+ audioOutputs: false,
209
+ videoOutputs: false,
210
+ toolCalling: true,
211
+ structuredOutput: false
212
+ }
213
+ };
214
+ var profiles_default = PROFILES;
215
+
216
+ //#endregion
217
+ export { profiles_default as default };
218
+ //# sourceMappingURL=profiles.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"profiles.js","names":["PROFILES: Record<string, ModelProfile>"],"sources":["../src/profiles.ts"],"sourcesContent":["/**\n * This file was automatically generated by an automated script. Do not edit manually.\n */\nimport type { ModelProfile } from \"@langchain/core/language_models/profile\";\nconst PROFILES: Record<string, ModelProfile> = {\n \"gemini-embedding-001\": {\n maxInputTokens: 2048,\n imageInputs: false,\n audioInputs: false,\n pdfInputs: false,\n videoInputs: false,\n maxOutputTokens: 3072,\n reasoningOutput: false,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: false,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-preview-05-20\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-flash-lite-latest\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-flash\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-flash-latest\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-pro-preview-05-06\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.0-flash-lite\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 8192,\n reasoningOutput: false,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.0-flash\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 8192,\n reasoningOutput: false,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-lite\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-pro-preview-06-05\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-lite-preview-06-17\": 
{\n maxInputTokens: 65536,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-preview-09-2025\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-preview-04-17\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-pro\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-lite-preview-09-2025\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n};\nexport default PROFILES;\n"],"mappings":";AAIA,MAAMA,WAAyC;CAC7C,wBAAwB;EACtB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,kCAAkC;EAChC,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,4BAA4B;EAC1B,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,oBAAoB;EAClB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,uBAAuB;EACrB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,gCAAgC;EAC9B,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,yBAAyB;EACvB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,oBAAoB;EAClB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,yBAAyB;EACvB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,gCAAgC;EAC9B,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,uCAAuC;EACrC,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,oCAAoC;EAClC,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,kCAAkC;EAChC,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,
aAAa;EACb,kBAAkB;CACnB;CACD,kBAAkB;EAChB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,yCAAyC;EACvC,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;AACF;AACD,uBAAe"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@langchain/google-common",
3
- "version": "1.0.0",
3
+ "version": "1.0.1",
4
4
  "description": "Core types and classes for Google services.",
5
5
  "type": "module",
6
6
  "engines": {
@@ -36,7 +36,7 @@
36
36
  "typescript": "~5.8.3",
37
37
  "zod": "^3.25.76",
38
38
  "@langchain/eslint": "0.1.0",
39
- "@langchain/core": "1.0.0"
39
+ "@langchain/core": "1.0.5"
40
40
  },
41
41
  "publishConfig": {
42
42
  "access": "public"
@@ -108,7 +108,8 @@
108
108
  "LICENSE"
109
109
  ],
110
110
  "scripts": {
111
- "build": "pnpm --filter @langchain/build compile @langchain/google-common",
111
+ "build": "turbo build:compile --filter @langchain/google-common",
112
+ "build:compile": "pnpm --filter @langchain/build compile @langchain/google-common",
112
113
  "lint:eslint": "eslint --cache src/",
113
114
  "lint:dpdm": "dpdm --skip-dynamic-imports circular --exit-code circular:1 --no-warning --no-tree src/*.ts src/**/*.ts",
114
115
  "lint": "pnpm lint:eslint && pnpm lint:dpdm",
@@ -119,6 +120,8 @@
119
120
  "test:single": "NODE_OPTIONS=--experimental-vm-modules pnpm run jest --config jest.config.cjs --testTimeout 100000",
120
121
  "test:int": "NODE_OPTIONS=--experimental-vm-modules jest --testPathPattern=\\.int\\.test.ts --testTimeout 100000 --maxWorkers=50%",
121
122
  "format": "prettier --config .prettierrc --write \"src\"",
122
- "format:check": "prettier --config .prettierrc --check \"src\""
123
+ "format:check": "prettier --config .prettierrc --check \"src\"",
124
+ "typegen": "pnpm run typegen:profiles",
125
+ "typegen:profiles": "pnpm --filter @langchain/model-profiles make --config profiles.toml"
123
126
  }
124
127
  }