@langchain/google-common 0.2.1 → 0.2.3

@@ -51,6 +51,11 @@ class ChatConnection extends connection_js_1.AbstractGoogleLLMConnection {
             // on AI Studio gemini-pro is still pointing at gemini-1.0-pro-001
             return false;
         }
+        else if (this.modelFamily === "gemma") {
+            // At least as of 12 Mar 2025 gemma 3 on AIS, trying to use system instructions yields an error:
+            // "Developer instruction is not enabled for models/gemma-3-27b-it"
+            return false;
+        }
         return true;
     }
     computeGoogleSearchToolAdjustmentFromModel() {
@@ -187,6 +192,12 @@ class ChatGoogleBase extends chat_models_1.BaseChatModel {
             writable: true,
             value: []
         });
+        Object.defineProperty(this, "responseModalities", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
         // May intentionally be undefined, meaning to compute this.
         Object.defineProperty(this, "convertSystemMessageToHumanContent", {
             enumerable: true,
@@ -10,7 +10,7 @@ import { AsyncCaller } from "@langchain/core/utils/async_caller";
 import { GoogleAIBaseLLMInput, GoogleAIModelParams, GoogleAISafetySetting, GoogleConnectionParams, GooglePlatformType, GoogleAIBaseLanguageModelCallOptions, GoogleAIAPI, GoogleAIAPIParams, GoogleSearchToolSetting } from "./types.js";
 import { AbstractGoogleLLMConnection } from "./connection.js";
 import { GoogleAbstractedClient } from "./auth.js";
-import type { GoogleBaseLLMInput, GoogleAISafetyHandler, GoogleAISafetyParams, GoogleAIToolType, GeminiAPIConfig } from "./types.js";
+import type { GoogleBaseLLMInput, GoogleAISafetyHandler, GoogleAISafetyParams, GoogleAIToolType, GeminiAPIConfig, GoogleAIModelModality } from "./types.js";
 export declare class ChatConnection<AuthOptions> extends AbstractGoogleLLMConnection<BaseMessage[], AuthOptions> {
     convertSystemMessageToHumanContent: boolean | undefined;
     constructor(fields: GoogleAIBaseLLMInput<AuthOptions> | undefined, caller: AsyncCaller, client: GoogleAbstractedClient, streaming: boolean);
@@ -47,6 +47,7 @@ export declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<
     logprobs: boolean;
     topLogprobs: number;
     safetySettings: GoogleAISafetySetting[];
+    responseModalities?: GoogleAIModelModality[];
     convertSystemMessageToHumanContent: boolean | undefined;
     safetyHandler: GoogleAISafetyHandler;
     streamUsage: boolean;
@@ -48,6 +48,11 @@ export class ChatConnection extends AbstractGoogleLLMConnection {
             // on AI Studio gemini-pro is still pointing at gemini-1.0-pro-001
             return false;
         }
+        else if (this.modelFamily === "gemma") {
+            // At least as of 12 Mar 2025 gemma 3 on AIS, trying to use system instructions yields an error:
+            // "Developer instruction is not enabled for models/gemma-3-27b-it"
+            return false;
+        }
         return true;
     }
     computeGoogleSearchToolAdjustmentFromModel() {
@@ -183,6 +188,12 @@ export class ChatGoogleBase extends BaseChatModel {
             writable: true,
             value: []
         });
+        Object.defineProperty(this, "responseModalities", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
         // May intentionally be undefined, meaning to compute this.
         Object.defineProperty(this, "convertSystemMessageToHumanContent", {
             enumerable: true,
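
The chat-model hunks above disable native system instructions for the new "gemma" model family (Gemma 3 on AI Studio rejects them) and add a `responseModalities` property to `ChatGoogleBase`. The sketch below is a hypothetical illustration of what the Gemma change means for a caller, assuming a concrete subclass such as `ChatGoogle` from the sibling `@langchain/google-gauth` package (not part of this diff): a `SystemMessage` is not forwarded as a native system instruction for Gemma models and is handled client-side instead.

```typescript
import { SystemMessage, HumanMessage } from "@langchain/core/messages";
// ChatGoogle is a concrete subclass of ChatGoogleBase from a sibling package;
// using it here is an assumption, not something this diff ships.
import { ChatGoogle } from "@langchain/google-gauth";

async function main() {
  const model = new ChatGoogle({ model: "gemma-3-27b-it" });

  // Because useSystemInstruction is false for the "gemma" family, the system
  // message below is not sent as a native systemInstruction; the library
  // handles it client-side rather than triggering the AI Studio error.
  const result = await model.invoke([
    new SystemMessage("Answer in one short sentence."),
    new HumanMessage("What is LangChain?"),
  ]);
  console.log(result.content);
}

main();
```
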
@@ -216,6 +216,7 @@ class GoogleAIConnection extends GoogleHostConnection {
     get api() {
         switch (this.apiName) {
             case "google":
+            case "gemma": // TODO: Is this true?
                 return (0, index_js_1.getGeminiAPI)(this.apiConfig);
             case "anthropic":
                 return (0, anthropic_js_1.getAnthropicAPI)(this.apiConfig);
@@ -319,6 +320,7 @@ class AbstractGoogleLLMConnection extends GoogleAIConnection {
     async buildUrlMethod() {
         switch (this.modelFamily) {
             case "gemini":
+            case "gemma": // TODO: Is this true?
                 return this.buildUrlMethodGemini();
             case "claude":
                 return this.buildUrlMethodClaude();
@@ -210,6 +210,7 @@ export class GoogleAIConnection extends GoogleHostConnection {
     get api() {
         switch (this.apiName) {
             case "google":
+            case "gemma": // TODO: Is this true?
                 return getGeminiAPI(this.apiConfig);
             case "anthropic":
                 return getAnthropicAPI(this.apiConfig);
@@ -312,6 +313,7 @@ export class AbstractGoogleLLMConnection extends GoogleAIConnection {
     async buildUrlMethod() {
         switch (this.modelFamily) {
             case "gemini":
+            case "gemma": // TODO: Is this true?
                 return this.buildUrlMethodGemini();
             case "claude":
                 return this.buildUrlMethodClaude();
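
The connection hunks above route the "gemma" family through the same branches as "gemini", both when selecting the API implementation (`getGeminiAPI`) and when choosing the URL method (`buildUrlMethodGemini`); the `TODO: Is this true?` comments mark this as a provisional choice by the authors. A minimal standalone sketch of the resulting fall-through (illustration only, not package code):

```typescript
// Standalone illustration: the new "gemma" family falls through to the
// Gemini branches, so Gemma requests reuse the Gemini request/response path.
type ModelFamily = null | "palm" | "gemini" | "gemma" | "claude";

function usesGeminiWireFormat(family: ModelFamily): boolean {
  switch (family) {
    case "gemini":
    case "gemma": // mirrors the "TODO: Is this true?" fall-through above
      return true;
    default:
      return false;
  }
}

console.log(usesGeminiWireFormat("gemma"));  // true
console.log(usesGeminiWireFormat("claude")); // false
```
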
package/dist/types.d.ts CHANGED
@@ -84,6 +84,7 @@ export interface GoogleAISafetySetting {
     method?: GoogleAISafetyMethod | string;
 }
 export type GoogleAIResponseMimeType = "text/plain" | "application/json";
+export type GoogleAIModelModality = "TEXT" | "IMAGE" | "AUDIO" | string;
 export interface GoogleAIModelParams {
     /** Model to use */
     model?: string;
@@ -177,6 +178,10 @@ export interface GoogleAIModelParams {
      * logprobs must be set to true if this parameter is used.
      */
     topLogprobs?: number;
+    /**
+     * The modalities of the response.
+     */
+    responseModalities?: GoogleAIModelModality[];
 }
 export type GoogleAIToolType = BindToolsInput | GeminiTool;
 /**
@@ -202,6 +207,16 @@ export interface GoogleAIModelRequestParams extends GoogleAIModelParams {
      * If empty, any one of the provided functions are called.
      */
     allowed_function_names?: string[];
+    /**
+     * Used to specify a previously created context cache to use with generation.
+     * For Vertex, this should be of the form:
+     * "projects/PROJECT_NUMBER/locations/LOCATION/cachedContents/CACHE_ID",
+     *
+     * See these guides for more information on how to use context caching:
+     * https://cloud.google.com/vertex-ai/generative-ai/docs/context-cache/context-cache-create
+     * https://cloud.google.com/vertex-ai/generative-ai/docs/context-cache/context-cache-use
+     */
+    cachedContent?: string;
 }
 export interface GoogleAIBaseLLMInput<AuthOptions> extends BaseLLMParams, GoogleConnectionParams<AuthOptions>, GoogleAIModelParams, GoogleAISafetyParams, GoogleAIAPIParams {
 }
@@ -382,6 +397,7 @@ export interface GeminiGenerationConfig {
     responseMimeType?: GoogleAIResponseMimeType;
     responseLogprobs?: boolean;
     logprobs?: number;
+    responseModalities?: GoogleAIModelModality[];
 }
 export interface GeminiRequest {
     contents?: GeminiContent[];
@@ -395,6 +411,7 @@ export interface GeminiRequest {
     };
     safetySettings?: GeminiSafetySetting[];
     generationConfig?: GeminiGenerationConfig;
+    cachedContent?: string;
 }
 export interface GeminiResponseCandidate {
     content: {
@@ -409,6 +426,7 @@ export interface GeminiResponseCandidate {
     groundingMetadata?: GeminiGroundingMetadata;
     avgLogprobs?: number;
     logprobsResult: GeminiLogprobsResult;
+    finishMessage?: string;
 }
 interface GeminiResponsePromptFeedback {
     blockReason?: string;
@@ -419,7 +437,7 @@ export interface GenerateContentResponseData {
     promptFeedback: GeminiResponsePromptFeedback;
     usageMetadata: Record<string, unknown>;
 }
-export type GoogleLLMModelFamily = null | "palm" | "gemini";
+export type GoogleLLMModelFamily = null | "palm" | "gemini" | "gemma";
 export type VertexModelFamily = GoogleLLMModelFamily | "claude";
 export type GoogleLLMResponseData = JsonStream | GenerateContentResponseData | GenerateContentResponseData[];
 export interface GoogleLLMResponse extends GoogleResponse {
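
The type changes above add two user-facing parameters: `responseModalities` (a model-level `GoogleAIModelModality[]` on `GoogleAIModelParams`) and `cachedContent` (a per-request string on `GoogleAIModelRequestParams`), plus a `finishMessage` field on response candidates and a "gemma" member of `GoogleLLMModelFamily`. A hedged usage sketch, assuming a concrete subclass such as `ChatVertexAI` from `@langchain/google-vertexai` built against this release; the model name is an example and the cache resource name uses placeholder segments:

```typescript
// ChatVertexAI (from @langchain/google-vertexai) is one concrete subclass of
// ChatGoogleBase; any other subclass should accept the same fields.
import { ChatVertexAI } from "@langchain/google-vertexai";

async function main() {
  // responseModalities is a model-level parameter (GoogleAIModelParams)...
  const model = new ChatVertexAI({
    model: "gemini-2.0-flash-exp", // example; pick a model that supports the requested modalities
    responseModalities: ["TEXT", "IMAGE"],
  });

  // ...while cachedContent is a per-request parameter (GoogleAIModelRequestParams).
  const result = await model.invoke("Summarize the cached document.", {
    cachedContent:
      "projects/PROJECT_NUMBER/locations/LOCATION/cachedContents/CACHE_ID",
  });
  console.log(result.content);
}

main();
```
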
@@ -128,6 +128,10 @@ function copyAIModelParamsInto(params, options, target) {
         options?.responseMimeType ??
             params?.responseMimeType ??
             target?.responseMimeType;
+    ret.responseModalities =
+        options?.responseModalities ??
+            params?.responseModalities ??
+            target?.responseModalities;
     ret.streaming = options?.streaming ?? params?.streaming ?? target?.streaming;
     const toolChoice = processToolChoice(options?.tool_choice, options?.allowed_function_names);
     if (toolChoice) {
@@ -139,6 +143,9 @@ function copyAIModelParamsInto(params, options, target) {
         // eslint-disable-next-line @typescript-eslint/no-explicit-any
         ret.tools = convertToGeminiTools(tools);
     }
+    if (options?.cachedContent) {
+        ret.cachedContent = options.cachedContent;
+    }
     return ret;
 }
 exports.copyAIModelParamsInto = copyAIModelParamsInto;
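
`copyAIModelParamsInto` merges the new `responseModalities` field with the same nullish-coalescing precedence used for the other parameters (call-time options, then constructor params, then the bound target), while `cachedContent` is taken from call options only. A minimal standalone illustration of that precedence rule (not package code):

```typescript
// Standalone illustration of the `options ?? params ?? target` precedence.
type Modality = "TEXT" | "IMAGE" | "AUDIO" | string;

interface Source {
  responseModalities?: Modality[];
}

function mergeModalities(
  params?: Source,  // constructor-level parameters
  options?: Source, // per-call options (highest precedence)
  target?: Source,  // previously bound values (lowest precedence)
): Modality[] | undefined {
  return (
    options?.responseModalities ??
    params?.responseModalities ??
    target?.responseModalities
  );
}

console.log(
  mergeModalities(
    { responseModalities: ["TEXT"] },          // constructor value
    { responseModalities: ["TEXT", "IMAGE"] }, // per-call options win
  ),
); // -> ["TEXT", "IMAGE"]
```
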
@@ -149,6 +156,9 @@ function modelToFamily(modelName) {
     else if ((0, gemini_js_1.isModelGemini)(modelName)) {
         return "gemini";
     }
+    else if ((0, gemini_js_1.isModelGemma)(modelName)) {
+        return "gemma";
+    }
     else if ((0, anthropic_js_1.isModelClaude)(modelName)) {
         return "claude";
     }
@@ -161,6 +171,7 @@ function modelToPublisher(modelName) {
     const family = modelToFamily(modelName);
     switch (family) {
         case "gemini":
+        case "gemma":
         case "palm":
             return "google";
         case "claude":
@@ -175,6 +186,7 @@ function validateModelParams(params) {
     const model = testParams.model ?? testParams.modelName;
     switch (modelToFamily(model)) {
         case "gemini":
+        case "gemma": // TODO: Are we sure?
            return (0, gemini_js_1.validateGeminiParams)(testParams);
         case "claude":
            return (0, anthropic_js_1.validateClaudeParams)(testParams);
@@ -1,6 +1,6 @@
 import { isOpenAITool } from "@langchain/core/language_models/base";
 import { isLangChainTool } from "@langchain/core/utils/function_calling";
-import { isModelGemini, validateGeminiParams } from "./gemini.js";
+import { isModelGemini, isModelGemma, validateGeminiParams } from "./gemini.js";
 import { GeminiToolAttributes, } from "../types.js";
 import { jsonSchemaToGeminiParameters, zodToGeminiParameters, } from "./zod_to_gemini_parameters.js";
 import { isModelClaude, validateClaudeParams } from "./anthropic.js";
@@ -123,6 +123,10 @@ export function copyAIModelParamsInto(params, options, target) {
         options?.responseMimeType ??
             params?.responseMimeType ??
             target?.responseMimeType;
+    ret.responseModalities =
+        options?.responseModalities ??
+            params?.responseModalities ??
+            target?.responseModalities;
     ret.streaming = options?.streaming ?? params?.streaming ?? target?.streaming;
     const toolChoice = processToolChoice(options?.tool_choice, options?.allowed_function_names);
     if (toolChoice) {
@@ -134,6 +138,9 @@ export function copyAIModelParamsInto(params, options, target) {
         // eslint-disable-next-line @typescript-eslint/no-explicit-any
         ret.tools = convertToGeminiTools(tools);
     }
+    if (options?.cachedContent) {
+        ret.cachedContent = options.cachedContent;
+    }
     return ret;
 }
 export function modelToFamily(modelName) {
@@ -143,6 +150,9 @@ export function modelToFamily(modelName) {
     else if (isModelGemini(modelName)) {
         return "gemini";
     }
+    else if (isModelGemma(modelName)) {
+        return "gemma";
+    }
     else if (isModelClaude(modelName)) {
         return "claude";
     }
@@ -154,6 +164,7 @@ export function modelToPublisher(modelName) {
     const family = modelToFamily(modelName);
     switch (family) {
         case "gemini":
+        case "gemma":
         case "palm":
             return "google";
         case "claude":
@@ -167,6 +178,7 @@ export function validateModelParams(params) {
     const model = testParams.model ?? testParams.modelName;
     switch (modelToFamily(model)) {
         case "gemini":
+        case "gemma": // TODO: Are we sure?
             return validateGeminiParams(testParams);
         case "claude":
             return validateClaudeParams(testParams);
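
With these helpers, any model name beginning with "gemma" is classified as its own family, published under "google", and validated with the Gemini rules. The sketch below assumes `modelToFamily` and `modelToPublisher` can be imported from the package's `utils/common` module; the exact public import path is an assumption, as the built files live in `dist/utils/common.js`:

```typescript
// Import path is an assumption; the functions are exported from the compiled
// dist/utils/common.js module shown in this diff.
import { modelToFamily, modelToPublisher } from "@langchain/google-common/utils/common";

console.log(modelToFamily("gemma-3-27b-it"));    // "gemma"
console.log(modelToPublisher("gemma-3-27b-it")); // "google"
console.log(modelToFamily("gemini-2.0-flash"));  // "gemini"
```
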
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isModelGemini = exports.validateGeminiParams = exports.getGeminiAPI = exports.MessageGeminiSafetyHandler = exports.DefaultGeminiSafetyHandler = void 0;
+exports.isModelGemma = exports.isModelGemini = exports.validateGeminiParams = exports.getGeminiAPI = exports.MessageGeminiSafetyHandler = exports.DefaultGeminiSafetyHandler = void 0;
 const uuid_1 = require("uuid");
 const messages_1 = require("@langchain/core/messages");
 const outputs_1 = require("@langchain/core/outputs");
@@ -535,10 +535,17 @@ function getGeminiAPI(config) {
         };
     }
     function responseToGenerationInfo(response) {
-        if (!Array.isArray(response.data)) {
+        const data =
+        // eslint-disable-next-line no-nested-ternary
+        Array.isArray(response.data) && response.data[0]
+            ? response.data[0]
+            : response.data &&
+                response.data.candidates
+                ? response.data
+                : undefined;
+        if (!data) {
             return {};
         }
-        const data = response.data[0];
         return {
             usage_metadata: {
                 prompt_token_count: data.usageMetadata?.promptTokenCount,
@@ -555,6 +562,7 @@ function getGeminiAPI(config) {
             citation_metadata: data.candidates[0]?.citationMetadata,
             grounding_metadata: data.candidates[0]?.groundingMetadata,
             finish_reason: data.candidates[0]?.finishReason,
+            finish_message: data.candidates[0]?.finishMessage,
             avgLogprobs: data.candidates[0]?.avgLogprobs,
             logprobs: candidateToLogprobs(data.candidates[0]),
         };
@@ -933,6 +941,7 @@ function getGeminiAPI(config) {
             maxOutputTokens: parameters.maxOutputTokens,
             stopSequences: parameters.stopSequences,
             responseMimeType: parameters.responseMimeType,
+            responseModalities: parameters.responseModalities,
         };
         // Add the logprobs if explicitly set
         if (typeof parameters.logprobs !== "undefined") {
@@ -1083,6 +1092,9 @@ function getGeminiAPI(config) {
             systemInstruction?.parts?.length) {
             ret.systemInstruction = systemInstruction;
         }
+        if (parameters.cachedContent) {
+            ret.cachedContent = parameters.cachedContent;
+        }
         return ret;
     }
     return {
@@ -1117,3 +1129,7 @@ function isModelGemini(modelName) {
     return modelName.toLowerCase().startsWith("gemini");
 }
 exports.isModelGemini = isModelGemini;
+function isModelGemma(modelName) {
+    return modelName.toLowerCase().startsWith("gemma");
+}
+exports.isModelGemma = isModelGemma;
@@ -42,3 +42,4 @@ export declare class MessageGeminiSafetyHandler extends DefaultGeminiSafetyHandl
 export declare function getGeminiAPI(config?: GeminiAPIConfig): GoogleAIAPI;
 export declare function validateGeminiParams(params: GoogleAIModelParams): void;
 export declare function isModelGemini(modelName: string): boolean;
+export declare function isModelGemma(modelName: string): boolean;
@@ -530,10 +530,17 @@ export function getGeminiAPI(config) {
         };
     }
     function responseToGenerationInfo(response) {
-        if (!Array.isArray(response.data)) {
+        const data =
+        // eslint-disable-next-line no-nested-ternary
+        Array.isArray(response.data) && response.data[0]
+            ? response.data[0]
+            : response.data &&
+                response.data.candidates
+                ? response.data
+                : undefined;
+        if (!data) {
             return {};
         }
-        const data = response.data[0];
         return {
             usage_metadata: {
                 prompt_token_count: data.usageMetadata?.promptTokenCount,
@@ -550,6 +557,7 @@ export function getGeminiAPI(config) {
             citation_metadata: data.candidates[0]?.citationMetadata,
             grounding_metadata: data.candidates[0]?.groundingMetadata,
             finish_reason: data.candidates[0]?.finishReason,
+            finish_message: data.candidates[0]?.finishMessage,
             avgLogprobs: data.candidates[0]?.avgLogprobs,
             logprobs: candidateToLogprobs(data.candidates[0]),
         };
@@ -928,6 +936,7 @@ export function getGeminiAPI(config) {
             maxOutputTokens: parameters.maxOutputTokens,
             stopSequences: parameters.stopSequences,
             responseMimeType: parameters.responseMimeType,
+            responseModalities: parameters.responseModalities,
         };
         // Add the logprobs if explicitly set
         if (typeof parameters.logprobs !== "undefined") {
@@ -1078,6 +1087,9 @@ export function getGeminiAPI(config) {
             systemInstruction?.parts?.length) {
             ret.systemInstruction = systemInstruction;
         }
+        if (parameters.cachedContent) {
+            ret.cachedContent = parameters.cachedContent;
+        }
         return ret;
     }
     return {
@@ -1109,3 +1121,6 @@ export function validateGeminiParams(params) {
 export function isModelGemini(modelName) {
     return modelName.toLowerCase().startsWith("gemini");
 }
+export function isModelGemma(modelName) {
+    return modelName.toLowerCase().startsWith("gemma");
+}
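
`isModelGemma` is a case-insensitive prefix check exported alongside `isModelGemini`. A small usage sketch; the import path is an assumption (the helper is defined in `dist/utils/gemini.js` and may or may not be re-exported from the package root):

```typescript
// Import path assumed to mirror where isModelGemini is consumed from.
import { isModelGemma } from "@langchain/google-common";

console.log(isModelGemma("gemma-3-27b-it"));   // true
console.log(isModelGemma("Gemma-2-9b"));       // true (check is case-insensitive)
console.log(isModelGemma("gemini-2.0-flash")); // false
```
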
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/google-common",
-  "version": "0.2.1",
+  "version": "0.2.3",
   "description": "Core types and classes for Google services.",
   "type": "module",
   "engines": {