@langchain/google-common 0.2.15 → 0.2.17

This diff shows the changes between publicly released versions of this package as published to a supported registry. The information is provided for informational purposes only and reflects the package contents as they appear in the public registry.
@@ -242,6 +242,12 @@ class ChatGoogleBase extends chat_models_1.BaseChatModel {
242
242
  writable: true,
243
243
  value: false
244
244
  });
245
+ Object.defineProperty(this, "labels", {
246
+ enumerable: true,
247
+ configurable: true,
248
+ writable: true,
249
+ value: void 0
250
+ });
245
251
  Object.defineProperty(this, "connection", {
246
252
  enumerable: true,
247
253
  configurable: true,
@@ -55,6 +55,7 @@ export declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<
55
55
  speechConfig: GoogleSpeechConfig;
56
56
  streamUsage: boolean;
57
57
  streaming: boolean;
58
+ labels?: Record<string, string>;
58
59
  protected connection: ChatConnection<AuthOptions>;
59
60
  protected streamedConnection: ChatConnection<AuthOptions>;
60
61
  constructor(fields?: ChatGoogleBaseInput<AuthOptions>);
@@ -238,6 +238,12 @@ export class ChatGoogleBase extends BaseChatModel {
238
238
  writable: true,
239
239
  value: false
240
240
  });
241
+ Object.defineProperty(this, "labels", {
242
+ enumerable: true,
243
+ configurable: true,
244
+ writable: true,
245
+ value: void 0
246
+ });
241
247
  Object.defineProperty(this, "connection", {
242
248
  enumerable: true,
243
249
  configurable: true,
@@ -282,12 +282,7 @@ class GoogleAIConnection extends GoogleHostConnection {
282
282
  get computedLocation() {
283
283
  switch (this.apiName) {
284
284
  case "google":
285
- if (this.modelName.startsWith("gemini-2.5-flash-lite")) {
286
- return "global";
287
- }
288
- else {
289
- return super.computedLocation;
290
- }
285
+ return super.computedLocation;
291
286
  case "anthropic":
292
287
  return "us-east5";
293
288
  default:
@@ -375,7 +370,13 @@ class AbstractGoogleLLMConnection extends GoogleAIConnection {
375
370
  }
376
371
  }
377
372
  async formatData(input, parameters) {
378
- return this.api.formatData(input, parameters);
373
+ // Filter out labels for non-Vertex AI platforms (labels are only supported on Vertex AI)
374
+ let filteredParameters = parameters;
375
+ if (parameters.labels && this.platform !== "gcp") {
376
+ const { labels, ...paramsWithoutLabels } = parameters;
377
+ filteredParameters = paramsWithoutLabels;
378
+ }
379
+ return this.api.formatData(input, filteredParameters);
379
380
  }
380
381
  }
381
382
  exports.AbstractGoogleLLMConnection = AbstractGoogleLLMConnection;
@@ -276,12 +276,7 @@ export class GoogleAIConnection extends GoogleHostConnection {
276
276
  get computedLocation() {
277
277
  switch (this.apiName) {
278
278
  case "google":
279
- if (this.modelName.startsWith("gemini-2.5-flash-lite")) {
280
- return "global";
281
- }
282
- else {
283
- return super.computedLocation;
284
- }
279
+ return super.computedLocation;
285
280
  case "anthropic":
286
281
  return "us-east5";
287
282
  default:
@@ -368,7 +363,13 @@ export class AbstractGoogleLLMConnection extends GoogleAIConnection {
368
363
  }
369
364
  }
370
365
  async formatData(input, parameters) {
371
- return this.api.formatData(input, parameters);
366
+ // Filter out labels for non-Vertex AI platforms (labels are only supported on Vertex AI)
367
+ let filteredParameters = parameters;
368
+ if (parameters.labels && this.platform !== "gcp") {
369
+ const { labels, ...paramsWithoutLabels } = parameters;
370
+ filteredParameters = paramsWithoutLabels;
371
+ }
372
+ return this.api.formatData(input, filteredParameters);
372
373
  }
373
374
  }
374
375
  export class GoogleRequestCallbackHandler extends BaseCallbackHandler {
package/dist/types.d.ts CHANGED
@@ -263,6 +263,23 @@ export interface GoogleAIModelParams extends GoogleModelParams {
263
263
  * The modalities of the response.
264
264
  */
265
265
  responseModalities?: GoogleAIModelModality[];
266
+ /**
267
+ * Custom metadata labels to associate with the request.
268
+ * Only supported on Vertex AI (Google Cloud Platform).
269
+ * Labels are key-value pairs where both keys and values must be strings.
270
+ *
271
+ * Example:
272
+ * ```typescript
273
+ * {
274
+ * labels: {
275
+ * "team": "research",
276
+ * "component": "frontend",
277
+ * "environment": "production"
278
+ * }
279
+ * }
280
+ * ```
281
+ */
282
+ labels?: Record<string, string>;
266
283
  /**
267
284
  * Speech generation configuration.
268
285
  * You can use either Google's definition of the speech configuration,
@@ -530,6 +547,10 @@ export interface GeminiRequest {
530
547
  safetySettings?: GeminiSafetySetting[];
531
548
  generationConfig?: GeminiGenerationConfig;
532
549
  cachedContent?: string;
550
+ /**
551
+ * Custom metadata labels to associate with the API call.
552
+ */
553
+ labels?: Record<string, string>;
533
554
  }
534
555
  export interface GeminiResponseCandidate {
535
556
  content: {
@@ -180,6 +180,7 @@ function copyAIModelParamsInto(params, options, target) {
180
180
  if (options?.cachedContent) {
181
181
  ret.cachedContent = options.cachedContent;
182
182
  }
183
+ ret.labels = options?.labels ?? params?.labels ?? target?.labels;
183
184
  return ret;
184
185
  }
185
186
  function modelToFamily(modelName) {
@@ -171,6 +171,7 @@ export function copyAIModelParamsInto(params, options, target) {
171
171
  if (options?.cachedContent) {
172
172
  ret.cachedContent = options.cachedContent;
173
173
  }
174
+ ret.labels = options?.labels ?? params?.labels ?? target?.labels;
174
175
  return ret;
175
176
  }
176
177
  export function modelToFamily(modelName) {
@@ -2,6 +2,7 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.MessageGeminiSafetyHandler = exports.DefaultGeminiSafetyHandler = void 0;
4
4
  exports.normalizeSpeechConfig = normalizeSpeechConfig;
5
+ exports.normalizeMessageContentComplex = normalizeMessageContentComplex;
5
6
  exports.getGeminiAPI = getGeminiAPI;
6
7
  exports.validateGeminiParams = validateGeminiParams;
7
8
  exports.isModelGemini = isModelGemini;
@@ -197,6 +198,49 @@ function normalizeSpeechConfig(config) {
197
198
  }
198
199
  return ret;
199
200
  }
201
+ // Compatibility layer for other well known content block types
202
+ function normalizeMessageContentComplex(content) {
203
+ return content.map((c) => {
204
+ // OpenAI completions `input_audio`
205
+ if (c.type === "input_audio" &&
206
+ "input_audio" in c &&
207
+ typeof c.input_audio === "object") {
208
+ const { format, data } = c.input_audio;
209
+ if (format === "wav") {
210
+ return {
211
+ type: "audio",
212
+ source_type: "base64",
213
+ mime_type: "audio/wav",
214
+ data,
215
+ };
216
+ }
217
+ }
218
+ // OpenAI completions `image_url`
219
+ if (c.type === "image_url" &&
220
+ "image_url" in c &&
221
+ typeof c.image_url === "object") {
222
+ const { url } = c.image_url;
223
+ return {
224
+ type: "image",
225
+ source_type: "url",
226
+ url,
227
+ };
228
+ }
229
+ // OpenAI completions `file`
230
+ if (c.type === "file" &&
231
+ "file" in c &&
232
+ typeof c.file === "object" &&
233
+ "file_data" in c.file) {
234
+ const { file_data } = c.file;
235
+ return {
236
+ type: "file",
237
+ source_type: "base64",
238
+ data: file_data,
239
+ };
240
+ }
241
+ return c;
242
+ });
243
+ }
200
244
  function getGeminiAPI(config) {
201
245
  function messageContentText(content) {
202
246
  if (content?.text && content?.text.length > 0) {
@@ -444,8 +488,10 @@ function getGeminiAPI(config) {
444
488
  },
445
489
  ]
446
490
  : content;
491
+ // Normalize the content to use standard format
492
+ const normalizedContent = normalizeMessageContentComplex(messageContent);
447
493
  // Get all of the parts, even those that don't correctly resolve
448
- const allParts = await messageContentComplexToParts(messageContent);
494
+ const allParts = await messageContentComplexToParts(normalizedContent);
449
495
  // Remove any invalid parts
450
496
  const parts = allParts.reduce((acc, val) => {
451
497
  if (val) {
@@ -1408,6 +1454,9 @@ function getGeminiAPI(config) {
1408
1454
  if (parameters.cachedContent) {
1409
1455
  ret.cachedContent = parameters.cachedContent;
1410
1456
  }
1457
+ if (parameters.labels && Object.keys(parameters.labels).length > 0) {
1458
+ ret.labels = parameters.labels;
1459
+ }
1411
1460
  return ret;
1412
1461
  }
1413
1462
  return {
@@ -1,3 +1,4 @@
1
+ import { MessageContentComplex } from "@langchain/core/messages";
1
2
  import { GoogleLLMResponse, GoogleAIModelParams, GenerateContentResponseData, GoogleAISafetyHandler, GoogleAIAPI, GeminiAPIConfig, GoogleSpeechConfig, GoogleSpeechConfigSimplified } from "../types.js";
2
3
  export interface FunctionCall {
3
4
  name: string;
@@ -40,6 +41,7 @@ export declare class MessageGeminiSafetyHandler extends DefaultGeminiSafetyHandl
40
41
  handleData(response: GoogleLLMResponse, data: GenerateContentResponseData): GenerateContentResponseData;
41
42
  }
42
43
  export declare function normalizeSpeechConfig(config: GoogleSpeechConfig | GoogleSpeechConfigSimplified | undefined): GoogleSpeechConfig | undefined;
44
+ export declare function normalizeMessageContentComplex(content: MessageContentComplex[]): MessageContentComplex[];
43
45
  export declare function getGeminiAPI(config?: GeminiAPIConfig): GoogleAIAPI;
44
46
  export declare function validateGeminiParams(params: GoogleAIModelParams): void;
45
47
  export declare function isModelGemini(modelName: string): boolean;
@@ -187,6 +187,49 @@ export function normalizeSpeechConfig(config) {
187
187
  }
188
188
  return ret;
189
189
  }
190
+ // Compatibility layer for other well known content block types
191
+ export function normalizeMessageContentComplex(content) {
192
+ return content.map((c) => {
193
+ // OpenAI completions `input_audio`
194
+ if (c.type === "input_audio" &&
195
+ "input_audio" in c &&
196
+ typeof c.input_audio === "object") {
197
+ const { format, data } = c.input_audio;
198
+ if (format === "wav") {
199
+ return {
200
+ type: "audio",
201
+ source_type: "base64",
202
+ mime_type: "audio/wav",
203
+ data,
204
+ };
205
+ }
206
+ }
207
+ // OpenAI completions `image_url`
208
+ if (c.type === "image_url" &&
209
+ "image_url" in c &&
210
+ typeof c.image_url === "object") {
211
+ const { url } = c.image_url;
212
+ return {
213
+ type: "image",
214
+ source_type: "url",
215
+ url,
216
+ };
217
+ }
218
+ // OpenAI completions `file`
219
+ if (c.type === "file" &&
220
+ "file" in c &&
221
+ typeof c.file === "object" &&
222
+ "file_data" in c.file) {
223
+ const { file_data } = c.file;
224
+ return {
225
+ type: "file",
226
+ source_type: "base64",
227
+ data: file_data,
228
+ };
229
+ }
230
+ return c;
231
+ });
232
+ }
190
233
  export function getGeminiAPI(config) {
191
234
  function messageContentText(content) {
192
235
  if (content?.text && content?.text.length > 0) {
@@ -434,8 +477,10 @@ export function getGeminiAPI(config) {
434
477
  },
435
478
  ]
436
479
  : content;
480
+ // Normalize the content to use standard format
481
+ const normalizedContent = normalizeMessageContentComplex(messageContent);
437
482
  // Get all of the parts, even those that don't correctly resolve
438
- const allParts = await messageContentComplexToParts(messageContent);
483
+ const allParts = await messageContentComplexToParts(normalizedContent);
439
484
  // Remove any invalid parts
440
485
  const parts = allParts.reduce((acc, val) => {
441
486
  if (val) {
@@ -1398,6 +1443,9 @@ export function getGeminiAPI(config) {
1398
1443
  if (parameters.cachedContent) {
1399
1444
  ret.cachedContent = parameters.cachedContent;
1400
1445
  }
1446
+ if (parameters.labels && Object.keys(parameters.labels).length > 0) {
1447
+ ret.labels = parameters.labels;
1448
+ }
1401
1449
  return ret;
1402
1450
  }
1403
1451
  return {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@langchain/google-common",
3
- "version": "0.2.15",
3
+ "version": "0.2.17",
4
4
  "description": "Core types and classes for Google services.",
5
5
  "type": "module",
6
6
  "engines": {