@langchain/google-common 0.0.22 → 0.0.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -57,10 +57,23 @@ class ChatConnection extends connection_js_1.AbstractGoogleLLMConnection {
         return input
             .map((msg, i) => (0, gemini_js_1.baseMessageToContent)(msg, input[i - 1], this.useSystemInstruction))
             .reduce((acc, cur) => {
-            // Filter out the system content, since those don't belong
-            // in the actual content.
-            const hasNoSystem = cur.every((content) => content.role !== "system");
-            return hasNoSystem ? [...acc, ...cur] : acc;
+            // Filter out the system content
+            if (cur.every((content) => content.role === "system")) {
+                return acc;
+            }
+            // Combine adjacent function messages
+            if (cur[0]?.role === "function" &&
+                acc.length > 0 &&
+                acc[acc.length - 1].role === "function") {
+                acc[acc.length - 1].parts = [
+                    ...acc[acc.length - 1].parts,
+                    ...cur[0].parts,
+                ];
+            }
+            else {
+                acc.push(...cur);
+            }
+            return acc;
         }, []);
     }
     formatSystemInstruction(input, _parameters) {
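The rewritten reducer now does two things: it still drops content chunks that contain only system-role entries, and it folds a function-role chunk into an immediately preceding function-role entry, so consecutive tool results reach Gemini as a single content block. A minimal standalone sketch of that merge behaviour, with simplified types (`Content` stands in for the package's `GeminiContent`):

```ts
// Sketch only: simplified stand-in types for illustration.
type Part = { text?: string; functionResponse?: unknown };
type Content = { role: "system" | "user" | "model" | "function"; parts: Part[] };

// Mirrors the reduce() in the hunk above: drop system-only chunks, merge
// adjacent function-role chunks, otherwise append the chunk as-is.
function mergeContents(chunks: Content[][]): Content[] {
  return chunks.reduce<Content[]>((acc, cur) => {
    if (cur.every((content) => content.role === "system")) {
      return acc;
    }
    if (
      cur[0]?.role === "function" &&
      acc.length > 0 &&
      acc[acc.length - 1].role === "function"
    ) {
      acc[acc.length - 1].parts = [...acc[acc.length - 1].parts, ...cur[0].parts];
    } else {
      acc.push(...cur);
    }
    return acc;
  }, []);
}

// Two consecutive tool results collapse into one "function" content block.
const merged = mergeContents([
  [{ role: "function", parts: [{ functionResponse: { name: "a" } }] }],
  [{ role: "function", parts: [{ functionResponse: { name: "b" } }] }],
]);
// merged.length === 1, merged[0].parts.length === 2
```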
@@ -3,15 +3,14 @@ import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
 import { BaseChatModel, LangSmithParams, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
 import { ChatGenerationChunk, ChatResult } from "@langchain/core/outputs";
 import { AIMessageChunk } from "@langchain/core/messages";
-import { BaseLanguageModelInput, StructuredOutputMethodOptions, ToolDefinition } from "@langchain/core/language_models/base";
+import { BaseLanguageModelInput, StructuredOutputMethodOptions } from "@langchain/core/language_models/base";
 import type { z } from "zod";
-import { Runnable, RunnableToolLike } from "@langchain/core/runnables";
+import { Runnable } from "@langchain/core/runnables";
 import { AsyncCaller } from "@langchain/core/utils/async_caller";
-import { StructuredToolInterface } from "@langchain/core/tools";
 import { GoogleAIBaseLLMInput, GoogleAIModelParams, GoogleAISafetySetting, GoogleConnectionParams, GooglePlatformType, GeminiContent, GoogleAIBaseLanguageModelCallOptions } from "./types.js";
 import { AbstractGoogleLLMConnection } from "./connection.js";
 import { GoogleAbstractedClient } from "./auth.js";
-import type { GoogleBaseLLMInput, GoogleAISafetyHandler, GoogleAISafetyParams } from "./types.js";
+import type { GoogleBaseLLMInput, GoogleAISafetyHandler, GoogleAISafetyParams, GoogleAIToolType } from "./types.js";
 declare class ChatConnection<AuthOptions> extends AbstractGoogleLLMConnection<BaseMessage[], AuthOptions> {
     convertSystemMessageToHumanContent: boolean | undefined;
     constructor(fields: GoogleAIBaseLLMInput<AuthOptions> | undefined, caller: AsyncCaller, client: GoogleAbstractedClient, streaming: boolean);
@@ -56,7 +55,7 @@ export declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<
     buildClient(fields?: GoogleAIBaseLLMInput<AuthOptions>): GoogleAbstractedClient;
     buildConnection(fields: GoogleBaseLLMInput<AuthOptions>, client: GoogleAbstractedClient): void;
     get platform(): GooglePlatformType;
-    bindTools(tools: (StructuredToolInterface | Record<string, unknown> | ToolDefinition | RunnableToolLike)[], kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, GoogleAIBaseLanguageModelCallOptions>;
+    bindTools(tools: GoogleAIToolType[], kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, GoogleAIBaseLanguageModelCallOptions>;
     _llmType(): string;
     /**
      * Get the parameters used to invoke the model
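`bindTools` now accepts the `GoogleAIToolType` union introduced later in this diff (`BindToolsInput | GeminiTool`), so LangChain structured tools, OpenAI-style tool definitions, and raw `GeminiTool` objects are all valid inputs. A hedged sketch of the shapes involved; `ChatGoogleModel` is a hypothetical concrete subclass of `ChatGoogleBase`, not something this package exports:

```ts
import { tool } from "@langchain/core/tools";
import { z } from "zod";

// A LangChain structured tool (satisfies BindToolsInput).
const getWeather = tool(async ({ city }: { city: string }) => `Sunny in ${city}`, {
  name: "get_weather",
  description: "Look up the weather for a city.",
  schema: z.object({ city: z.string() }),
});

// A raw GeminiTool, also accepted by the new signature.
const geminiTool = {
  functionDeclarations: [
    {
      name: "get_time",
      description: "Return the current time.",
      parameters: { type: "object", properties: {} },
    },
  ],
};

// Hypothetical concrete model class, shown commented out for illustration:
// const model = new ChatGoogleModel({ model: "gemini-1.5-pro" });
// const withTools = model.bindTools([getWeather]);    // LangChain tool
// const withRawTools = model.bindTools([geminiTool]); // GeminiTool passthrough
```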
@@ -54,10 +54,23 @@ class ChatConnection extends AbstractGoogleLLMConnection {
         return input
             .map((msg, i) => baseMessageToContent(msg, input[i - 1], this.useSystemInstruction))
             .reduce((acc, cur) => {
-            // Filter out the system content, since those don't belong
-            // in the actual content.
-            const hasNoSystem = cur.every((content) => content.role !== "system");
-            return hasNoSystem ? [...acc, ...cur] : acc;
+            // Filter out the system content
+            if (cur.every((content) => content.role === "system")) {
+                return acc;
+            }
+            // Combine adjacent function messages
+            if (cur[0]?.role === "function" &&
+                acc.length > 0 &&
+                acc[acc.length - 1].role === "function") {
+                acc[acc.length - 1].parts = [
+                    ...acc[acc.length - 1].parts,
+                    ...cur[0].parts,
+                ];
+            }
+            else {
+                acc.push(...cur);
+            }
+            return acc;
         }, []);
     }
     formatSystemInstruction(input, _parameters) {
@@ -2,6 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.AbstractGoogleLLMConnection = exports.GoogleAIConnection = exports.GoogleHostConnection = exports.GoogleConnection = void 0;
 const env_1 = require("@langchain/core/utils/env");
+const function_calling_1 = require("@langchain/core/utils/function_calling");
 const zod_to_gemini_parameters_js_1 = require("./utils/zod_to_gemini_parameters.cjs");
 class GoogleConnection {
     constructor(caller, client, streaming) {
@@ -221,16 +222,11 @@ class AbstractGoogleLLMConnection extends GoogleAIConnection {
     formatSystemInstruction(_input, _parameters) {
         return {};
     }
-    // Borrowed from the OpenAI invocation params test
-    isStructuredToolArray(tools) {
-        return (tools !== undefined &&
-            tools.every((tool) => Array.isArray(tool.lc_namespace)));
-    }
     structuredToolToFunctionDeclaration(tool) {
         const jsonSchema = (0, zod_to_gemini_parameters_js_1.zodToGeminiParameters)(tool.schema);
         return {
             name: tool.name,
-            description: tool.description,
+            description: tool.description ?? `A function available to call.`,
             parameters: jsonSchema,
         };
     }
@@ -246,11 +242,13 @@ class AbstractGoogleLLMConnection extends GoogleAIConnection {
         if (!tools || tools.length === 0) {
             return [];
         }
-        if (this.isStructuredToolArray(tools)) {
+        if (tools.every(function_calling_1.isLangChainTool)) {
             return this.structuredToolsToGeminiTools(tools);
         }
         else {
-            if (tools.length === 1 && !tools[0].functionDeclarations?.length) {
+            if (tools.length === 1 &&
+                (!("functionDeclarations" in tools[0]) ||
+                    !tools[0].functionDeclarations?.length)) {
                 return [];
             }
             return tools;
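Two behavioural notes on this hunk: tool detection now uses `isLangChainTool` from `@langchain/core/utils/function_calling` instead of the old `lc_namespace` sniffing, and a missing tool description no longer propagates `undefined` into the Gemini function declaration. A small sketch of the fallback, assuming a `StructuredToolParams`-shaped tool without a description (names are illustrative):

```ts
import { z } from "zod";
import type { StructuredToolParams } from "@langchain/core/tools";

// StructuredToolParams only requires `name` and `schema`; `description` is optional.
const lookupOrder: StructuredToolParams = {
  name: "lookup_order",
  schema: z.object({ orderId: z.string() }),
};

// The declaration built from this tool now falls back to a generic description
// instead of carrying `undefined`:
const description = lookupOrder.description ?? `A function available to call.`;
// description === "A function available to call."
```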
@@ -1,6 +1,6 @@
 import { BaseLanguageModelCallOptions } from "@langchain/core/language_models/base";
 import { AsyncCaller, AsyncCallerCallOptions } from "@langchain/core/utils/async_caller";
-import { StructuredToolInterface } from "@langchain/core/tools";
+import { StructuredToolParams } from "@langchain/core/tools";
 import type { GoogleAIBaseLLMInput, GoogleConnectionParams, GoogleLLMModelFamily, GooglePlatformType, GoogleResponse, GoogleLLMResponse, GeminiContent, GeminiGenerationConfig, GeminiRequest, GeminiSafetySetting, GeminiTool, GeminiFunctionDeclaration, GoogleAIModelRequestParams } from "./types.js";
 import { GoogleAbstractedClient, GoogleAbstractedClientOpsMethod } from "./auth.js";
 export declare abstract class GoogleConnection<CallOptions extends AsyncCallerCallOptions, ResponseType extends GoogleResponse> {
@@ -49,9 +49,8 @@ export declare abstract class AbstractGoogleLLMConnection<MessageType, AuthOptio
     formatGenerationConfig(_input: MessageType, parameters: GoogleAIModelRequestParams): GeminiGenerationConfig;
     formatSafetySettings(_input: MessageType, parameters: GoogleAIModelRequestParams): GeminiSafetySetting[];
     formatSystemInstruction(_input: MessageType, _parameters: GoogleAIModelRequestParams): GeminiContent;
-    isStructuredToolArray(tools?: unknown[]): tools is StructuredToolInterface[];
-    structuredToolToFunctionDeclaration(tool: StructuredToolInterface): GeminiFunctionDeclaration;
-    structuredToolsToGeminiTools(tools: StructuredToolInterface[]): GeminiTool[];
+    structuredToolToFunctionDeclaration(tool: StructuredToolParams): GeminiFunctionDeclaration;
+    structuredToolsToGeminiTools(tools: StructuredToolParams[]): GeminiTool[];
     formatTools(_input: MessageType, parameters: GoogleAIModelRequestParams): GeminiTool[];
     formatToolConfig(parameters: GoogleAIModelRequestParams): GeminiRequest["toolConfig"] | undefined;
     formatData(input: MessageType, parameters: GoogleAIModelRequestParams): GeminiRequest;
@@ -1,4 +1,5 @@
 import { getRuntimeEnvironment } from "@langchain/core/utils/env";
+import { isLangChainTool } from "@langchain/core/utils/function_calling";
 import { zodToGeminiParameters } from "./utils/zod_to_gemini_parameters.js";
 export class GoogleConnection {
     constructor(caller, client, streaming) {
@@ -215,16 +216,11 @@ export class AbstractGoogleLLMConnection extends GoogleAIConnection {
     formatSystemInstruction(_input, _parameters) {
         return {};
     }
-    // Borrowed from the OpenAI invocation params test
-    isStructuredToolArray(tools) {
-        return (tools !== undefined &&
-            tools.every((tool) => Array.isArray(tool.lc_namespace)));
-    }
     structuredToolToFunctionDeclaration(tool) {
         const jsonSchema = zodToGeminiParameters(tool.schema);
         return {
             name: tool.name,
-            description: tool.description,
+            description: tool.description ?? `A function available to call.`,
             parameters: jsonSchema,
         };
     }
@@ -240,11 +236,13 @@ export class AbstractGoogleLLMConnection extends GoogleAIConnection {
         if (!tools || tools.length === 0) {
             return [];
         }
-        if (this.isStructuredToolArray(tools)) {
+        if (tools.every(isLangChainTool)) {
             return this.structuredToolsToGeminiTools(tools);
         }
         else {
-            if (tools.length === 1 && !tools[0].functionDeclarations?.length) {
+            if (tools.length === 1 &&
+                (!("functionDeclarations" in tools[0]) ||
+                    !tools[0].functionDeclarations?.length)) {
                 return [];
             }
             return tools;
package/dist/types.d.ts CHANGED
@@ -1,6 +1,5 @@
 import type { BaseLLMParams } from "@langchain/core/language_models/llms";
-import { StructuredToolInterface } from "@langchain/core/tools";
-import type { BaseChatModelCallOptions } from "@langchain/core/language_models/chat_models";
+import type { BaseChatModelCallOptions, BindToolsInput } from "@langchain/core/language_models/chat_models";
 import type { JsonStream } from "./utils/stream.js";
 /**
  * Parameters needed to setup the client connection.
@@ -36,6 +35,7 @@ export interface GoogleConnectionParams<AuthOptions> extends GoogleClientParams<
 export interface GoogleAISafetySetting {
     category: string;
     threshold: string;
+    method?: string;
 }
 export type GoogleAIResponseMimeType = "text/plain" | "application/json";
 export interface GoogleAIModelParams {
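`GoogleAISafetySetting` gains an optional `method` field. On Vertex AI this corresponds to the harm-block method (for example `"SEVERITY"` or `"PROBABILITY"`), but the type only constrains it to `string`, so treat the exact values below as assumptions. A shape-level example of a setting object that satisfies the updated interface:

```ts
// Mirrors the updated GoogleAISafetySetting shape; the specific category,
// threshold, and method strings are illustrative assumptions, not validated here.
const safetySetting: { category: string; threshold: string; method?: string } = {
  category: "HARM_CATEGORY_DANGEROUS_CONTENT",
  threshold: "BLOCK_ONLY_HIGH",
  method: "SEVERITY",
};
```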
@@ -91,11 +91,12 @@ export interface GoogleAIModelParams {
      */
     streaming?: boolean;
 }
+export type GoogleAIToolType = BindToolsInput | GeminiTool;
 /**
  * The params which can be passed to the API at request time.
  */
 export interface GoogleAIModelRequestParams extends GoogleAIModelParams {
-    tools?: StructuredToolInterface[] | GeminiTool[];
+    tools?: GoogleAIToolType[];
     /**
      * Force the model to use tools in a specific way.
      *
@@ -36,35 +36,35 @@ function processToolChoice(toolChoice, allowedFunctionNames) {
     }
     throw new Error("Object inputs for tool_choice not supported.");
 }
-function convertToGeminiTools(structuredTools) {
-    const tools = [
+function convertToGeminiTools(tools) {
+    const geminiTools = [
         {
             functionDeclarations: [],
         },
     ];
-    structuredTools.forEach((tool) => {
+    tools.forEach((tool) => {
         if ("functionDeclarations" in tool &&
             Array.isArray(tool.functionDeclarations)) {
             const funcs = tool.functionDeclarations;
-            tools[0].functionDeclarations?.push(...funcs);
+            geminiTools[0].functionDeclarations?.push(...funcs);
         }
-        else if ((0, function_calling_1.isStructuredTool)(tool)) {
+        else if ((0, function_calling_1.isLangChainTool)(tool)) {
             const jsonSchema = (0, zod_to_gemini_parameters_js_1.zodToGeminiParameters)(tool.schema);
-            tools[0].functionDeclarations?.push({
+            geminiTools[0].functionDeclarations?.push({
                 name: tool.name,
-                description: tool.description,
+                description: tool.description ?? `A function available to call.`,
                 parameters: jsonSchema,
             });
         }
         else if ((0, base_1.isOpenAITool)(tool)) {
-            tools[0].functionDeclarations?.push({
+            geminiTools[0].functionDeclarations?.push({
                 name: tool.function.name,
                 description: tool.function.description ?? `A function available to call.`,
                 parameters: (0, zod_to_gemini_parameters_js_1.jsonSchemaToGeminiParameters)(tool.function.parameters),
             });
         }
     });
-    return tools;
+    return geminiTools;
 }
 exports.convertToGeminiTools = convertToGeminiTools;
 function copyAIModelParamsInto(params, options, target) {
@@ -1,9 +1,6 @@
-import { StructuredToolInterface } from "@langchain/core/tools";
-import { ToolDefinition } from "@langchain/core/language_models/base";
-import { RunnableToolLike } from "@langchain/core/runnables";
-import type { GeminiTool, GoogleAIBaseLanguageModelCallOptions, GoogleAIModelParams, GoogleAIModelRequestParams, GoogleLLMModelFamily } from "../types.js";
+import type { GeminiTool, GoogleAIBaseLanguageModelCallOptions, GoogleAIModelParams, GoogleAIModelRequestParams, GoogleAIToolType, GoogleLLMModelFamily } from "../types.js";
 export declare function copyAIModelParams(params: GoogleAIModelParams | undefined, options: GoogleAIBaseLanguageModelCallOptions | undefined): GoogleAIModelRequestParams;
-export declare function convertToGeminiTools(structuredTools: (StructuredToolInterface | Record<string, unknown> | ToolDefinition | RunnableToolLike)[]): GeminiTool[];
+export declare function convertToGeminiTools(tools: GoogleAIToolType[]): GeminiTool[];
 export declare function copyAIModelParamsInto(params: GoogleAIModelParams | undefined, options: GoogleAIBaseLanguageModelCallOptions | undefined, target: GoogleAIModelParams): GoogleAIModelRequestParams;
 export declare function modelToFamily(modelName: string | undefined): GoogleLLMModelFamily;
 export declare function validateModelParams(params: GoogleAIModelParams | undefined): void;
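As declared above, `convertToGeminiTools` now takes the `GoogleAIToolType` union and folds every input into a single `GeminiTool`: existing `GeminiTool`s contribute their declarations directly, LangChain tools are converted from their zod schemas, and OpenAI-style tool definitions are mapped by name, description, and parameters. A hedged usage sketch; the import path from the package root is an assumption, and the tool names are illustrative:

```ts
// Assumption: convertToGeminiTools is reachable from the package entry point.
import { convertToGeminiTools } from "@langchain/google-common";
import { tool } from "@langchain/core/tools";
import { z } from "zod";

// A LangChain tool.
const search = tool(async ({ query }: { query: string }) => `results for ${query}`, {
  name: "search",
  description: "Search the web.",
  schema: z.object({ query: z.string() }),
});

// An OpenAI-style tool definition (no description) and a raw GeminiTool,
// both valid GoogleAIToolType values.
const openAITool = {
  type: "function" as const,
  function: {
    name: "get_time",
    parameters: { type: "object", properties: {} },
  },
};
const rawGeminiTool = {
  functionDeclarations: [
    { name: "echo", description: "Echo the input.", parameters: { type: "object", properties: {} } },
  ],
};

// All three are merged into one GeminiTool with three functionDeclarations;
// the description-less OpenAI tool gets the "A function available to call." fallback.
const [geminiTool] = convertToGeminiTools([search, rawGeminiTool, openAITool]);
```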
@@ -1,5 +1,5 @@
-import { isOpenAITool, } from "@langchain/core/language_models/base";
-import { isStructuredTool } from "@langchain/core/utils/function_calling";
+import { isOpenAITool } from "@langchain/core/language_models/base";
+import { isLangChainTool } from "@langchain/core/utils/function_calling";
 import { isModelGemini, validateGeminiParams } from "./gemini.js";
 import { jsonSchemaToGeminiParameters, zodToGeminiParameters, } from "./zod_to_gemini_parameters.js";
 export function copyAIModelParams(params, options) {
@@ -32,35 +32,35 @@ function processToolChoice(toolChoice, allowedFunctionNames) {
     }
     throw new Error("Object inputs for tool_choice not supported.");
 }
-export function convertToGeminiTools(structuredTools) {
-    const tools = [
+export function convertToGeminiTools(tools) {
+    const geminiTools = [
         {
             functionDeclarations: [],
         },
     ];
-    structuredTools.forEach((tool) => {
+    tools.forEach((tool) => {
         if ("functionDeclarations" in tool &&
             Array.isArray(tool.functionDeclarations)) {
             const funcs = tool.functionDeclarations;
-            tools[0].functionDeclarations?.push(...funcs);
+            geminiTools[0].functionDeclarations?.push(...funcs);
         }
-        else if (isStructuredTool(tool)) {
+        else if (isLangChainTool(tool)) {
             const jsonSchema = zodToGeminiParameters(tool.schema);
-            tools[0].functionDeclarations?.push({
+            geminiTools[0].functionDeclarations?.push({
                 name: tool.name,
-                description: tool.description,
+                description: tool.description ?? `A function available to call.`,
                 parameters: jsonSchema,
             });
         }
         else if (isOpenAITool(tool)) {
-            tools[0].functionDeclarations?.push({
+            geminiTools[0].functionDeclarations?.push({
                 name: tool.function.name,
                 description: tool.function.description ?? `A function available to call.`,
                 parameters: jsonSchemaToGeminiParameters(tool.function.parameters),
             });
         }
     });
-    return tools;
+    return geminiTools;
 }
 export function copyAIModelParamsInto(params, options, target) {
     const ret = target || {};
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/google-common",
-  "version": "0.0.22",
+  "version": "0.0.23",
   "description": "Core types and classes for Google services.",
   "type": "module",
   "engines": {
@@ -40,7 +40,7 @@
   "author": "LangChain",
   "license": "MIT",
   "dependencies": {
-    "@langchain/core": ">=0.2.16 <0.3.0",
+    "@langchain/core": ">=0.2.21 <0.3.0",
     "uuid": "^10.0.0",
     "zod-to-json-schema": "^3.22.4"
   },
@@ -63,7 +63,7 @@
     "jest": "^29.5.0",
     "jest-environment-node": "^29.6.4",
     "prettier": "^2.8.3",
-    "release-it": "^15.10.1",
+    "release-it": "^17.6.0",
     "rollup": "^4.5.2",
     "ts-jest": "^29.1.0",
     "typescript": "<5.2.0",