@langchain/google-genai 0.0.21 → 0.0.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -5,7 +5,7 @@ import { ChatGenerationChunk, ChatResult } from "@langchain/core/outputs";
 import { BaseChatModel, LangSmithParams, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
 import { BaseLanguageModelCallOptions, BaseLanguageModelInput, StructuredOutputMethodOptions, ToolDefinition } from "@langchain/core/language_models/base";
 import { StructuredToolInterface } from "@langchain/core/tools";
-import { Runnable } from "@langchain/core/runnables";
+import { Runnable, RunnableToolLike } from "@langchain/core/runnables";
 import type { z } from "zod";
 export type BaseMessageExamplePair = {
     input: BaseMessage;
@@ -160,7 +160,7 @@ export declare class ChatGoogleGenerativeAI extends BaseChatModel<GoogleGenerati
     getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
     _combineLLMOutput(): never[];
     _llmType(): string;
-    bindTools(tools: (StructuredToolInterface | Record<string, unknown> | ToolDefinition)[], kwargs?: Partial<GoogleGenerativeAIChatCallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, GoogleGenerativeAIChatCallOptions>;
+    bindTools(tools: (StructuredToolInterface | Record<string, unknown> | ToolDefinition | RunnableToolLike)[], kwargs?: Partial<GoogleGenerativeAIChatCallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, GoogleGenerativeAIChatCallOptions>;
     invocationParams(options?: this["ParsedCallOptions"]): Omit<GenerateContentRequest, "contents">;
     _generate(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
     _streamResponseChunks(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
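
The widened `bindTools` signature admits `RunnableToolLike` alongside structured tools, raw schemas, and OpenAI-style `ToolDefinition`s. A minimal sketch of what that enables, assuming `Runnable.prototype.asTool` from `@langchain/core` >= 0.2.16; the `get_weather` tool, model name, prompt, and reliance on the `GOOGLE_API_KEY` environment variable are illustrative assumptions, not part of this diff:

```ts
import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
import { RunnableLambda } from "@langchain/core/runnables";
import { z } from "zod";

// A RunnableToolLike built from an ordinary runnable via .asTool()
// (available in @langchain/core >= 0.2.16). The tool itself is hypothetical.
const weatherTool = RunnableLambda.from(
  async ({ city }: { city: string }) => `It is sunny in ${city}.`
).asTool({
  name: "get_weather",
  description: "Look up the current weather for a city.",
  schema: z.object({ city: z.string() }),
});

// Reads the API key from GOOGLE_API_KEY unless one is passed explicitly.
const model = new ChatGoogleGenerativeAI({ model: "gemini-1.5-flash" });

// bindTools now accepts RunnableToolLike in addition to the previous union members.
const modelWithTools = model.bindTools([weatherTool]);

const response = await modelWithTools.invoke("What's the weather in Paris?");
console.log(response.tool_calls);
```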
@@ -198,7 +198,10 @@ function mapGenerateContentResultToChatResult(response, extra) {
         text,
         message: new messages_1.AIMessage({
             content: text,
-            tool_calls: functionCalls,
+            tool_calls: functionCalls?.map((fc) => ({
+                ...fc,
+                type: "tool_call",
+            })),
             additional_kwargs: {
                 ...generationInfo,
             },
@@ -225,6 +228,7 @@ function convertResponseContentToChatGenerationChunk(response, extra) {
             ...fc,
             args: JSON.stringify(fc.args),
             index: extra.index,
+            type: "tool_call_chunk",
         })));
     }
     return new outputs_1.ChatGenerationChunk({
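
In the CJS build (and the ESM build further down in this diff), each parsed Gemini function call now carries an explicit discriminator: `type: "tool_call"` on complete messages and `type: "tool_call_chunk"` on streamed fragments, matching the `ToolCall` and `ToolCallChunk` shapes from `@langchain/core/messages/tool`. A short sketch of what downstream code can rely on; the helper itself is illustrative:

```ts
import type { AIMessage } from "@langchain/core/messages";

// Illustrative helper: after this change, every parsed Gemini function call
// carries type: "tool_call" next to its name, args, and id.
function logToolCalls(message: AIMessage): void {
  for (const toolCall of message.tool_calls ?? []) {
    console.log(toolCall.type, toolCall.name, toolCall.args);
  }
}
```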
@@ -3,6 +3,7 @@ import { BaseMessage, UsageMetadata } from "@langchain/core/messages";
 import { ChatGenerationChunk, ChatResult } from "@langchain/core/outputs";
 import { StructuredToolInterface } from "@langchain/core/tools";
 import { ToolDefinition } from "@langchain/core/language_models/base";
+import { RunnableToolLike } from "@langchain/core/runnables";
 export declare function getMessageAuthor(message: BaseMessage): string;
 /**
  * Maps a message type to a Google Generative AI chat author.
@@ -20,4 +21,4 @@ export declare function convertResponseContentToChatGenerationChunk(response: En
     usageMetadata?: UsageMetadata | undefined;
     index: number;
 }): ChatGenerationChunk | null;
-export declare function convertToGenerativeAITools(structuredTools: (StructuredToolInterface | Record<string, unknown> | ToolDefinition)[]): GoogleGenerativeAIFunctionDeclarationsTool[];
+export declare function convertToGenerativeAITools(structuredTools: (StructuredToolInterface | Record<string, unknown> | ToolDefinition | RunnableToolLike)[]): GoogleGenerativeAIFunctionDeclarationsTool[];
@@ -191,7 +191,10 @@ export function mapGenerateContentResultToChatResult(response, extra) {
         text,
         message: new AIMessage({
             content: text,
-            tool_calls: functionCalls,
+            tool_calls: functionCalls?.map((fc) => ({
+                ...fc,
+                type: "tool_call",
+            })),
             additional_kwargs: {
                 ...generationInfo,
             },
@@ -217,6 +220,7 @@ export function convertResponseContentToChatGenerationChunk(response, extra) {
             ...fc,
             args: JSON.stringify(fc.args),
             index: extra.index,
+            type: "tool_call_chunk",
         })));
     }
     return new ChatGenerationChunk({
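
The same tagging lands in the ESM build. During streaming, the fragments exposed on `AIMessageChunk.tool_call_chunks` are now labelled `type: "tool_call_chunk"`, so concatenating chunks yields fully-typed tool calls. A minimal sketch, assuming an OpenAI-style `ToolDefinition` for the bound tool; the tool, model name, and prompt are illustrative:

```ts
import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
import { concat } from "@langchain/core/utils/stream";
import type { AIMessageChunk } from "@langchain/core/messages";

const model = new ChatGoogleGenerativeAI({ model: "gemini-1.5-flash" });

// A plain ToolDefinition; bindTools also accepts these alongside RunnableToolLike.
const modelWithTools = model.bindTools([
  {
    type: "function" as const,
    function: {
      name: "get_weather",
      description: "Look up the current weather for a city.",
      parameters: {
        type: "object",
        properties: { city: { type: "string" } },
        required: ["city"],
      },
    },
  },
]);

let aggregate: AIMessageChunk | undefined;
for await (const chunk of await modelWithTools.stream("What's the weather in Paris?")) {
  // Each streamed fragment now carries type: "tool_call_chunk".
  console.log(chunk.tool_call_chunks);
  aggregate = aggregate === undefined ? chunk : concat(aggregate, chunk);
}
// The aggregated message exposes the typed tool_calls parsed from the chunks.
console.log(aggregate?.tool_calls);
```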
@@ -8,8 +8,7 @@ function removeAdditionalProperties(
 obj) {
     if (typeof obj === "object" && obj !== null) {
         const newObj = { ...obj };
-        if ("additionalProperties" in newObj &&
-            typeof newObj.additionalProperties === "boolean") {
+        if ("additionalProperties" in newObj) {
             delete newObj.additionalProperties;
         }
         for (const key in newObj) {
@@ -5,8 +5,7 @@ export function removeAdditionalProperties(
 obj) {
     if (typeof obj === "object" && obj !== null) {
         const newObj = { ...obj };
-        if ("additionalProperties" in newObj &&
-            typeof newObj.additionalProperties === "boolean") {
+        if ("additionalProperties" in newObj) {
             delete newObj.additionalProperties;
         }
         for (const key in newObj) {
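
Both builds now drop `additionalProperties` from the converted JSON schema regardless of its value. Previously only a boolean was removed, so schemas where `zod-to-json-schema` emits a nested object for that keyword (for example `z.record(...)`) leaked it through to Gemini, whose function-declaration schema does not accept it. A small sketch of the two shapes involved; the example schema is made up:

```ts
import { z } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";

// .strict() yields `additionalProperties: false` (a boolean), while
// z.record(...) yields `additionalProperties: { "type": "string" }` (an object).
// After this change both forms are stripped before the schema reaches Gemini.
const jsonSchema = zodToJsonSchema(
  z.object({ tags: z.record(z.string()) }).strict()
);
console.log(JSON.stringify(jsonSchema, null, 2));
```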
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/google-genai",
-  "version": "0.0.21",
+  "version": "0.0.23",
   "description": "Sample integration for LangChain.js",
   "type": "module",
   "engines": {
@@ -36,7 +36,7 @@
   "license": "MIT",
   "dependencies": {
     "@google/generative-ai": "^0.7.0",
-    "@langchain/core": ">=0.2.9 <0.3.0",
+    "@langchain/core": ">=0.2.16 <0.3.0",
     "zod-to-json-schema": "^3.22.4"
   },
   "devDependencies": {