@langchain/google-genai 0.1.2 → 0.1.3-rc.0
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- package/dist/chat_models.cjs +12 -50
- package/dist/chat_models.js +14 -52
- package/dist/types.d.ts +2 -2
- package/dist/utils/common.cjs +86 -8
- package/dist/utils/common.js +86 -8
- package/dist/utils/tools.cjs +89 -0
- package/dist/utils/tools.d.ts +10 -0
- package/dist/utils/tools.js +85 -0
- package/dist/utils/zod_to_genai_parameters.d.ts +1 -1
- package/package.json +3 -3
package/dist/chat_models.cjs
CHANGED
@@ -9,6 +9,7 @@ const types_1 = require("@langchain/core/utils/types");
 const zod_to_genai_parameters_js_1 = require("./utils/zod_to_genai_parameters.cjs");
 const common_js_1 = require("./utils/common.cjs");
 const output_parsers_js_1 = require("./output_parsers.cjs");
+const tools_js_1 = require("./utils/tools.cjs");
 /**
  * Google Generative AI chat model integration.
  *
@@ -522,59 +523,20 @@ class ChatGoogleGenerativeAI extends chat_models_1.BaseChatModel {
         return "googlegenerativeai";
     }
     bindTools(tools, kwargs) {
-        return this.bind({ tools: (0,
+        return this.bind({ tools: (0, tools_js_1.convertToolsToGenAI)(tools)?.tools, ...kwargs });
     }
     invocationParams(options) {
-
-
-
-
-
-
-            genaiTools = (0, common_js_1.convertToGenerativeAITools)(options?.tools);
-        }
-        else {
-            genaiTools = options?.tools;
-        }
-        let toolConfig;
-        if (genaiTools?.length && options?.tool_choice) {
-            if (["any", "auto", "none"].some((c) => c === options.tool_choice)) {
-                const modeMap = {
-                    any: generative_ai_1.FunctionCallingMode.ANY,
-                    auto: generative_ai_1.FunctionCallingMode.AUTO,
-                    none: generative_ai_1.FunctionCallingMode.NONE,
-                };
-                toolConfig = {
-                    functionCallingConfig: {
-                        mode: modeMap[options.tool_choice] ??
-                            "MODE_UNSPECIFIED",
-                        allowedFunctionNames: options.allowedFunctionNames,
-                    },
-                };
-            }
-            else if (typeof options.tool_choice === "string") {
-                toolConfig = {
-                    functionCallingConfig: {
-                        mode: generative_ai_1.FunctionCallingMode.ANY,
-                        allowedFunctionNames: [
-                            ...(options.allowedFunctionNames ?? []),
-                            options.tool_choice,
-                        ],
-                    },
-                };
-            }
-            if (!options.tool_choice && options.allowedFunctionNames) {
-                toolConfig = {
-                    functionCallingConfig: {
-                        mode: generative_ai_1.FunctionCallingMode.ANY,
-                        allowedFunctionNames: options.allowedFunctionNames,
-                    },
-                };
-            }
-        }
+        const toolsAndConfig = options?.tools?.length
+            ? (0, tools_js_1.convertToolsToGenAI)(options.tools, {
+                toolChoice: options.tool_choice,
+                allowedFunctionNames: options.allowedFunctionNames,
+            })
+            : undefined;
         return {
-            tools:
-                toolConfig
+            ...(toolsAndConfig?.tools ? { tools: toolsAndConfig.tools } : {}),
+            ...(toolsAndConfig?.toolConfig
+                ? { toolConfig: toolsAndConfig.toolConfig }
+                : {}),
         };
     }
     async _generate(messages, options, runManager) {
package/dist/chat_models.js
CHANGED
@@ -1,11 +1,12 @@
-import { GoogleGenerativeAI as GenerativeAI,
+import { GoogleGenerativeAI as GenerativeAI, } from "@google/generative-ai";
 import { getEnvironmentVariable } from "@langchain/core/utils/env";
 import { BaseChatModel, } from "@langchain/core/language_models/chat_models";
 import { RunnablePassthrough, RunnableSequence, } from "@langchain/core/runnables";
 import { isZodSchema } from "@langchain/core/utils/types";
 import { zodToGenerativeAIParameters } from "./utils/zod_to_genai_parameters.js";
-import { convertBaseMessagesToContent, convertResponseContentToChatGenerationChunk,
+import { convertBaseMessagesToContent, convertResponseContentToChatGenerationChunk, mapGenerateContentResultToChatResult, } from "./utils/common.js";
 import { GoogleGenerativeAIToolsOutputParser } from "./output_parsers.js";
+import { convertToolsToGenAI } from "./utils/tools.js";
 /**
  * Google Generative AI chat model integration.
  *
@@ -519,59 +520,20 @@ export class ChatGoogleGenerativeAI extends BaseChatModel {
         return "googlegenerativeai";
     }
     bindTools(tools, kwargs) {
-        return this.bind({ tools:
+        return this.bind({ tools: convertToolsToGenAI(tools)?.tools, ...kwargs });
     }
     invocationParams(options) {
-
-
-
-
-
-
-            genaiTools = convertToGenerativeAITools(options?.tools);
-        }
-        else {
-            genaiTools = options?.tools;
-        }
-        let toolConfig;
-        if (genaiTools?.length && options?.tool_choice) {
-            if (["any", "auto", "none"].some((c) => c === options.tool_choice)) {
-                const modeMap = {
-                    any: FunctionCallingMode.ANY,
-                    auto: FunctionCallingMode.AUTO,
-                    none: FunctionCallingMode.NONE,
-                };
-                toolConfig = {
-                    functionCallingConfig: {
-                        mode: modeMap[options.tool_choice] ??
-                            "MODE_UNSPECIFIED",
-                        allowedFunctionNames: options.allowedFunctionNames,
-                    },
-                };
-            }
-            else if (typeof options.tool_choice === "string") {
-                toolConfig = {
-                    functionCallingConfig: {
-                        mode: FunctionCallingMode.ANY,
-                        allowedFunctionNames: [
-                            ...(options.allowedFunctionNames ?? []),
-                            options.tool_choice,
-                        ],
-                    },
-                };
-            }
-            if (!options.tool_choice && options.allowedFunctionNames) {
-                toolConfig = {
-                    functionCallingConfig: {
-                        mode: FunctionCallingMode.ANY,
-                        allowedFunctionNames: options.allowedFunctionNames,
-                    },
-                };
-            }
-        }
+        const toolsAndConfig = options?.tools?.length
+            ? convertToolsToGenAI(options.tools, {
+                toolChoice: options.tool_choice,
+                allowedFunctionNames: options.allowedFunctionNames,
+            })
+            : undefined;
         return {
-            tools:
-                toolConfig
+            ...(toolsAndConfig?.tools ? { tools: toolsAndConfig.tools } : {}),
+            ...(toolsAndConfig?.toolConfig
+                ? { toolConfig: toolsAndConfig.toolConfig }
+                : {}),
         };
     }
     async _generate(messages, options, runManager) {
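Both builds now delegate tool handling to the new convertToolsToGenAI helper, and invocationParams only includes the tools / toolConfig keys when they resolve to values. A minimal sketch of how that surfaces to callers; the weather tool, its schema, and the model name are illustrative assumptions, not part of this package:

import { z } from "zod";
import { tool } from "@langchain/core/tools";
import { ChatGoogleGenerativeAI } from "@langchain/google-genai";

// Hypothetical LangChain tool, used only to exercise the conversion path.
const getWeather = tool(async ({ city }) => `It is sunny in ${city}.`, {
  name: "get_weather",
  description: "Look up the current weather for a city.",
  schema: z.object({ city: z.string() }),
});

// Assumes GOOGLE_API_KEY is available in the environment.
const model = new ChatGoogleGenerativeAI({ model: "gemini-1.5-flash" });

// bindTools now routes through convertToolsToGenAI(tools)?.tools.
const modelWithTools = model.bindTools([getWeather]);

// With tools plus a tool_choice, invocationParams should emit both `tools` and
// `toolConfig`; with no tools at all it should emit neither key.
const params = model.invocationParams({
  tools: [getWeather],
  tool_choice: "auto",
});
console.log(params.tools, params.toolConfig);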
package/dist/types.d.ts
CHANGED
@@ -1,3 +1,3 @@
-import { FunctionDeclarationsTool as GoogleGenerativeAIFunctionDeclarationsTool } from "@google/generative-ai";
+import { CodeExecutionTool, FunctionDeclarationsTool as GoogleGenerativeAIFunctionDeclarationsTool, GoogleSearchRetrievalTool } from "@google/generative-ai";
 import { BindToolsInput } from "@langchain/core/language_models/chat_models";
-export type GoogleGenerativeAIToolType = BindToolsInput | GoogleGenerativeAIFunctionDeclarationsTool;
+export type GoogleGenerativeAIToolType = BindToolsInput | GoogleGenerativeAIFunctionDeclarationsTool | CodeExecutionTool | GoogleSearchRetrievalTool;
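With the widened GoogleGenerativeAIToolType union, Google's built-in code-execution and search-retrieval tools should now type-check when passed to bindTools alongside regular LangChain tools. A rough sketch under that assumption; the model name and the empty tool configs are illustrative:

import { ChatGoogleGenerativeAI } from "@langchain/google-genai";

// Assumes GOOGLE_API_KEY is available in the environment.
const model = new ChatGoogleGenerativeAI({ model: "gemini-1.5-flash" });

// CodeExecutionTool: let the model write and run code server-side.
const withCodeExecution = model.bindTools([{ codeExecution: {} }]);

// GoogleSearchRetrievalTool: ground responses with Google Search retrieval.
const withSearch = model.bindTools([{ googleSearchRetrieval: {} }]);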
package/dist/utils/common.cjs
CHANGED
@@ -71,7 +71,7 @@ function convertMessageContentToParts(message, isMultimodalModel) {
             },
         }));
     }
-    else if (message.
+    else if (message.getType() === "tool" && message.name && message.content) {
         functionResponses = [
             {
                 functionResponse: {
@@ -88,6 +88,16 @@ function convertMessageContentToParts(message, isMultimodalModel) {
                     text: c.text,
                 };
             }
+            else if (c.type === "executableCode") {
+                return {
+                    executableCode: c.executableCode,
+                };
+            }
+            else if (c.type === "codeExecutionResult") {
+                return {
+                    codeExecutionResult: c.codeExecutionResult,
+                };
+            }
             if (c.type === "image_url") {
                 if (!isMultimodalModel) {
                     throw new Error(`This model does not support images`);
@@ -192,12 +202,45 @@ function mapGenerateContentResultToChatResult(response, extra) {
     }
     const functionCalls = response.functionCalls();
     const [candidate] = response.candidates;
-    const { content, ...generationInfo } = candidate;
-
+    const { content: candidateContent, ...generationInfo } = candidate;
+    let content;
+    if (candidateContent?.parts.length === 1 && candidateContent.parts[0].text) {
+        content = candidateContent.parts[0].text;
+    }
+    else {
+        content = candidateContent.parts.map((p) => {
+            if ("text" in p) {
+                return {
+                    type: "text",
+                    text: p.text,
+                };
+            }
+            else if ("executableCode" in p) {
+                return {
+                    type: "executableCode",
+                    executableCode: p.executableCode,
+                };
+            }
+            else if ("codeExecutionResult" in p) {
+                return {
+                    type: "codeExecutionResult",
+                    codeExecutionResult: p.codeExecutionResult,
+                };
+            }
+            return p;
+        });
+    }
+    let text = "";
+    if (typeof content === "string") {
+        text = content;
+    }
+    else if ("text" in content[0]) {
+        text = content[0].text;
+    }
     const generation = {
         text,
         message: new messages_1.AIMessage({
-            content
+            content,
             tool_calls: functionCalls?.map((fc) => ({
                 ...fc,
                 type: "tool_call",
@@ -220,8 +263,43 @@ function convertResponseContentToChatGenerationChunk(response, extra) {
     }
     const functionCalls = response.functionCalls();
     const [candidate] = response.candidates;
-    const { content, ...generationInfo } = candidate;
-
+    const { content: candidateContent, ...generationInfo } = candidate;
+    let content;
+    // Checks if some parts do not have text. If false, it means that the content is a string.
+    if (candidateContent?.parts &&
+        candidateContent.parts.every((p) => "text" in p)) {
+        content = candidateContent.parts.map((p) => p.text).join("");
+    }
+    else if (candidateContent.parts) {
+        content = candidateContent.parts.map((p) => {
+            if ("text" in p) {
+                return {
+                    type: "text",
+                    text: p.text,
+                };
+            }
+            else if ("executableCode" in p) {
+                return {
+                    type: "executableCode",
+                    executableCode: p.executableCode,
+                };
+            }
+            else if ("codeExecutionResult" in p) {
+                return {
+                    type: "codeExecutionResult",
+                    codeExecutionResult: p.codeExecutionResult,
+                };
+            }
+            return p;
+        });
+    }
+    let text = "";
+    if (content && typeof content === "string") {
+        text = content;
+    }
+    else if (content && typeof content === "object" && "text" in content[0]) {
+        text = content[0].text;
+    }
     const toolCallChunks = [];
     if (functionCalls) {
         toolCallChunks.push(...functionCalls.map((fc) => ({
@@ -234,8 +312,8 @@ function convertResponseContentToChatGenerationChunk(response, extra) {
     return new outputs_1.ChatGenerationChunk({
         text,
         message: new messages_1.AIMessageChunk({
-            content:
-            name: !
+            content: content || "",
+            name: !candidateContent ? undefined : candidateContent.role,
             tool_call_chunks: toolCallChunks,
             // Each chunk can have unique "generationInfo", and merging strategy is unclear,
             // so leave blank for now.
package/dist/utils/common.js
CHANGED
@@ -66,7 +66,7 @@ export function convertMessageContentToParts(message, isMultimodalModel) {
             },
         }));
     }
-    else if (message.
+    else if (message.getType() === "tool" && message.name && message.content) {
         functionResponses = [
             {
                 functionResponse: {
@@ -83,6 +83,16 @@ export function convertMessageContentToParts(message, isMultimodalModel) {
                     text: c.text,
                 };
             }
+            else if (c.type === "executableCode") {
+                return {
+                    executableCode: c.executableCode,
+                };
+            }
+            else if (c.type === "codeExecutionResult") {
+                return {
+                    codeExecutionResult: c.codeExecutionResult,
+                };
+            }
             if (c.type === "image_url") {
                 if (!isMultimodalModel) {
                     throw new Error(`This model does not support images`);
@@ -185,12 +195,45 @@ export function mapGenerateContentResultToChatResult(response, extra) {
     }
     const functionCalls = response.functionCalls();
     const [candidate] = response.candidates;
-    const { content, ...generationInfo } = candidate;
-
+    const { content: candidateContent, ...generationInfo } = candidate;
+    let content;
+    if (candidateContent?.parts.length === 1 && candidateContent.parts[0].text) {
+        content = candidateContent.parts[0].text;
+    }
+    else {
+        content = candidateContent.parts.map((p) => {
+            if ("text" in p) {
+                return {
+                    type: "text",
+                    text: p.text,
+                };
+            }
+            else if ("executableCode" in p) {
+                return {
+                    type: "executableCode",
+                    executableCode: p.executableCode,
+                };
+            }
+            else if ("codeExecutionResult" in p) {
+                return {
+                    type: "codeExecutionResult",
+                    codeExecutionResult: p.codeExecutionResult,
+                };
+            }
+            return p;
+        });
+    }
+    let text = "";
+    if (typeof content === "string") {
+        text = content;
+    }
+    else if ("text" in content[0]) {
+        text = content[0].text;
+    }
     const generation = {
         text,
         message: new AIMessage({
-            content
+            content,
             tool_calls: functionCalls?.map((fc) => ({
                 ...fc,
                 type: "tool_call",
@@ -212,8 +255,43 @@ export function convertResponseContentToChatGenerationChunk(response, extra) {
     }
     const functionCalls = response.functionCalls();
     const [candidate] = response.candidates;
-    const { content, ...generationInfo } = candidate;
-
+    const { content: candidateContent, ...generationInfo } = candidate;
+    let content;
+    // Checks if some parts do not have text. If false, it means that the content is a string.
+    if (candidateContent?.parts &&
+        candidateContent.parts.every((p) => "text" in p)) {
+        content = candidateContent.parts.map((p) => p.text).join("");
+    }
+    else if (candidateContent.parts) {
+        content = candidateContent.parts.map((p) => {
+            if ("text" in p) {
+                return {
+                    type: "text",
+                    text: p.text,
+                };
+            }
+            else if ("executableCode" in p) {
+                return {
+                    type: "executableCode",
+                    executableCode: p.executableCode,
+                };
+            }
+            else if ("codeExecutionResult" in p) {
+                return {
+                    type: "codeExecutionResult",
+                    codeExecutionResult: p.codeExecutionResult,
+                };
+            }
+            return p;
+        });
+    }
+    let text = "";
+    if (content && typeof content === "string") {
+        text = content;
+    }
+    else if (content && typeof content === "object" && "text" in content[0]) {
+        text = content[0].text;
+    }
     const toolCallChunks = [];
     if (functionCalls) {
         toolCallChunks.push(...functionCalls.map((fc) => ({
@@ -226,8 +304,8 @@ export function convertResponseContentToChatGenerationChunk(response, extra) {
     return new ChatGenerationChunk({
         text,
         message: new AIMessageChunk({
-            content:
-            name: !
+            content: content || "",
+            name: !candidateContent ? undefined : candidateContent.role,
             tool_call_chunks: toolCallChunks,
             // Each chunk can have unique "generationInfo", and merging strategy is unclear,
             // so leave blank for now.
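With these mapping changes, non-text candidate parts survive the round trip: a response produced with Gemini's code execution should surface as an AIMessage whose content is a list of typed parts rather than a flat string. A hedged sketch of reading such a result; the prompt and model name are assumptions, and the exact parts depend on the API response:

import { ChatGoogleGenerativeAI } from "@langchain/google-genai";

// Assumes GOOGLE_API_KEY is available in the environment.
const model = new ChatGoogleGenerativeAI({ model: "gemini-1.5-flash" }).bindTools([
  { codeExecution: {} },
]);

const result = await model.invoke("Use code to compute the 10th Fibonacci number.");

// When the candidate mixes text, executableCode, and codeExecutionResult parts,
// result.content is an array of typed blocks instead of a plain string.
const parts = Array.isArray(result.content) ? result.content : [];
for (const part of parts as Array<Record<string, unknown>>) {
  if (part.type === "executableCode") {
    console.log("generated code:", part.executableCode);
  } else if (part.type === "codeExecutionResult") {
    console.log("execution result:", part.codeExecutionResult);
  } else if (part.type === "text") {
    console.log("text:", part.text);
  }
}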
package/dist/utils/tools.cjs
ADDED
@@ -0,0 +1,89 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.convertToolsToGenAI = void 0;
+const generative_ai_1 = require("@google/generative-ai");
+const function_calling_1 = require("@langchain/core/utils/function_calling");
+const common_js_1 = require("./common.cjs");
+function convertToolsToGenAI(tools, extra) {
+    // Extract function declaration processing to a separate function
+    const genAITools = processTools(tools);
+    // Simplify tool config creation
+    const toolConfig = createToolConfig(genAITools, extra);
+    return { tools: genAITools, toolConfig };
+}
+exports.convertToolsToGenAI = convertToolsToGenAI;
+function processTools(tools) {
+    let functionDeclarationTools = [];
+    const genAITools = [];
+    tools.forEach((tool) => {
+        if ((0, function_calling_1.isLangChainTool)(tool)) {
+            const [convertedTool] = (0, common_js_1.convertToGenerativeAITools)([
+                tool,
+            ]);
+            if (convertedTool.functionDeclarations) {
+                functionDeclarationTools.push(...convertedTool.functionDeclarations);
+            }
+        }
+        else {
+            genAITools.push(tool);
+        }
+    });
+    const genAIFunctionDeclaration = genAITools.find((t) => "functionDeclarations" in t);
+    if (genAIFunctionDeclaration) {
+        return genAITools.map((tool) => {
+            if (functionDeclarationTools?.length > 0 &&
+                "functionDeclarations" in tool) {
+                const newTool = {
+                    functionDeclarations: [
+                        ...(tool.functionDeclarations || []),
+                        ...functionDeclarationTools,
+                    ],
+                };
+                // Clear the functionDeclarationTools array so it is not passed again
+                functionDeclarationTools = [];
+                return newTool;
+            }
+            return tool;
+        });
+    }
+    return [
+        ...genAITools,
+        ...(functionDeclarationTools.length > 0
+            ? [
+                {
+                    functionDeclarations: functionDeclarationTools,
+                },
+            ]
+            : []),
+    ];
+}
+function createToolConfig(genAITools, extra) {
+    if (!genAITools.length || !extra)
+        return undefined;
+    const { toolChoice, allowedFunctionNames } = extra;
+    const modeMap = {
+        any: generative_ai_1.FunctionCallingMode.ANY,
+        auto: generative_ai_1.FunctionCallingMode.AUTO,
+        none: generative_ai_1.FunctionCallingMode.NONE,
+    };
+    if (toolChoice && ["any", "auto", "none"].includes(toolChoice)) {
+        return {
+            functionCallingConfig: {
+                mode: modeMap[toolChoice] ?? "MODE_UNSPECIFIED",
+                allowedFunctionNames,
+            },
+        };
+    }
+    if (typeof toolChoice === "string" || allowedFunctionNames) {
+        return {
+            functionCallingConfig: {
+                mode: generative_ai_1.FunctionCallingMode.ANY,
+                allowedFunctionNames: [
+                    ...(allowedFunctionNames ?? []),
+                    ...(toolChoice && typeof toolChoice === "string" ? [toolChoice] : []),
+                ],
+            },
+        };
+    }
+    return undefined;
+}
package/dist/utils/tools.d.ts
ADDED
@@ -0,0 +1,10 @@
+import { Tool as GenerativeAITool, ToolConfig } from "@google/generative-ai";
+import { ToolChoice } from "@langchain/core/language_models/chat_models";
+import { GoogleGenerativeAIToolType } from "../types.js";
+export declare function convertToolsToGenAI(tools: GoogleGenerativeAIToolType[], extra?: {
+    toolChoice?: ToolChoice;
+    allowedFunctionNames?: string[];
+}): {
+    tools: GenerativeAITool[];
+    toolConfig?: ToolConfig;
+};
package/dist/utils/tools.js
ADDED
@@ -0,0 +1,85 @@
+import { FunctionCallingMode, } from "@google/generative-ai";
+import { isLangChainTool } from "@langchain/core/utils/function_calling";
+import { convertToGenerativeAITools } from "./common.js";
+export function convertToolsToGenAI(tools, extra) {
+    // Extract function declaration processing to a separate function
+    const genAITools = processTools(tools);
+    // Simplify tool config creation
+    const toolConfig = createToolConfig(genAITools, extra);
+    return { tools: genAITools, toolConfig };
+}
+function processTools(tools) {
+    let functionDeclarationTools = [];
+    const genAITools = [];
+    tools.forEach((tool) => {
+        if (isLangChainTool(tool)) {
+            const [convertedTool] = convertToGenerativeAITools([
+                tool,
+            ]);
+            if (convertedTool.functionDeclarations) {
+                functionDeclarationTools.push(...convertedTool.functionDeclarations);
+            }
+        }
+        else {
+            genAITools.push(tool);
+        }
+    });
+    const genAIFunctionDeclaration = genAITools.find((t) => "functionDeclarations" in t);
+    if (genAIFunctionDeclaration) {
+        return genAITools.map((tool) => {
+            if (functionDeclarationTools?.length > 0 &&
+                "functionDeclarations" in tool) {
+                const newTool = {
+                    functionDeclarations: [
+                        ...(tool.functionDeclarations || []),
+                        ...functionDeclarationTools,
+                    ],
+                };
+                // Clear the functionDeclarationTools array so it is not passed again
+                functionDeclarationTools = [];
+                return newTool;
+            }
+            return tool;
+        });
+    }
+    return [
+        ...genAITools,
+        ...(functionDeclarationTools.length > 0
+            ? [
+                {
+                    functionDeclarations: functionDeclarationTools,
+                },
+            ]
+            : []),
+    ];
+}
+function createToolConfig(genAITools, extra) {
+    if (!genAITools.length || !extra)
+        return undefined;
+    const { toolChoice, allowedFunctionNames } = extra;
+    const modeMap = {
+        any: FunctionCallingMode.ANY,
+        auto: FunctionCallingMode.AUTO,
+        none: FunctionCallingMode.NONE,
+    };
+    if (toolChoice && ["any", "auto", "none"].includes(toolChoice)) {
+        return {
+            functionCallingConfig: {
+                mode: modeMap[toolChoice] ?? "MODE_UNSPECIFIED",
+                allowedFunctionNames,
+            },
+        };
+    }
+    if (typeof toolChoice === "string" || allowedFunctionNames) {
+        return {
+            functionCallingConfig: {
+                mode: FunctionCallingMode.ANY,
+                allowedFunctionNames: [
+                    ...(allowedFunctionNames ?? []),
+                    ...(toolChoice && typeof toolChoice === "string" ? [toolChoice] : []),
+                ],
+            },
+        };
+    }
+    return undefined;
+}
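The new helper can also be exercised directly, for example from a unit test inside this package; it is an internal utility and may not be re-exported from the public entry point, so the relative import below is an assumption, as are the add and get_time tools. The sketch shows the two behaviors the module implements: merging converted LangChain tools into an existing functionDeclarations entry, and turning toolChoice / allowedFunctionNames into a functionCallingConfig:

import { z } from "zod";
import { tool } from "@langchain/core/tools";
import { SchemaType } from "@google/generative-ai";
import { convertToolsToGenAI } from "./utils/tools.js";

// Hypothetical LangChain tool.
const add = tool(async ({ a, b }) => String(a + b), {
  name: "add",
  description: "Add two numbers.",
  schema: z.object({ a: z.number(), b: z.number() }),
});

// Raw Gemini tool passed through untouched, then merged with the converted
// LangChain tool into a single functionDeclarations entry.
const rawTool = {
  functionDeclarations: [
    {
      name: "get_time",
      description: "Return the current time.",
      parameters: { type: SchemaType.OBJECT, properties: {} },
    },
  ],
};

const { tools, toolConfig } = convertToolsToGenAI([add, rawTool], {
  toolChoice: "any",
  allowedFunctionNames: ["add"],
});
// Expected: one tools entry whose functionDeclarations contains both "get_time" and
// "add", and toolConfig.functionCallingConfig with mode ANY and allowedFunctionNames ["add"].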
package/dist/utils/zod_to_genai_parameters.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import type { z } from "zod";
-import { type FunctionDeclarationSchema as GenerativeAIFunctionDeclarationSchema, FunctionDeclarationSchemaType } from "@google/generative-ai";
+import { type FunctionDeclarationSchema as GenerativeAIFunctionDeclarationSchema, type SchemaType as FunctionDeclarationSchemaType } from "@google/generative-ai";
 export interface GenerativeAIJsonSchema extends Record<string, unknown> {
     properties?: Record<string, GenerativeAIJsonSchema>;
     type: FunctionDeclarationSchemaType;
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/google-genai",
-  "version": "0.1.2",
+  "version": "0.1.3-rc.0",
   "description": "Google Generative AI integration for LangChain.js",
   "type": "module",
   "engines": {
@@ -35,11 +35,11 @@
   "author": "LangChain",
   "license": "MIT",
   "dependencies": {
-    "@google/generative-ai": "^0.
+    "@google/generative-ai": "^0.21.0",
     "zod-to-json-schema": "^3.22.4"
   },
   "peerDependencies": {
-    "@langchain/core": ">=0.
+    "@langchain/core": ">=0.3.17 <0.4.0"
   },
   "devDependencies": {
     "@jest/globals": "^29.5.0",