@langchain/google-genai 1.0.0 → 1.0.2
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/chat_models.cjs +24 -3
- package/dist/chat_models.cjs.map +1 -1
- package/dist/chat_models.d.cts +19 -0
- package/dist/chat_models.d.cts.map +1 -1
- package/dist/chat_models.d.ts +19 -0
- package/dist/chat_models.d.ts.map +1 -1
- package/dist/chat_models.js +24 -3
- package/dist/chat_models.js.map +1 -1
- package/dist/profiles.cjs +345 -0
- package/dist/profiles.cjs.map +1 -0
- package/dist/profiles.js +344 -0
- package/dist/profiles.js.map +1 -0
- package/dist/utils/common.cjs +57 -21
- package/dist/utils/common.cjs.map +1 -1
- package/dist/utils/common.js +57 -21
- package/dist/utils/common.js.map +1 -1
- package/package.json +9 -6
package/dist/profiles.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"profiles.js","names":["PROFILES: Record<string, ModelProfile>"],"sources":["../src/profiles.ts"],"sourcesContent":["/**\n * This file was automatically generated by an automated script. Do not edit manually.\n */\nimport type { ModelProfile } from \"@langchain/core/language_models/profile\";\nconst PROFILES: Record<string, ModelProfile> = {\n \"gemini-embedding-001\": {\n maxInputTokens: 2048,\n imageInputs: false,\n audioInputs: false,\n pdfInputs: false,\n videoInputs: false,\n maxOutputTokens: 3072,\n reasoningOutput: false,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: false,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-image\": {\n maxInputTokens: 32768,\n imageInputs: true,\n audioInputs: false,\n pdfInputs: false,\n videoInputs: false,\n maxOutputTokens: 32768,\n reasoningOutput: true,\n imageOutputs: true,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: false,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-preview-05-20\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: true,\n },\n \"gemini-flash-lite-latest\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: true,\n },\n \"gemini-2.5-flash\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: true,\n },\n \"gemini-flash-latest\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: true,\n },\n \"gemini-2.5-pro-preview-05-06\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: true,\n },\n \"gemini-2.5-flash-preview-tts\": {\n maxInputTokens: 8000,\n imageInputs: false,\n audioInputs: false,\n pdfInputs: false,\n videoInputs: false,\n maxOutputTokens: 16000,\n reasoningOutput: false,\n imageOutputs: false,\n audioOutputs: true,\n videoOutputs: false,\n toolCalling: false,\n structuredOutput: false,\n },\n \"gemini-2.0-flash-lite\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 8192,\n reasoningOutput: false,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: true,\n },\n \"gemini-live-2.5-flash-preview-native-audio\": {\n maxInputTokens: 131072,\n imageInputs: false,\n audioInputs: true,\n pdfInputs: false,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: true,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n 
\"gemini-2.0-flash\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 8192,\n reasoningOutput: false,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: true,\n },\n \"gemini-2.5-flash-lite\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: true,\n },\n \"gemini-2.5-pro-preview-06-05\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: true,\n },\n \"gemini-live-2.5-flash\": {\n maxInputTokens: 128000,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: false,\n videoInputs: true,\n maxOutputTokens: 8000,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: true,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-lite-preview-06-17\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-image-preview\": {\n maxInputTokens: 32768,\n imageInputs: true,\n audioInputs: false,\n pdfInputs: false,\n videoInputs: false,\n maxOutputTokens: 32768,\n reasoningOutput: true,\n imageOutputs: true,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: false,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-preview-09-2025\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: true,\n },\n \"gemini-2.5-flash-preview-04-17\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-pro-preview-tts\": {\n maxInputTokens: 8000,\n imageInputs: false,\n audioInputs: false,\n pdfInputs: false,\n videoInputs: false,\n maxOutputTokens: 16000,\n reasoningOutput: false,\n imageOutputs: false,\n audioOutputs: true,\n videoOutputs: false,\n toolCalling: false,\n structuredOutput: false,\n },\n \"gemini-2.5-pro\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: true,\n },\n \"gemini-1.5-flash\": {\n maxInputTokens: 1000000,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: false,\n videoInputs: true,\n maxOutputTokens: 8192,\n reasoningOutput: false,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-1.5-flash-8b\": {\n maxInputTokens: 1000000,\n imageInputs: true,\n 
audioInputs: true,\n pdfInputs: false,\n videoInputs: true,\n maxOutputTokens: 8192,\n reasoningOutput: false,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n \"gemini-2.5-flash-lite-preview-09-2025\": {\n maxInputTokens: 1048576,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: true,\n videoInputs: true,\n maxOutputTokens: 65536,\n reasoningOutput: true,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: true,\n },\n \"gemini-1.5-pro\": {\n maxInputTokens: 1000000,\n imageInputs: true,\n audioInputs: true,\n pdfInputs: false,\n videoInputs: true,\n maxOutputTokens: 8192,\n reasoningOutput: false,\n imageOutputs: false,\n audioOutputs: false,\n videoOutputs: false,\n toolCalling: true,\n structuredOutput: false,\n },\n};\nexport default PROFILES;\n"],"mappings":";AAIA,MAAMA,WAAyC;CAC7C,wBAAwB;EACtB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,0BAA0B;EACxB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,kCAAkC;EAChC,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,4BAA4B;EAC1B,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,oBAAoB;EAClB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,uBAAuB;EACrB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,gCAAgC;EAC9B,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,gCAAgC;EAC9B,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,yBAAyB;EACvB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,8CAA8C;EAC5C,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,oBAAoB;EAClB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,yBAAyB;EACvB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,gCAAgC;EAC9B,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,yBAAyB;EACvB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,uCAAuC;EACrC,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,kCAAkC;EAChC,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,oCAAoC;EAClC,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,kCAAkC;EAChC,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,i
BAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,8BAA8B;EAC5B,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,kBAAkB;EAChB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,oBAAoB;EAClB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,uBAAuB;EACrB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,yCAAyC;EACvC,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;CACD,kBAAkB;EAChB,gBAAgB;EAChB,aAAa;EACb,aAAa;EACb,WAAW;EACX,aAAa;EACb,iBAAiB;EACjB,iBAAiB;EACjB,cAAc;EACd,cAAc;EACd,cAAc;EACd,aAAa;EACb,kBAAkB;CACnB;AACF;AACD,uBAAe"}
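The added profiles module (generated from src/profiles.ts) maps each Gemini model name to a ModelProfile describing token limits, supported input and output modalities, and tool-calling/structured-output support. Below is a minimal sketch of how such a map could be consumed; the excerpt and the lookupProfile helper are illustrative and are not exports of the package:

```ts
import type { ModelProfile } from "@langchain/core/language_models/profile";

// Illustrative excerpt in the same shape as the generated PROFILES record.
const PROFILES: Record<string, ModelProfile> = {
  "gemini-2.5-flash": {
    maxInputTokens: 1048576,
    imageInputs: true,
    audioInputs: true,
    pdfInputs: true,
    videoInputs: true,
    maxOutputTokens: 65536,
    reasoningOutput: true,
    imageOutputs: false,
    audioOutputs: false,
    videoOutputs: false,
    toolCalling: true,
    structuredOutput: true,
  },
};

// Hypothetical lookup: exact match first, then longest matching prefix so a
// dated variant like "gemini-2.5-flash-preview-09-2025" still resolves.
function lookupProfile(model: string): ModelProfile | undefined {
  if (PROFILES[model]) return PROFILES[model];
  const key = Object.keys(PROFILES)
    .filter((k) => model.startsWith(k))
    .sort((a, b) => b.length - a.length)[0];
  return key ? PROFILES[key] : undefined;
}

// Usage: gate multimodal input on the profile before building a request.
const profile = lookupProfile("gemini-2.5-flash-preview-09-2025");
if (profile && !profile.imageInputs) {
  throw new Error("This model does not support images");
}
```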
package/dist/utils/common.cjs
CHANGED
@@ -7,6 +7,9 @@ const __langchain_core_language_models_base = require_rolldown_runtime.__toESM(r
 const uuid = require_rolldown_runtime.__toESM(require("uuid"));
 
 //#region src/utils/common.ts
+const _FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY = "__gemini_function_call_thought_signatures__";
+const DUMMY_SIGNATURE = "ErYCCrMCAdHtim9kOoOkrPiCNVsmlpMIKd7ZMxgiFbVQOkgp7nlLcDMzVsZwIzvuT7nQROivoXA72ccC2lSDvR0Gh7dkWaGuj7ctv6t7ZceHnecx0QYa+ix8tYpRfjhyWozQ49lWiws6+YGjCt10KRTyWsZ2h6O7iHTYJwKIRwGUHRKy/qK/6kFxJm5ML00gLq4D8s5Z6DBpp2ZlR+uF4G8jJgeWQgyHWVdx2wGYElaceVAc66tZdPQRdOHpWtgYSI1YdaXgVI8KHY3/EfNc2YqqMIulvkDBAnuMhkAjV9xmBa54Tq+ih3Im4+r3DzqhGqYdsSkhS0kZMwte4Hjs65dZzCw9lANxIqYi1DJ639WNPYihp/DCJCos7o+/EeSPJaio5sgWDyUnMGkY1atsJZ+m7pj7DD5tvQ==";
+const iife = (fn) => fn();
 function getMessageAuthor(message) {
 const type = message._getType();
 if (__langchain_core_messages.ChatMessage.isInstance(message)) return message.role;
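This first hunk introduces the constant key under which per-tool-call thought signatures are stored on a message's additional_kwargs, a bundled fallback signature, and a small immediately-invoked-function helper. A rough sketch of the stored shape (the tool call id and the shortened signature value below are illustrative, not taken from a real response):

```ts
// Shape of the map kept on AIMessage.additional_kwargs: it associates each
// Gemini tool call id with the thought signature returned for that call.
type ThoughtSignatureMap = Record<string, string>;

const signatures: ThoughtSignatureMap = {
  call_abc123: "ErYCCrMC...", // truncated for readability
};

const additional_kwargs = {
  __gemini_function_call_thought_signatures__: signatures,
};
```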
@@ -149,7 +152,7 @@ function _convertLangChainContentToPart(content, isMultimodalModel) {
 else if ("type" in content) throw new Error(`Unknown content type ${content.type}`);
 else throw new Error(`Unknown content ${JSON.stringify(content)}`);
 }
-function convertMessageContentToParts(message, isMultimodalModel, previousMessages) {
+function convertMessageContentToParts(message, isMultimodalModel, previousMessages, model) {
 if ((0, __langchain_core_messages.isToolMessage)(message)) {
 const messageName = message.name ?? inferToolNameFromPreviousMessages(message, previousMessages);
 if (messageName === void 0) throw new Error(`Google requires a tool name for each tool call response, and we could not infer a called tool name for ToolMessage "${message.id}" from your passed messages. Please populate a "name" field on that ToolMessage explicitly.`);
@@ -167,15 +170,27 @@ function convertMessageContentToParts(message, isMultimodalModel, previousMessag
 const messageParts = [];
 if (typeof message.content === "string" && message.content) messageParts.push({ text: message.content });
 if (Array.isArray(message.content)) messageParts.push(...message.content.map((c) => _convertLangChainContentToPart(c, isMultimodalModel)).filter((p) => p !== void 0));
+const functionThoughtSignatures = message.additional_kwargs?.[_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY];
 if ((0, __langchain_core_messages.isAIMessage)(message) && message.tool_calls?.length) functionCalls = message.tool_calls.map((tc) => {
-
-
-
-
+const thoughtSignature = iife(() => {
+if (tc.id) {
+const signature = functionThoughtSignatures?.[tc.id];
+if (signature) return signature;
+}
+if (model?.includes("gemini-3")) return DUMMY_SIGNATURE;
+return "";
+});
+return {
+functionCall: {
+name: tc.name,
+args: tc.args
+},
+...thoughtSignature ? { thoughtSignature } : {}
+};
 });
 return [...messageParts, ...functionCalls];
 }
-function convertBaseMessagesToContent(messages, isMultimodalModel, convertSystemMessageToHumanContent = false) {
+function convertBaseMessagesToContent(messages, isMultimodalModel, convertSystemMessageToHumanContent = false, model) {
 return messages.reduce((acc, message, index) => {
 if (!(0, __langchain_core_messages.isBaseMessage)(message)) throw new Error("Unsupported message input");
 const author = getMessageAuthor(message);
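On the request side, each tool call on an outgoing AIMessage is emitted as a functionCall part, and the signature previously recorded for that tool call id is re-attached; when nothing was recorded and the model name contains "gemini-3", the bundled dummy signature is used instead. A hedged sketch of the resulting part (name, args, and the shortened signature are illustrative):

```ts
// Roughly what a re-sent tool call part looks like after this change.
const part = {
  functionCall: {
    name: "get_weather",
    args: { city: "Paris" },
  },
  // Only present when a signature was stored for this tool call id, or when
  // the configured model name contains "gemini-3" (dummy fallback).
  thoughtSignature: "ErYCCrMC...",
};
```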
@@ -183,7 +198,7 @@ function convertBaseMessagesToContent(messages, isMultimodalModel, convertSystem
 const role = convertAuthorToRole(author);
 const prevContent = acc.content[acc.content.length];
 if (!acc.mergeWithPreviousContent && prevContent && prevContent.role === role) throw new Error("Google Generative AI requires alternate messages between authors");
-const parts = convertMessageContentToParts(message, isMultimodalModel, messages.slice(0, index));
+const parts = convertMessageContentToParts(message, isMultimodalModel, messages.slice(0, index), model);
 if (acc.mergeWithPreviousContent) {
 const prevContent$1 = acc.content[acc.content.length - 1];
 if (!prevContent$1) throw new Error("There was a problem parsing your system message. Please try a prompt without one.");
@@ -213,9 +228,15 @@ function mapGenerateContentResultToChatResult(response, extra) {
 generations: [],
 llmOutput: { filters: response.promptFeedback }
 };
-const functionCalls = response.functionCalls();
 const [candidate] = response.candidates;
 const { content: candidateContent,...generationInfo } = candidate;
+const functionCalls = candidateContent.parts.reduce((acc, p) => {
+if ("functionCall" in p && p.functionCall) acc.push({
+...p,
+id: "id" in p.functionCall && typeof p.functionCall.id === "string" ? p.functionCall.id : (0, uuid.v4)()
+});
+return acc;
+}, []);
 let content;
 if (Array.isArray(candidateContent?.parts) && candidateContent.parts.length === 1 && candidateContent.parts[0].text) content = candidateContent.parts[0].text;
 else if (Array.isArray(candidateContent?.parts) && candidateContent.parts.length > 0) content = candidateContent.parts.map((p) => {
@@ -250,6 +271,10 @@ function mapGenerateContentResultToChatResult(response, extra) {
 return p;
 });
 else content = [];
+const functionThoughtSignatures = functionCalls?.reduce((acc, fc) => {
+if ("thoughtSignature" in fc && typeof fc.thoughtSignature === "string") acc[fc.id] = fc.thoughtSignature;
+return acc;
+}, {});
 let text = "";
 if (typeof content === "string") text = content;
 else if (Array.isArray(content) && content.length > 0) {
@@ -260,14 +285,16 @@ function mapGenerateContentResultToChatResult(response, extra) {
 text,
 message: new __langchain_core_messages.AIMessage({
 content: content ?? "",
-tool_calls: functionCalls?.map((fc) => {
-
-
-
-
-
-
-
+tool_calls: functionCalls?.map((fc) => ({
+type: "tool_call",
+id: fc.id,
+name: fc.functionCall.name,
+args: fc.functionCall.args
+})),
+additional_kwargs: {
+...generationInfo,
+[_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY]: functionThoughtSignatures
+},
 usage_metadata: extra?.usageMetadata
 }),
 generationInfo
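On the response side, function calls are now read straight from the candidate's parts, each is given a stable id (the part's own id when Gemini supplies one, otherwise a generated UUID), and their thought signatures are collected into the additional_kwargs map keyed by those ids. The resulting message looks roughly like this (values illustrative):

```ts
import { AIMessage } from "@langchain/core/messages";

// Sketch of the AIMessage produced by mapGenerateContentResultToChatResult
// after a function-calling turn.
const aiMessage = new AIMessage({
  content: "",
  tool_calls: [
    { type: "tool_call", id: "call_abc123", name: "get_weather", args: { city: "Paris" } },
  ],
  additional_kwargs: {
    __gemini_function_call_thought_signatures__: { call_abc123: "ErYCCrMC..." },
  },
});
```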
@@ -283,9 +310,15 @@ function mapGenerateContentResultToChatResult(response, extra) {
 }
 function convertResponseContentToChatGenerationChunk(response, extra) {
 if (!response.candidates || response.candidates.length === 0) return null;
-const functionCalls = response.functionCalls();
 const [candidate] = response.candidates;
 const { content: candidateContent,...generationInfo } = candidate;
+const functionCalls = candidateContent.parts.reduce((acc, p) => {
+if ("functionCall" in p && p.functionCall) acc.push({
+...p,
+id: "id" in p.functionCall && typeof p.functionCall.id === "string" ? p.functionCall.id : (0, uuid.v4)()
+});
+return acc;
+}, []);
 let content;
 if (Array.isArray(candidateContent?.parts) && candidateContent.parts.every((p) => "text" in p)) content = candidateContent.parts.map((p) => p.text).join("");
 else if (Array.isArray(candidateContent?.parts)) content = candidateContent.parts.map((p) => {
@@ -329,18 +362,21 @@ function convertResponseContentToChatGenerationChunk(response, extra) {
 const toolCallChunks = [];
 if (functionCalls) toolCallChunks.push(...functionCalls.map((fc) => ({
 ...fc,
-args: JSON.stringify(fc.args),
+args: JSON.stringify(fc.functionCall.args),
 index: extra.index,
-type: "tool_call_chunk"
-id: "id" in fc && typeof fc.id === "string" ? fc.id : (0, uuid.v4)()
+type: "tool_call_chunk"
 })));
+const functionThoughtSignatures = functionCalls?.reduce((acc, fc) => {
+if ("thoughtSignature" in fc && typeof fc.thoughtSignature === "string") acc[fc.id] = fc.thoughtSignature;
+return acc;
+}, {});
 return new __langchain_core_outputs.ChatGenerationChunk({
 text,
 message: new __langchain_core_messages.AIMessageChunk({
 content: content || "",
 name: !candidateContent ? void 0 : candidateContent.role,
 tool_call_chunks: toolCallChunks,
-additional_kwargs: {},
+additional_kwargs: { [_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY]: functionThoughtSignatures },
 response_metadata: { model_provider: "google-genai" },
 usage_metadata: extra.usageMetadata
 }),
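Put together, the signatures round-trip without any caller involvement: they ride along on the AIMessage's additional_kwargs and are re-attached when that message is sent back in the follow-up turn. A minimal sketch of the flow (model name, tool, and prompt are illustrative; error handling omitted):

```ts
import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
import { HumanMessage } from "@langchain/core/messages";
import { tool } from "@langchain/core/tools";
import { z } from "zod";

// A toy tool so the model has something to call.
const getWeather = tool(async ({ city }) => `Sunny in ${city}`, {
  name: "get_weather",
  description: "Look up the weather for a city",
  schema: z.object({ city: z.string() }),
});

const model = new ChatGoogleGenerativeAI({ model: "gemini-2.5-flash" }).bindTools([
  getWeather,
]);

const question = new HumanMessage("What is the weather in Paris?");
const aiMessage = await model.invoke([question]);
// Any thought signatures returned alongside the function calls now live in
// aiMessage.additional_kwargs["__gemini_function_call_thought_signatures__"],
// keyed by tool call id.

const toolMessage = await getWeather.invoke(aiMessage.tool_calls![0]);

// Sending the original AIMessage back lets convertMessageContentToParts
// re-attach each stored signature to its functionCall part.
const answer = await model.invoke([question, aiMessage, toolMessage]);
```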
package/dist/utils/common.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"common.cjs","names":["message: BaseMessage","ChatMessage","author: string","content: MessageContentComplex","message: ToolMessage | ToolMessageChunk","previousMessages: BaseMessage[]","isMultimodalModel: boolean","standardContentBlockConverter: StandardContentBlockConverter<{\n text: TextPart;\n image: FileDataPart | InlineDataPart;\n audio: FileDataPart | InlineDataPart;\n file: FileDataPart | InlineDataPart | TextPart;\n }>","functionCalls: FunctionCallPart[]","messageParts: Part[]","messages: BaseMessage[]","convertSystemMessageToHumanContent: boolean","prevContent","content: Content","response: EnhancedGenerateContentResponse","extra?: {\n usageMetadata: UsageMetadata | undefined;\n }","content: MessageContent | undefined","generation: ChatGeneration","AIMessage","extra: {\n usageMetadata?: UsageMetadata | undefined;\n index: number;\n }","toolCallChunks: ToolCallChunk[]","ChatGenerationChunk","AIMessageChunk","tools: GoogleGenerativeAIToolType[]","schemaToGenerativeAIParameters","jsonSchemaToGeminiParameters"],"sources":["../../src/utils/common.ts"],"sourcesContent":["import {\n EnhancedGenerateContentResponse,\n Content,\n Part,\n type FunctionDeclarationsTool as GoogleGenerativeAIFunctionDeclarationsTool,\n type FunctionDeclaration as GenerativeAIFunctionDeclaration,\n POSSIBLE_ROLES,\n FunctionCallPart,\n TextPart,\n FileDataPart,\n InlineDataPart,\n} from \"@google/generative-ai\";\nimport {\n AIMessage,\n AIMessageChunk,\n BaseMessage,\n ChatMessage,\n ToolMessage,\n ToolMessageChunk,\n MessageContent,\n MessageContentComplex,\n UsageMetadata,\n isAIMessage,\n isBaseMessage,\n isToolMessage,\n StandardContentBlockConverter,\n parseBase64DataUrl,\n convertToProviderContentBlock,\n isDataContentBlock,\n} from \"@langchain/core/messages\";\nimport {\n ChatGeneration,\n ChatGenerationChunk,\n ChatResult,\n} from \"@langchain/core/outputs\";\nimport { isLangChainTool } from \"@langchain/core/utils/function_calling\";\nimport { isOpenAITool } from \"@langchain/core/language_models/base\";\nimport { ToolCallChunk } from \"@langchain/core/messages/tool\";\nimport { v4 as uuidv4 } from \"uuid\";\nimport {\n jsonSchemaToGeminiParameters,\n schemaToGenerativeAIParameters,\n} from \"./zod_to_genai_parameters.js\";\nimport { GoogleGenerativeAIToolType } from \"../types.js\";\n\nexport function getMessageAuthor(message: BaseMessage) {\n const type = message._getType();\n if (ChatMessage.isInstance(message)) {\n return message.role;\n }\n if (type === \"tool\") {\n return type;\n }\n return message.name ?? type;\n}\n\n/**\n * Maps a message type to a Google Generative AI chat author.\n * @param message The message to map.\n * @param model The model to use for mapping.\n * @returns The message type mapped to a Google Generative AI chat author.\n */\nexport function convertAuthorToRole(\n author: string\n): (typeof POSSIBLE_ROLES)[number] {\n switch (author) {\n /**\n * Note: Gemini currently is not supporting system messages\n * we will convert them to human messages and merge with following\n * */\n case \"supervisor\":\n case \"ai\":\n case \"model\": // getMessageAuthor returns message.name. code ex.: return message.name ?? 
type;\n return \"model\";\n case \"system\":\n return \"system\";\n case \"human\":\n return \"user\";\n case \"tool\":\n case \"function\":\n return \"function\";\n default:\n throw new Error(`Unknown / unsupported author: ${author}`);\n }\n}\n\nfunction messageContentMedia(content: MessageContentComplex): Part {\n if (\"mimeType\" in content && \"data\" in content) {\n return {\n inlineData: {\n mimeType: content.mimeType,\n data: content.data,\n },\n };\n }\n if (\"mimeType\" in content && \"fileUri\" in content) {\n return {\n fileData: {\n mimeType: content.mimeType,\n fileUri: content.fileUri,\n },\n };\n }\n\n throw new Error(\"Invalid media content\");\n}\n\nfunction inferToolNameFromPreviousMessages(\n message: ToolMessage | ToolMessageChunk,\n previousMessages: BaseMessage[]\n): string | undefined {\n return previousMessages\n .map((msg) => {\n if (isAIMessage(msg)) {\n return msg.tool_calls ?? [];\n }\n return [];\n })\n .flat()\n .find((toolCall) => {\n return toolCall.id === message.tool_call_id;\n })?.name;\n}\n\nfunction _getStandardContentBlockConverter(isMultimodalModel: boolean) {\n const standardContentBlockConverter: StandardContentBlockConverter<{\n text: TextPart;\n image: FileDataPart | InlineDataPart;\n audio: FileDataPart | InlineDataPart;\n file: FileDataPart | InlineDataPart | TextPart;\n }> = {\n providerName: \"Google Gemini\",\n\n fromStandardTextBlock(block) {\n return {\n text: block.text,\n };\n },\n\n fromStandardImageBlock(block): FileDataPart | InlineDataPart {\n if (!isMultimodalModel) {\n throw new Error(\"This model does not support images\");\n }\n if (block.source_type === \"url\") {\n const data = parseBase64DataUrl({ dataUrl: block.url });\n if (data) {\n return {\n inlineData: {\n mimeType: data.mime_type,\n data: data.data,\n },\n };\n } else {\n return {\n fileData: {\n mimeType: block.mime_type ?? \"\",\n fileUri: block.url,\n },\n };\n }\n }\n\n if (block.source_type === \"base64\") {\n return {\n inlineData: {\n mimeType: block.mime_type ?? \"\",\n data: block.data,\n },\n };\n }\n\n throw new Error(`Unsupported source type: ${block.source_type}`);\n },\n\n fromStandardAudioBlock(block): FileDataPart | InlineDataPart {\n if (!isMultimodalModel) {\n throw new Error(\"This model does not support audio\");\n }\n if (block.source_type === \"url\") {\n const data = parseBase64DataUrl({ dataUrl: block.url });\n if (data) {\n return {\n inlineData: {\n mimeType: data.mime_type,\n data: data.data,\n },\n };\n } else {\n return {\n fileData: {\n mimeType: block.mime_type ?? \"\",\n fileUri: block.url,\n },\n };\n }\n }\n\n if (block.source_type === \"base64\") {\n return {\n inlineData: {\n mimeType: block.mime_type ?? \"\",\n data: block.data,\n },\n };\n }\n\n throw new Error(`Unsupported source type: ${block.source_type}`);\n },\n\n fromStandardFileBlock(block): FileDataPart | InlineDataPart | TextPart {\n if (!isMultimodalModel) {\n throw new Error(\"This model does not support files\");\n }\n if (block.source_type === \"text\") {\n return {\n text: block.text,\n };\n }\n if (block.source_type === \"url\") {\n const data = parseBase64DataUrl({ dataUrl: block.url });\n if (data) {\n return {\n inlineData: {\n mimeType: data.mime_type,\n data: data.data,\n },\n };\n } else {\n return {\n fileData: {\n mimeType: block.mime_type ?? \"\",\n fileUri: block.url,\n },\n };\n }\n }\n\n if (block.source_type === \"base64\") {\n return {\n inlineData: {\n mimeType: block.mime_type ?? 
\"\",\n data: block.data,\n },\n };\n }\n throw new Error(`Unsupported source type: ${block.source_type}`);\n },\n };\n return standardContentBlockConverter;\n}\n\nfunction _convertLangChainContentToPart(\n content: MessageContentComplex,\n isMultimodalModel: boolean\n): Part | undefined {\n if (isDataContentBlock(content)) {\n return convertToProviderContentBlock(\n content,\n _getStandardContentBlockConverter(isMultimodalModel)\n );\n }\n\n if (content.type === \"text\") {\n return { text: content.text };\n } else if (content.type === \"executableCode\") {\n return { executableCode: content.executableCode };\n } else if (content.type === \"codeExecutionResult\") {\n return { codeExecutionResult: content.codeExecutionResult };\n } else if (content.type === \"image_url\") {\n if (!isMultimodalModel) {\n throw new Error(`This model does not support images`);\n }\n let source;\n if (typeof content.image_url === \"string\") {\n source = content.image_url;\n } else if (\n typeof content.image_url === \"object\" &&\n \"url\" in content.image_url\n ) {\n source = content.image_url.url;\n } else {\n throw new Error(\"Please provide image as base64 encoded data URL\");\n }\n const [dm, data] = source.split(\",\");\n if (!dm.startsWith(\"data:\")) {\n throw new Error(\"Please provide image as base64 encoded data URL\");\n }\n\n const [mimeType, encoding] = dm.replace(/^data:/, \"\").split(\";\");\n if (encoding !== \"base64\") {\n throw new Error(\"Please provide image as base64 encoded data URL\");\n }\n\n return {\n inlineData: {\n data,\n mimeType,\n },\n };\n } else if (content.type === \"media\") {\n return messageContentMedia(content);\n } else if (content.type === \"tool_use\") {\n return {\n functionCall: {\n name: content.name,\n args: content.input,\n },\n };\n } else if (\n content.type?.includes(\"/\") &&\n // Ensure it's a single slash.\n content.type.split(\"/\").length === 2 &&\n \"data\" in content &&\n typeof content.data === \"string\"\n ) {\n return {\n inlineData: {\n mimeType: content.type,\n data: content.data,\n },\n };\n } else if (\"functionCall\" in content) {\n // No action needed here — function calls will be added later from message.tool_calls\n return undefined;\n } else {\n if (\"type\" in content) {\n throw new Error(`Unknown content type ${content.type}`);\n } else {\n throw new Error(`Unknown content ${JSON.stringify(content)}`);\n }\n }\n}\n\nexport function convertMessageContentToParts(\n message: BaseMessage,\n isMultimodalModel: boolean,\n previousMessages: BaseMessage[]\n): Part[] {\n if (isToolMessage(message)) {\n const messageName =\n message.name ??\n inferToolNameFromPreviousMessages(message, previousMessages);\n if (messageName === undefined) {\n throw new Error(\n `Google requires a tool name for each tool call response, and we could not infer a called tool name for ToolMessage \"${message.id}\" from your passed messages. Please populate a \"name\" field on that ToolMessage explicitly.`\n );\n }\n\n const result = Array.isArray(message.content)\n ? 
(message.content\n .map((c) => _convertLangChainContentToPart(c, isMultimodalModel))\n .filter((p) => p !== undefined) as Part[])\n : message.content;\n\n if (message.status === \"error\") {\n return [\n {\n functionResponse: {\n name: messageName,\n // The API expects an object with an `error` field if the function call fails.\n // `error` must be a valid object (not a string or array), so we wrap `message.content` here\n response: { error: { details: result } },\n },\n },\n ];\n }\n\n return [\n {\n functionResponse: {\n name: messageName,\n // again, can't have a string or array value for `response`, so we wrap it as an object here\n response: { result },\n },\n },\n ];\n }\n\n let functionCalls: FunctionCallPart[] = [];\n const messageParts: Part[] = [];\n\n if (typeof message.content === \"string\" && message.content) {\n messageParts.push({ text: message.content });\n }\n\n if (Array.isArray(message.content)) {\n messageParts.push(\n ...(message.content\n .map((c) => _convertLangChainContentToPart(c, isMultimodalModel))\n .filter((p) => p !== undefined) as Part[])\n );\n }\n\n if (isAIMessage(message) && message.tool_calls?.length) {\n functionCalls = message.tool_calls.map((tc) => {\n return {\n functionCall: {\n name: tc.name,\n args: tc.args,\n },\n };\n });\n }\n\n return [...messageParts, ...functionCalls];\n}\n\nexport function convertBaseMessagesToContent(\n messages: BaseMessage[],\n isMultimodalModel: boolean,\n convertSystemMessageToHumanContent: boolean = false\n) {\n return messages.reduce<{\n content: Content[];\n mergeWithPreviousContent: boolean;\n }>(\n (acc, message, index) => {\n if (!isBaseMessage(message)) {\n throw new Error(\"Unsupported message input\");\n }\n const author = getMessageAuthor(message);\n if (author === \"system\" && index !== 0) {\n throw new Error(\"System message should be the first one\");\n }\n const role = convertAuthorToRole(author);\n\n const prevContent = acc.content[acc.content.length];\n if (\n !acc.mergeWithPreviousContent &&\n prevContent &&\n prevContent.role === role\n ) {\n throw new Error(\n \"Google Generative AI requires alternate messages between authors\"\n );\n }\n\n const parts = convertMessageContentToParts(\n message,\n isMultimodalModel,\n messages.slice(0, index)\n );\n\n if (acc.mergeWithPreviousContent) {\n const prevContent = acc.content[acc.content.length - 1];\n if (!prevContent) {\n throw new Error(\n \"There was a problem parsing your system message. 
Please try a prompt without one.\"\n );\n }\n prevContent.parts.push(...parts);\n\n return {\n mergeWithPreviousContent: false,\n content: acc.content,\n };\n }\n let actualRole = role;\n if (\n actualRole === \"function\" ||\n (actualRole === \"system\" && !convertSystemMessageToHumanContent)\n ) {\n // GenerativeAI API will throw an error if the role is not \"user\" or \"model.\"\n actualRole = \"user\";\n }\n const content: Content = {\n role: actualRole,\n parts,\n };\n return {\n mergeWithPreviousContent:\n author === \"system\" && !convertSystemMessageToHumanContent,\n content: [...acc.content, content],\n };\n },\n { content: [], mergeWithPreviousContent: false }\n ).content;\n}\n\nexport function mapGenerateContentResultToChatResult(\n response: EnhancedGenerateContentResponse,\n extra?: {\n usageMetadata: UsageMetadata | undefined;\n }\n): ChatResult {\n // if rejected or error, return empty generations with reason in filters\n if (\n !response.candidates ||\n response.candidates.length === 0 ||\n !response.candidates[0]\n ) {\n return {\n generations: [],\n llmOutput: {\n filters: response.promptFeedback,\n },\n };\n }\n\n const functionCalls = response.functionCalls();\n const [candidate] = response.candidates;\n const { content: candidateContent, ...generationInfo } = candidate;\n let content: MessageContent | undefined;\n\n if (\n Array.isArray(candidateContent?.parts) &&\n candidateContent.parts.length === 1 &&\n candidateContent.parts[0].text\n ) {\n content = candidateContent.parts[0].text;\n } else if (\n Array.isArray(candidateContent?.parts) &&\n candidateContent.parts.length > 0\n ) {\n content = candidateContent.parts.map((p) => {\n if (\"text\" in p) {\n return {\n type: \"text\",\n text: p.text,\n };\n } else if (\"inlineData\" in p) {\n return {\n type: \"inlineData\",\n inlineData: p.inlineData,\n };\n } else if (\"functionCall\" in p) {\n return {\n type: \"functionCall\",\n functionCall: p.functionCall,\n };\n } else if (\"functionResponse\" in p) {\n return {\n type: \"functionResponse\",\n functionResponse: p.functionResponse,\n };\n } else if (\"fileData\" in p) {\n return {\n type: \"fileData\",\n fileData: p.fileData,\n };\n } else if (\"executableCode\" in p) {\n return {\n type: \"executableCode\",\n executableCode: p.executableCode,\n };\n } else if (\"codeExecutionResult\" in p) {\n return {\n type: \"codeExecutionResult\",\n codeExecutionResult: p.codeExecutionResult,\n };\n }\n return p;\n });\n } else {\n // no content returned - likely due to abnormal stop reason, e.g. malformed function call\n content = [];\n }\n\n let text = \"\";\n if (typeof content === \"string\") {\n text = content;\n } else if (Array.isArray(content) && content.length > 0) {\n const block = content.find((b) => \"text\" in b) as\n | { text: string }\n | undefined;\n text = block?.text ?? text;\n }\n\n const generation: ChatGeneration = {\n text,\n message: new AIMessage({\n content: content ?? \"\",\n tool_calls: functionCalls?.map((fc) => {\n return {\n ...fc,\n type: \"tool_call\",\n id: \"id\" in fc && typeof fc.id === \"string\" ? 
fc.id : uuidv4(),\n };\n }),\n additional_kwargs: {\n ...generationInfo,\n },\n usage_metadata: extra?.usageMetadata,\n }),\n generationInfo,\n };\n\n return {\n generations: [generation],\n llmOutput: {\n tokenUsage: {\n promptTokens: extra?.usageMetadata?.input_tokens,\n completionTokens: extra?.usageMetadata?.output_tokens,\n totalTokens: extra?.usageMetadata?.total_tokens,\n },\n },\n };\n}\n\nexport function convertResponseContentToChatGenerationChunk(\n response: EnhancedGenerateContentResponse,\n extra: {\n usageMetadata?: UsageMetadata | undefined;\n index: number;\n }\n): ChatGenerationChunk | null {\n if (!response.candidates || response.candidates.length === 0) {\n return null;\n }\n const functionCalls = response.functionCalls();\n const [candidate] = response.candidates;\n const { content: candidateContent, ...generationInfo } = candidate;\n let content: MessageContent | undefined;\n // Checks if some parts do not have text. If false, it means that the content is a string.\n if (\n Array.isArray(candidateContent?.parts) &&\n candidateContent.parts.every((p) => \"text\" in p)\n ) {\n content = candidateContent.parts.map((p) => p.text).join(\"\");\n } else if (Array.isArray(candidateContent?.parts)) {\n content = candidateContent.parts.map((p) => {\n if (\"text\" in p) {\n return {\n type: \"text\",\n text: p.text,\n };\n } else if (\"inlineData\" in p) {\n return {\n type: \"inlineData\",\n inlineData: p.inlineData,\n };\n } else if (\"functionCall\" in p) {\n return {\n type: \"functionCall\",\n functionCall: p.functionCall,\n };\n } else if (\"functionResponse\" in p) {\n return {\n type: \"functionResponse\",\n functionResponse: p.functionResponse,\n };\n } else if (\"fileData\" in p) {\n return {\n type: \"fileData\",\n fileData: p.fileData,\n };\n } else if (\"executableCode\" in p) {\n return {\n type: \"executableCode\",\n executableCode: p.executableCode,\n };\n } else if (\"codeExecutionResult\" in p) {\n return {\n type: \"codeExecutionResult\",\n codeExecutionResult: p.codeExecutionResult,\n };\n }\n return p;\n });\n } else {\n // no content returned - likely due to abnormal stop reason, e.g. malformed function call\n content = [];\n }\n\n let text = \"\";\n if (content && typeof content === \"string\") {\n text = content;\n } else if (Array.isArray(content)) {\n const block = content.find((b) => \"text\" in b) as\n | { text: string }\n | undefined;\n text = block?.text ?? \"\";\n }\n\n const toolCallChunks: ToolCallChunk[] = [];\n if (functionCalls) {\n toolCallChunks.push(\n ...functionCalls.map((fc) => ({\n ...fc,\n args: JSON.stringify(fc.args),\n index: extra.index,\n type: \"tool_call_chunk\" as const,\n id: \"id\" in fc && typeof fc.id === \"string\" ? fc.id : uuidv4(),\n }))\n );\n }\n\n return new ChatGenerationChunk({\n text,\n message: new AIMessageChunk({\n content: content || \"\",\n name: !candidateContent ? 
undefined : candidateContent.role,\n tool_call_chunks: toolCallChunks,\n // Each chunk can have unique \"generationInfo\", and merging strategy is unclear,\n // so leave blank for now.\n additional_kwargs: {},\n response_metadata: {\n model_provider: \"google-genai\",\n },\n usage_metadata: extra.usageMetadata,\n }),\n generationInfo,\n });\n}\n\nexport function convertToGenerativeAITools(\n tools: GoogleGenerativeAIToolType[]\n): GoogleGenerativeAIFunctionDeclarationsTool[] {\n if (\n tools.every(\n (tool) =>\n \"functionDeclarations\" in tool &&\n Array.isArray(tool.functionDeclarations)\n )\n ) {\n return tools as GoogleGenerativeAIFunctionDeclarationsTool[];\n }\n return [\n {\n functionDeclarations: tools.map(\n (tool): GenerativeAIFunctionDeclaration => {\n if (isLangChainTool(tool)) {\n const jsonSchema = schemaToGenerativeAIParameters(tool.schema);\n if (\n jsonSchema.type === \"object\" &&\n \"properties\" in jsonSchema &&\n Object.keys(jsonSchema.properties).length === 0\n ) {\n return {\n name: tool.name,\n description: tool.description,\n };\n }\n return {\n name: tool.name,\n description: tool.description,\n parameters: jsonSchema,\n };\n }\n if (isOpenAITool(tool)) {\n return {\n name: tool.function.name,\n description:\n tool.function.description ?? `A function available to call.`,\n parameters: jsonSchemaToGeminiParameters(\n tool.function.parameters\n ),\n };\n }\n return tool as unknown as GenerativeAIFunctionDeclaration;\n }\n ),\n },\n ];\n}\n"],"mappings":";;;;;;;;;AA6CA,SAAgB,iBAAiBA,SAAsB;CACrD,MAAM,OAAO,QAAQ,UAAU;AAC/B,KAAIC,sCAAY,WAAW,QAAQ,CACjC,QAAO,QAAQ;AAEjB,KAAI,SAAS,OACX,QAAO;AAET,QAAO,QAAQ,QAAQ;AACxB;;;;;;;AAQD,SAAgB,oBACdC,QACiC;AACjC,SAAQ,QAAR;EAKE,KAAK;EACL,KAAK;EACL,KAAK,QACH,QAAO;EACT,KAAK,SACH,QAAO;EACT,KAAK,QACH,QAAO;EACT,KAAK;EACL,KAAK,WACH,QAAO;EACT,QACE,OAAM,IAAI,MAAM,CAAC,8BAA8B,EAAE,QAAQ;CAC5D;AACF;AAED,SAAS,oBAAoBC,SAAsC;AACjE,KAAI,cAAc,WAAW,UAAU,QACrC,QAAO,EACL,YAAY;EACV,UAAU,QAAQ;EAClB,MAAM,QAAQ;CACf,EACF;AAEH,KAAI,cAAc,WAAW,aAAa,QACxC,QAAO,EACL,UAAU;EACR,UAAU,QAAQ;EAClB,SAAS,QAAQ;CAClB,EACF;AAGH,OAAM,IAAI,MAAM;AACjB;AAED,SAAS,kCACPC,SACAC,kBACoB;AACpB,QAAO,iBACJ,IAAI,CAAC,QAAQ;AACZ,iDAAgB,IAAI,CAClB,QAAO,IAAI,cAAc,CAAE;AAE7B,SAAO,CAAE;CACV,EAAC,CACD,MAAM,CACN,KAAK,CAAC,aAAa;AAClB,SAAO,SAAS,OAAO,QAAQ;CAChC,EAAC,EAAE;AACP;AAED,SAAS,kCAAkCC,mBAA4B;CACrE,MAAMC,gCAKD;EACH,cAAc;EAEd,sBAAsB,OAAO;AAC3B,UAAO,EACL,MAAM,MAAM,KACb;EACF;EAED,uBAAuB,OAAsC;AAC3D,OAAI,CAAC,kBACH,OAAM,IAAI,MAAM;AAElB,OAAI,MAAM,gBAAgB,OAAO;IAC/B,MAAM,yDAA0B,EAAE,SAAS,MAAM,IAAK,EAAC;AACvD,QAAI,KACF,QAAO,EACL,YAAY;KACV,UAAU,KAAK;KACf,MAAM,KAAK;IACZ,EACF;QAED,QAAO,EACL,UAAU;KACR,UAAU,MAAM,aAAa;KAC7B,SAAS,MAAM;IAChB,EACF;GAEJ;AAED,OAAI,MAAM,gBAAgB,SACxB,QAAO,EACL,YAAY;IACV,UAAU,MAAM,aAAa;IAC7B,MAAM,MAAM;GACb,EACF;AAGH,SAAM,IAAI,MAAM,CAAC,yBAAyB,EAAE,MAAM,aAAa;EAChE;EAED,uBAAuB,OAAsC;AAC3D,OAAI,CAAC,kBACH,OAAM,IAAI,MAAM;AAElB,OAAI,MAAM,gBAAgB,OAAO;IAC/B,MAAM,yDAA0B,EAAE,SAAS,MAAM,IAAK,EAAC;AACvD,QAAI,KACF,QAAO,EACL,YAAY;KACV,UAAU,KAAK;KACf,MAAM,KAAK;IACZ,EACF;QAED,QAAO,EACL,UAAU;KACR,UAAU,MAAM,aAAa;KAC7B,SAAS,MAAM;IAChB,EACF;GAEJ;AAED,OAAI,MAAM,gBAAgB,SACxB,QAAO,EACL,YAAY;IACV,UAAU,MAAM,aAAa;IAC7B,MAAM,MAAM;GACb,EACF;AAGH,SAAM,IAAI,MAAM,CAAC,yBAAyB,EAAE,MAAM,aAAa;EAChE;EAED,sBAAsB,OAAiD;AACrE,OAAI,CAAC,kBACH,OAAM,IAAI,MAAM;AAElB,OAAI,MAAM,gBAAgB,OACxB,QAAO,EACL,MAAM,MAAM,KACb;AAEH,OAAI,MAAM,gBAAgB,OAAO;IAC/B,MAAM,yDAA0B,EAAE,SAAS,MAAM,IAAK,EAAC;AACvD,QAAI,KACF,QAAO,EACL,YAAY;KACV,UAAU,KAAK;KACf,MAAM,KAAK;IACZ,EACF;QAED,QAAO,EACL,UAAU;KACR,UAAU,MAAM,aAAa;KAC7B,SAAS,MAAM;IAChB,EACF;GAE
J;AAED,OAAI,MAAM,gBAAgB,SACxB,QAAO,EACL,YAAY;IACV,UAAU,MAAM,aAAa;IAC7B,MAAM,MAAM;GACb,EACF;AAEH,SAAM,IAAI,MAAM,CAAC,yBAAyB,EAAE,MAAM,aAAa;EAChE;CACF;AACD,QAAO;AACR;AAED,SAAS,+BACPJ,SACAG,mBACkB;AAClB,uDAAuB,QAAQ,CAC7B,qEACE,SACA,kCAAkC,kBAAkB,CACrD;AAGH,KAAI,QAAQ,SAAS,OACnB,QAAO,EAAE,MAAM,QAAQ,KAAM;UACpB,QAAQ,SAAS,iBAC1B,QAAO,EAAE,gBAAgB,QAAQ,eAAgB;UACxC,QAAQ,SAAS,sBAC1B,QAAO,EAAE,qBAAqB,QAAQ,oBAAqB;UAClD,QAAQ,SAAS,aAAa;AACvC,MAAI,CAAC,kBACH,OAAM,IAAI,MAAM,CAAC,kCAAkC,CAAC;EAEtD,IAAI;AACJ,MAAI,OAAO,QAAQ,cAAc,UAC/B,SAAS,QAAQ;WAEjB,OAAO,QAAQ,cAAc,YAC7B,SAAS,QAAQ,WAEjB,SAAS,QAAQ,UAAU;MAE3B,OAAM,IAAI,MAAM;EAElB,MAAM,CAAC,IAAI,KAAK,GAAG,OAAO,MAAM,IAAI;AACpC,MAAI,CAAC,GAAG,WAAW,QAAQ,CACzB,OAAM,IAAI,MAAM;EAGlB,MAAM,CAAC,UAAU,SAAS,GAAG,GAAG,QAAQ,UAAU,GAAG,CAAC,MAAM,IAAI;AAChE,MAAI,aAAa,SACf,OAAM,IAAI,MAAM;AAGlB,SAAO,EACL,YAAY;GACV;GACA;EACD,EACF;CACF,WAAU,QAAQ,SAAS,QAC1B,QAAO,oBAAoB,QAAQ;UAC1B,QAAQ,SAAS,WAC1B,QAAO,EACL,cAAc;EACZ,MAAM,QAAQ;EACd,MAAM,QAAQ;CACf,EACF;UAED,QAAQ,MAAM,SAAS,IAAI,IAE3B,QAAQ,KAAK,MAAM,IAAI,CAAC,WAAW,KACnC,UAAU,WACV,OAAO,QAAQ,SAAS,SAExB,QAAO,EACL,YAAY;EACV,UAAU,QAAQ;EAClB,MAAM,QAAQ;CACf,EACF;UACQ,kBAAkB,QAE3B,QAAO;UAEH,UAAU,QACZ,OAAM,IAAI,MAAM,CAAC,qBAAqB,EAAE,QAAQ,MAAM;KAEtD,OAAM,IAAI,MAAM,CAAC,gBAAgB,EAAE,KAAK,UAAU,QAAQ,EAAE;AAGjE;AAED,SAAgB,6BACdN,SACAM,mBACAD,kBACQ;AACR,kDAAkB,QAAQ,EAAE;EAC1B,MAAM,cACJ,QAAQ,QACR,kCAAkC,SAAS,iBAAiB;AAC9D,MAAI,gBAAgB,OAClB,OAAM,IAAI,MACR,CAAC,oHAAoH,EAAE,QAAQ,GAAG,2FAA2F,CAAC;EAIlO,MAAM,SAAS,MAAM,QAAQ,QAAQ,QAAQ,GACxC,QAAQ,QACN,IAAI,CAAC,MAAM,+BAA+B,GAAG,kBAAkB,CAAC,CAChE,OAAO,CAAC,MAAM,MAAM,OAAU,GACjC,QAAQ;AAEZ,MAAI,QAAQ,WAAW,QACrB,QAAO,CACL,EACE,kBAAkB;GAChB,MAAM;GAGN,UAAU,EAAE,OAAO,EAAE,SAAS,OAAQ,EAAE;EACzC,EACF,CACF;AAGH,SAAO,CACL,EACE,kBAAkB;GAChB,MAAM;GAEN,UAAU,EAAE,OAAQ;EACrB,EACF,CACF;CACF;CAED,IAAIG,gBAAoC,CAAE;CAC1C,MAAMC,eAAuB,CAAE;AAE/B,KAAI,OAAO,QAAQ,YAAY,YAAY,QAAQ,SACjD,aAAa,KAAK,EAAE,MAAM,QAAQ,QAAS,EAAC;AAG9C,KAAI,MAAM,QAAQ,QAAQ,QAAQ,EAChC,aAAa,KACX,GAAI,QAAQ,QACT,IAAI,CAAC,MAAM,+BAA+B,GAAG,kBAAkB,CAAC,CAChE,OAAO,CAAC,MAAM,MAAM,OAAU,CAClC;AAGH,gDAAgB,QAAQ,IAAI,QAAQ,YAAY,QAC9C,gBAAgB,QAAQ,WAAW,IAAI,CAAC,OAAO;AAC7C,SAAO,EACL,cAAc;GACZ,MAAM,GAAG;GACT,MAAM,GAAG;EACV,EACF;CACF,EAAC;AAGJ,QAAO,CAAC,GAAG,cAAc,GAAG,aAAc;AAC3C;AAED,SAAgB,6BACdC,UACAJ,mBACAK,qCAA8C,OAC9C;AACA,QAAO,SAAS,OAId,CAAC,KAAK,SAAS,UAAU;AACvB,MAAI,8CAAe,QAAQ,CACzB,OAAM,IAAI,MAAM;EAElB,MAAM,SAAS,iBAAiB,QAAQ;AACxC,MAAI,WAAW,YAAY,UAAU,EACnC,OAAM,IAAI,MAAM;EAElB,MAAM,OAAO,oBAAoB,OAAO;EAExC,MAAM,cAAc,IAAI,QAAQ,IAAI,QAAQ;AAC5C,MACE,CAAC,IAAI,4BACL,eACA,YAAY,SAAS,KAErB,OAAM,IAAI,MACR;EAIJ,MAAM,QAAQ,6BACZ,SACA,mBACA,SAAS,MAAM,GAAG,MAAM,CACzB;AAED,MAAI,IAAI,0BAA0B;GAChC,MAAMC,gBAAc,IAAI,QAAQ,IAAI,QAAQ,SAAS;AACrD,OAAI,CAACA,cACH,OAAM,IAAI,MACR;GAGJA,cAAY,MAAM,KAAK,GAAG,MAAM;AAEhC,UAAO;IACL,0BAA0B;IAC1B,SAAS,IAAI;GACd;EACF;EACD,IAAI,aAAa;AACjB,MACE,eAAe,cACd,eAAe,YAAY,CAAC,oCAG7B,aAAa;EAEf,MAAMC,UAAmB;GACvB,MAAM;GACN;EACD;AACD,SAAO;GACL,0BACE,WAAW,YAAY,CAAC;GAC1B,SAAS,CAAC,GAAG,IAAI,SAAS,OAAQ;EACnC;CACF,GACD;EAAE,SAAS,CAAE;EAAE,0BAA0B;CAAO,EACjD,CAAC;AACH;AAED,SAAgB,qCACdC,UACAC,OAGY;AAEZ,KACE,CAAC,SAAS,cACV,SAAS,WAAW,WAAW,KAC/B,CAAC,SAAS,WAAW,GAErB,QAAO;EACL,aAAa,CAAE;EACf,WAAW,EACT,SAAS,SAAS,eACnB;CACF;CAGH,MAAM,gBAAgB,SAAS,eAAe;CAC9C,MAAM,CAAC,UAAU,GAAG,SAAS;CAC7B,MAAM,EAAE,SAAS,iBAAkB,GAAG,gBAAgB,GAAG;CACzD,IAAIC;AAEJ,KACE,MAAM,QAAQ,kBAAkB,MAAM,IACtC,iBAAiB,MAAM,WAAW,KAClC,iBAAiB,MAAM,GAAG,MAE1B,UAAU,iBAAiB,MAAM,GAAG;UAEpC,MAAM,QAAQ,kBAAkB,MAAM,IACtC,iBAAiB,MAAM,SAAS,GAEhC,UAAU,iBAAiB,MAAM,IAAI,CAAC,MAAM;AAC1C,MAAI,UAAU,EACZ,QAAO;GACL,MAAM;GACN,MAAM,EAAE;EACT;WACQ,gBAAgB
,EACzB,QAAO;GACL,MAAM;GACN,YAAY,EAAE;EACf;WACQ,kBAAkB,EAC3B,QAAO;GACL,MAAM;GACN,cAAc,EAAE;EACjB;WACQ,sBAAsB,EAC/B,QAAO;GACL,MAAM;GACN,kBAAkB,EAAE;EACrB;WACQ,cAAc,EACvB,QAAO;GACL,MAAM;GACN,UAAU,EAAE;EACb;WACQ,oBAAoB,EAC7B,QAAO;GACL,MAAM;GACN,gBAAgB,EAAE;EACnB;WACQ,yBAAyB,EAClC,QAAO;GACL,MAAM;GACN,qBAAqB,EAAE;EACxB;AAEH,SAAO;CACR,EAAC;MAGF,UAAU,CAAE;CAGd,IAAI,OAAO;AACX,KAAI,OAAO,YAAY,UACrB,OAAO;UACE,MAAM,QAAQ,QAAQ,IAAI,QAAQ,SAAS,GAAG;EACvD,MAAM,QAAQ,QAAQ,KAAK,CAAC,MAAM,UAAU,EAAE;EAG9C,OAAO,OAAO,QAAQ;CACvB;CAED,MAAMC,aAA6B;EACjC;EACA,SAAS,IAAIC,oCAAU;GACrB,SAAS,WAAW;GACpB,YAAY,eAAe,IAAI,CAAC,OAAO;AACrC,WAAO;KACL,GAAG;KACH,MAAM;KACN,IAAI,QAAQ,MAAM,OAAO,GAAG,OAAO,WAAW,GAAG,mBAAa;IAC/D;GACF,EAAC;GACF,mBAAmB,EACjB,GAAG,eACJ;GACD,gBAAgB,OAAO;EACxB;EACD;CACD;AAED,QAAO;EACL,aAAa,CAAC,UAAW;EACzB,WAAW,EACT,YAAY;GACV,cAAc,OAAO,eAAe;GACpC,kBAAkB,OAAO,eAAe;GACxC,aAAa,OAAO,eAAe;EACpC,EACF;CACF;AACF;AAED,SAAgB,4CACdJ,UACAK,OAI4B;AAC5B,KAAI,CAAC,SAAS,cAAc,SAAS,WAAW,WAAW,EACzD,QAAO;CAET,MAAM,gBAAgB,SAAS,eAAe;CAC9C,MAAM,CAAC,UAAU,GAAG,SAAS;CAC7B,MAAM,EAAE,SAAS,iBAAkB,GAAG,gBAAgB,GAAG;CACzD,IAAIH;AAEJ,KACE,MAAM,QAAQ,kBAAkB,MAAM,IACtC,iBAAiB,MAAM,MAAM,CAAC,MAAM,UAAU,EAAE,EAEhD,UAAU,iBAAiB,MAAM,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC,KAAK,GAAG;UACnD,MAAM,QAAQ,kBAAkB,MAAM,EAC/C,UAAU,iBAAiB,MAAM,IAAI,CAAC,MAAM;AAC1C,MAAI,UAAU,EACZ,QAAO;GACL,MAAM;GACN,MAAM,EAAE;EACT;WACQ,gBAAgB,EACzB,QAAO;GACL,MAAM;GACN,YAAY,EAAE;EACf;WACQ,kBAAkB,EAC3B,QAAO;GACL,MAAM;GACN,cAAc,EAAE;EACjB;WACQ,sBAAsB,EAC/B,QAAO;GACL,MAAM;GACN,kBAAkB,EAAE;EACrB;WACQ,cAAc,EACvB,QAAO;GACL,MAAM;GACN,UAAU,EAAE;EACb;WACQ,oBAAoB,EAC7B,QAAO;GACL,MAAM;GACN,gBAAgB,EAAE;EACnB;WACQ,yBAAyB,EAClC,QAAO;GACL,MAAM;GACN,qBAAqB,EAAE;EACxB;AAEH,SAAO;CACR,EAAC;MAGF,UAAU,CAAE;CAGd,IAAI,OAAO;AACX,KAAI,WAAW,OAAO,YAAY,UAChC,OAAO;UACE,MAAM,QAAQ,QAAQ,EAAE;EACjC,MAAM,QAAQ,QAAQ,KAAK,CAAC,MAAM,UAAU,EAAE;EAG9C,OAAO,OAAO,QAAQ;CACvB;CAED,MAAMI,iBAAkC,CAAE;AAC1C,KAAI,eACF,eAAe,KACb,GAAG,cAAc,IAAI,CAAC,QAAQ;EAC5B,GAAG;EACH,MAAM,KAAK,UAAU,GAAG,KAAK;EAC7B,OAAO,MAAM;EACb,MAAM;EACN,IAAI,QAAQ,MAAM,OAAO,GAAG,OAAO,WAAW,GAAG,mBAAa;CAC/D,GAAE,CACJ;AAGH,QAAO,IAAIC,6CAAoB;EAC7B;EACA,SAAS,IAAIC,yCAAe;GAC1B,SAAS,WAAW;GACpB,MAAM,CAAC,mBAAmB,SAAY,iBAAiB;GACvD,kBAAkB;GAGlB,mBAAmB,CAAE;GACrB,mBAAmB,EACjB,gBAAgB,eACjB;GACD,gBAAgB,MAAM;EACvB;EACD;CACD;AACF;AAED,SAAgB,2BACdC,OAC8C;AAC9C,KACE,MAAM,MACJ,CAAC,SACC,0BAA0B,QAC1B,MAAM,QAAQ,KAAK,qBAAqB,CAC3C,CAED,QAAO;AAET,QAAO,CACL,EACE,sBAAsB,MAAM,IAC1B,CAAC,SAA0C;AACzC,mEAAoB,KAAK,EAAE;GACzB,MAAM,aAAaC,+DAA+B,KAAK,OAAO;AAC9D,OACE,WAAW,SAAS,YACpB,gBAAgB,cAChB,OAAO,KAAK,WAAW,WAAW,CAAC,WAAW,EAE9C,QAAO;IACL,MAAM,KAAK;IACX,aAAa,KAAK;GACnB;AAEH,UAAO;IACL,MAAM,KAAK;IACX,aAAa,KAAK;IAClB,YAAY;GACb;EACF;AACD,8DAAiB,KAAK,CACpB,QAAO;GACL,MAAM,KAAK,SAAS;GACpB,aACE,KAAK,SAAS,eAAe,CAAC,6BAA6B,CAAC;GAC9D,YAAYC,6DACV,KAAK,SAAS,WACf;EACF;AAEH,SAAO;CACR,EACF,CACF,CACF;AACF"}
+
{"version":3,"file":"common.cjs","names":["fn: () => string","message: BaseMessage","ChatMessage","author: string","content: MessageContentComplex","message: ToolMessage | ToolMessageChunk","previousMessages: BaseMessage[]","isMultimodalModel: boolean","standardContentBlockConverter: StandardContentBlockConverter<{\n text: TextPart;\n image: FileDataPart | InlineDataPart;\n audio: FileDataPart | InlineDataPart;\n file: FileDataPart | InlineDataPart | TextPart;\n }>","model?: string","functionCalls: FunctionCallPart[]","messageParts: Part[]","messages: BaseMessage[]","convertSystemMessageToHumanContent: boolean","model: string","prevContent","content: Content","response: EnhancedGenerateContentResponse","extra?: {\n usageMetadata: UsageMetadata | undefined;\n }","content: MessageContent | undefined","generation: ChatGeneration","AIMessage","extra: {\n usageMetadata?: UsageMetadata | undefined;\n index: number;\n }","toolCallChunks: ToolCallChunk[]","ChatGenerationChunk","AIMessageChunk","tools: GoogleGenerativeAIToolType[]","schemaToGenerativeAIParameters","jsonSchemaToGeminiParameters"],"sources":["../../src/utils/common.ts"],"sourcesContent":["import {\n EnhancedGenerateContentResponse,\n Content,\n Part,\n type FunctionDeclarationsTool as GoogleGenerativeAIFunctionDeclarationsTool,\n type FunctionDeclaration as GenerativeAIFunctionDeclaration,\n POSSIBLE_ROLES,\n FunctionCallPart,\n TextPart,\n FileDataPart,\n InlineDataPart,\n} from \"@google/generative-ai\";\nimport {\n AIMessage,\n AIMessageChunk,\n BaseMessage,\n ChatMessage,\n ToolMessage,\n ToolMessageChunk,\n MessageContent,\n MessageContentComplex,\n UsageMetadata,\n isAIMessage,\n isBaseMessage,\n isToolMessage,\n StandardContentBlockConverter,\n parseBase64DataUrl,\n convertToProviderContentBlock,\n isDataContentBlock,\n} from \"@langchain/core/messages\";\nimport {\n ChatGeneration,\n ChatGenerationChunk,\n ChatResult,\n} from \"@langchain/core/outputs\";\nimport { isLangChainTool } from \"@langchain/core/utils/function_calling\";\nimport { isOpenAITool } from \"@langchain/core/language_models/base\";\nimport { ToolCallChunk } from \"@langchain/core/messages/tool\";\nimport { v4 as uuidv4 } from \"uuid\";\nimport {\n jsonSchemaToGeminiParameters,\n schemaToGenerativeAIParameters,\n} from \"./zod_to_genai_parameters.js\";\nimport { GoogleGenerativeAIToolType } from \"../types.js\";\n\nexport const _FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY =\n \"__gemini_function_call_thought_signatures__\";\nconst DUMMY_SIGNATURE =\n \"ErYCCrMCAdHtim9kOoOkrPiCNVsmlpMIKd7ZMxgiFbVQOkgp7nlLcDMzVsZwIzvuT7nQROivoXA72ccC2lSDvR0Gh7dkWaGuj7ctv6t7ZceHnecx0QYa+ix8tYpRfjhyWozQ49lWiws6+YGjCt10KRTyWsZ2h6O7iHTYJwKIRwGUHRKy/qK/6kFxJm5ML00gLq4D8s5Z6DBpp2ZlR+uF4G8jJgeWQgyHWVdx2wGYElaceVAc66tZdPQRdOHpWtgYSI1YdaXgVI8KHY3/EfNc2YqqMIulvkDBAnuMhkAjV9xmBa54Tq+ih3Im4+r3DzqhGqYdsSkhS0kZMwte4Hjs65dZzCw9lANxIqYi1DJ639WNPYihp/DCJCos7o+/EeSPJaio5sgWDyUnMGkY1atsJZ+m7pj7DD5tvQ==\";\n\nconst iife = (fn: () => string) => fn();\n\nexport function getMessageAuthor(message: BaseMessage) {\n const type = message._getType();\n if (ChatMessage.isInstance(message)) {\n return message.role;\n }\n if (type === \"tool\") {\n return type;\n }\n return message.name ?? 
type;\n}\n\n/**\n * Maps a message type to a Google Generative AI chat author.\n * @param message The message to map.\n * @param model The model to use for mapping.\n * @returns The message type mapped to a Google Generative AI chat author.\n */\nexport function convertAuthorToRole(\n author: string\n): (typeof POSSIBLE_ROLES)[number] {\n switch (author) {\n /**\n * Note: Gemini currently is not supporting system messages\n * we will convert them to human messages and merge with following\n * */\n case \"supervisor\":\n case \"ai\":\n case \"model\": // getMessageAuthor returns message.name. code ex.: return message.name ?? type;\n return \"model\";\n case \"system\":\n return \"system\";\n case \"human\":\n return \"user\";\n case \"tool\":\n case \"function\":\n return \"function\";\n default:\n throw new Error(`Unknown / unsupported author: ${author}`);\n }\n}\n\nfunction messageContentMedia(content: MessageContentComplex): Part {\n if (\"mimeType\" in content && \"data\" in content) {\n return {\n inlineData: {\n mimeType: content.mimeType,\n data: content.data,\n },\n };\n }\n if (\"mimeType\" in content && \"fileUri\" in content) {\n return {\n fileData: {\n mimeType: content.mimeType,\n fileUri: content.fileUri,\n },\n };\n }\n\n throw new Error(\"Invalid media content\");\n}\n\nfunction inferToolNameFromPreviousMessages(\n message: ToolMessage | ToolMessageChunk,\n previousMessages: BaseMessage[]\n): string | undefined {\n return previousMessages\n .map((msg) => {\n if (isAIMessage(msg)) {\n return msg.tool_calls ?? [];\n }\n return [];\n })\n .flat()\n .find((toolCall) => {\n return toolCall.id === message.tool_call_id;\n })?.name;\n}\n\nfunction _getStandardContentBlockConverter(isMultimodalModel: boolean) {\n const standardContentBlockConverter: StandardContentBlockConverter<{\n text: TextPart;\n image: FileDataPart | InlineDataPart;\n audio: FileDataPart | InlineDataPart;\n file: FileDataPart | InlineDataPart | TextPart;\n }> = {\n providerName: \"Google Gemini\",\n\n fromStandardTextBlock(block) {\n return {\n text: block.text,\n };\n },\n\n fromStandardImageBlock(block): FileDataPart | InlineDataPart {\n if (!isMultimodalModel) {\n throw new Error(\"This model does not support images\");\n }\n if (block.source_type === \"url\") {\n const data = parseBase64DataUrl({ dataUrl: block.url });\n if (data) {\n return {\n inlineData: {\n mimeType: data.mime_type,\n data: data.data,\n },\n };\n } else {\n return {\n fileData: {\n mimeType: block.mime_type ?? \"\",\n fileUri: block.url,\n },\n };\n }\n }\n\n if (block.source_type === \"base64\") {\n return {\n inlineData: {\n mimeType: block.mime_type ?? \"\",\n data: block.data,\n },\n };\n }\n\n throw new Error(`Unsupported source type: ${block.source_type}`);\n },\n\n fromStandardAudioBlock(block): FileDataPart | InlineDataPart {\n if (!isMultimodalModel) {\n throw new Error(\"This model does not support audio\");\n }\n if (block.source_type === \"url\") {\n const data = parseBase64DataUrl({ dataUrl: block.url });\n if (data) {\n return {\n inlineData: {\n mimeType: data.mime_type,\n data: data.data,\n },\n };\n } else {\n return {\n fileData: {\n mimeType: block.mime_type ?? \"\",\n fileUri: block.url,\n },\n };\n }\n }\n\n if (block.source_type === \"base64\") {\n return {\n inlineData: {\n mimeType: block.mime_type ?? 
\"\",\n data: block.data,\n },\n };\n }\n\n throw new Error(`Unsupported source type: ${block.source_type}`);\n },\n\n fromStandardFileBlock(block): FileDataPart | InlineDataPart | TextPart {\n if (!isMultimodalModel) {\n throw new Error(\"This model does not support files\");\n }\n if (block.source_type === \"text\") {\n return {\n text: block.text,\n };\n }\n if (block.source_type === \"url\") {\n const data = parseBase64DataUrl({ dataUrl: block.url });\n if (data) {\n return {\n inlineData: {\n mimeType: data.mime_type,\n data: data.data,\n },\n };\n } else {\n return {\n fileData: {\n mimeType: block.mime_type ?? \"\",\n fileUri: block.url,\n },\n };\n }\n }\n\n if (block.source_type === \"base64\") {\n return {\n inlineData: {\n mimeType: block.mime_type ?? \"\",\n data: block.data,\n },\n };\n }\n throw new Error(`Unsupported source type: ${block.source_type}`);\n },\n };\n return standardContentBlockConverter;\n}\n\nfunction _convertLangChainContentToPart(\n content: MessageContentComplex,\n isMultimodalModel: boolean\n): Part | undefined {\n if (isDataContentBlock(content)) {\n return convertToProviderContentBlock(\n content,\n _getStandardContentBlockConverter(isMultimodalModel)\n );\n }\n\n if (content.type === \"text\") {\n return { text: content.text };\n } else if (content.type === \"executableCode\") {\n return { executableCode: content.executableCode };\n } else if (content.type === \"codeExecutionResult\") {\n return { codeExecutionResult: content.codeExecutionResult };\n } else if (content.type === \"image_url\") {\n if (!isMultimodalModel) {\n throw new Error(`This model does not support images`);\n }\n let source;\n if (typeof content.image_url === \"string\") {\n source = content.image_url;\n } else if (\n typeof content.image_url === \"object\" &&\n \"url\" in content.image_url\n ) {\n source = content.image_url.url;\n } else {\n throw new Error(\"Please provide image as base64 encoded data URL\");\n }\n const [dm, data] = source.split(\",\");\n if (!dm.startsWith(\"data:\")) {\n throw new Error(\"Please provide image as base64 encoded data URL\");\n }\n\n const [mimeType, encoding] = dm.replace(/^data:/, \"\").split(\";\");\n if (encoding !== \"base64\") {\n throw new Error(\"Please provide image as base64 encoded data URL\");\n }\n\n return {\n inlineData: {\n data,\n mimeType,\n },\n };\n } else if (content.type === \"media\") {\n return messageContentMedia(content);\n } else if (content.type === \"tool_use\") {\n return {\n functionCall: {\n name: content.name,\n args: content.input,\n },\n };\n } else if (\n content.type?.includes(\"/\") &&\n // Ensure it's a single slash.\n content.type.split(\"/\").length === 2 &&\n \"data\" in content &&\n typeof content.data === \"string\"\n ) {\n return {\n inlineData: {\n mimeType: content.type,\n data: content.data,\n },\n };\n } else if (\"functionCall\" in content) {\n // No action needed here — function calls will be added later from message.tool_calls\n return undefined;\n } else {\n if (\"type\" in content) {\n throw new Error(`Unknown content type ${content.type}`);\n } else {\n throw new Error(`Unknown content ${JSON.stringify(content)}`);\n }\n }\n}\n\nexport function convertMessageContentToParts(\n message: BaseMessage,\n isMultimodalModel: boolean,\n previousMessages: BaseMessage[],\n model?: string\n): Part[] {\n if (isToolMessage(message)) {\n const messageName =\n message.name ??\n inferToolNameFromPreviousMessages(message, previousMessages);\n if (messageName === undefined) {\n throw new Error(\n `Google 
requires a tool name for each tool call response, and we could not infer a called tool name for ToolMessage \"${message.id}\" from your passed messages. Please populate a \"name\" field on that ToolMessage explicitly.`\n );\n }\n\n const result = Array.isArray(message.content)\n ? (message.content\n .map((c) => _convertLangChainContentToPart(c, isMultimodalModel))\n .filter((p) => p !== undefined) as Part[])\n : message.content;\n\n if (message.status === \"error\") {\n return [\n {\n functionResponse: {\n name: messageName,\n // The API expects an object with an `error` field if the function call fails.\n // `error` must be a valid object (not a string or array), so we wrap `message.content` here\n response: { error: { details: result } },\n },\n },\n ];\n }\n\n return [\n {\n functionResponse: {\n name: messageName,\n // again, can't have a string or array value for `response`, so we wrap it as an object here\n response: { result },\n },\n },\n ];\n }\n\n let functionCalls: FunctionCallPart[] = [];\n const messageParts: Part[] = [];\n\n if (typeof message.content === \"string\" && message.content) {\n messageParts.push({ text: message.content });\n }\n\n if (Array.isArray(message.content)) {\n messageParts.push(\n ...(message.content\n .map((c) => _convertLangChainContentToPart(c, isMultimodalModel))\n .filter((p) => p !== undefined) as Part[])\n );\n }\n\n const functionThoughtSignatures = message.additional_kwargs?.[\n _FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY\n ] as Record<string, string>;\n\n if (isAIMessage(message) && message.tool_calls?.length) {\n functionCalls = message.tool_calls.map((tc) => {\n const thoughtSignature = iife(() => {\n if (tc.id) {\n const signature = functionThoughtSignatures?.[tc.id];\n if (signature) {\n return signature;\n }\n }\n if (model?.includes(\"gemini-3\")) {\n return DUMMY_SIGNATURE;\n }\n return \"\";\n });\n return {\n functionCall: {\n name: tc.name,\n args: tc.args,\n },\n ...(thoughtSignature ? { thoughtSignature } : {}),\n };\n });\n }\n\n return [...messageParts, ...functionCalls];\n}\n\nexport function convertBaseMessagesToContent(\n messages: BaseMessage[],\n isMultimodalModel: boolean,\n convertSystemMessageToHumanContent: boolean = false,\n model: string\n) {\n return messages.reduce<{\n content: Content[];\n mergeWithPreviousContent: boolean;\n }>(\n (acc, message, index) => {\n if (!isBaseMessage(message)) {\n throw new Error(\"Unsupported message input\");\n }\n const author = getMessageAuthor(message);\n if (author === \"system\" && index !== 0) {\n throw new Error(\"System message should be the first one\");\n }\n const role = convertAuthorToRole(author);\n\n const prevContent = acc.content[acc.content.length];\n if (\n !acc.mergeWithPreviousContent &&\n prevContent &&\n prevContent.role === role\n ) {\n throw new Error(\n \"Google Generative AI requires alternate messages between authors\"\n );\n }\n\n const parts = convertMessageContentToParts(\n message,\n isMultimodalModel,\n messages.slice(0, index),\n model\n );\n\n if (acc.mergeWithPreviousContent) {\n const prevContent = acc.content[acc.content.length - 1];\n if (!prevContent) {\n throw new Error(\n \"There was a problem parsing your system message. 
Please try a prompt without one.\"\n );\n }\n prevContent.parts.push(...parts);\n\n return {\n mergeWithPreviousContent: false,\n content: acc.content,\n };\n }\n let actualRole = role;\n if (\n actualRole === \"function\" ||\n (actualRole === \"system\" && !convertSystemMessageToHumanContent)\n ) {\n // GenerativeAI API will throw an error if the role is not \"user\" or \"model.\"\n actualRole = \"user\";\n }\n const content: Content = {\n role: actualRole,\n parts,\n };\n return {\n mergeWithPreviousContent:\n author === \"system\" && !convertSystemMessageToHumanContent,\n content: [...acc.content, content],\n };\n },\n { content: [], mergeWithPreviousContent: false }\n ).content;\n}\n\nexport function mapGenerateContentResultToChatResult(\n response: EnhancedGenerateContentResponse,\n extra?: {\n usageMetadata: UsageMetadata | undefined;\n }\n): ChatResult {\n // if rejected or error, return empty generations with reason in filters\n if (\n !response.candidates ||\n response.candidates.length === 0 ||\n !response.candidates[0]\n ) {\n return {\n generations: [],\n llmOutput: {\n filters: response.promptFeedback,\n },\n };\n }\n const [candidate] = response.candidates;\n const { content: candidateContent, ...generationInfo } = candidate;\n const functionCalls = candidateContent.parts.reduce((acc, p) => {\n if (\"functionCall\" in p && p.functionCall) {\n acc.push({\n ...p,\n id:\n \"id\" in p.functionCall && typeof p.functionCall.id === \"string\"\n ? p.functionCall.id\n : uuidv4(),\n });\n }\n return acc;\n }, [] as (FunctionCallPart & { id: string })[]);\n let content: MessageContent | undefined;\n\n if (\n Array.isArray(candidateContent?.parts) &&\n candidateContent.parts.length === 1 &&\n candidateContent.parts[0].text\n ) {\n content = candidateContent.parts[0].text;\n } else if (\n Array.isArray(candidateContent?.parts) &&\n candidateContent.parts.length > 0\n ) {\n content = candidateContent.parts.map((p) => {\n if (\"text\" in p) {\n return {\n type: \"text\",\n text: p.text,\n };\n } else if (\"inlineData\" in p) {\n return {\n type: \"inlineData\",\n inlineData: p.inlineData,\n };\n } else if (\"functionCall\" in p) {\n return {\n type: \"functionCall\",\n functionCall: p.functionCall,\n };\n } else if (\"functionResponse\" in p) {\n return {\n type: \"functionResponse\",\n functionResponse: p.functionResponse,\n };\n } else if (\"fileData\" in p) {\n return {\n type: \"fileData\",\n fileData: p.fileData,\n };\n } else if (\"executableCode\" in p) {\n return {\n type: \"executableCode\",\n executableCode: p.executableCode,\n };\n } else if (\"codeExecutionResult\" in p) {\n return {\n type: \"codeExecutionResult\",\n codeExecutionResult: p.codeExecutionResult,\n };\n }\n return p;\n });\n } else {\n // no content returned - likely due to abnormal stop reason, e.g. malformed function call\n content = [];\n }\n\n const functionThoughtSignatures = functionCalls?.reduce((acc, fc) => {\n if (\"thoughtSignature\" in fc && typeof fc.thoughtSignature === \"string\") {\n acc[fc.id] = fc.thoughtSignature;\n }\n return acc;\n }, {} as Record<string, string>);\n\n let text = \"\";\n if (typeof content === \"string\") {\n text = content;\n } else if (Array.isArray(content) && content.length > 0) {\n const block = content.find((b) => \"text\" in b) as\n | { text: string }\n | undefined;\n text = block?.text ?? text;\n }\n\n const generation: ChatGeneration = {\n text,\n message: new AIMessage({\n content: content ?? 
\"\",\n tool_calls: functionCalls?.map((fc) => ({\n type: \"tool_call\",\n id: fc.id,\n name: fc.functionCall.name,\n args: fc.functionCall.args,\n })),\n additional_kwargs: {\n ...generationInfo,\n [_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY]: functionThoughtSignatures,\n },\n usage_metadata: extra?.usageMetadata,\n }),\n generationInfo,\n };\n\n return {\n generations: [generation],\n llmOutput: {\n tokenUsage: {\n promptTokens: extra?.usageMetadata?.input_tokens,\n completionTokens: extra?.usageMetadata?.output_tokens,\n totalTokens: extra?.usageMetadata?.total_tokens,\n },\n },\n };\n}\n\nexport function convertResponseContentToChatGenerationChunk(\n response: EnhancedGenerateContentResponse,\n extra: {\n usageMetadata?: UsageMetadata | undefined;\n index: number;\n }\n): ChatGenerationChunk | null {\n if (!response.candidates || response.candidates.length === 0) {\n return null;\n }\n const [candidate] = response.candidates;\n const { content: candidateContent, ...generationInfo } = candidate;\n const functionCalls = candidateContent.parts.reduce((acc, p) => {\n if (\"functionCall\" in p && p.functionCall) {\n acc.push({\n ...p,\n id:\n \"id\" in p.functionCall && typeof p.functionCall.id === \"string\"\n ? p.functionCall.id\n : uuidv4(),\n });\n }\n return acc;\n }, [] as (FunctionCallPart & { id: string })[]);\n let content: MessageContent | undefined;\n // Checks if some parts do not have text. If false, it means that the content is a string.\n if (\n Array.isArray(candidateContent?.parts) &&\n candidateContent.parts.every((p) => \"text\" in p)\n ) {\n content = candidateContent.parts.map((p) => p.text).join(\"\");\n } else if (Array.isArray(candidateContent?.parts)) {\n content = candidateContent.parts.map((p) => {\n if (\"text\" in p) {\n return {\n type: \"text\",\n text: p.text,\n };\n } else if (\"inlineData\" in p) {\n return {\n type: \"inlineData\",\n inlineData: p.inlineData,\n };\n } else if (\"functionCall\" in p) {\n return {\n type: \"functionCall\",\n functionCall: p.functionCall,\n };\n } else if (\"functionResponse\" in p) {\n return {\n type: \"functionResponse\",\n functionResponse: p.functionResponse,\n };\n } else if (\"fileData\" in p) {\n return {\n type: \"fileData\",\n fileData: p.fileData,\n };\n } else if (\"executableCode\" in p) {\n return {\n type: \"executableCode\",\n executableCode: p.executableCode,\n };\n } else if (\"codeExecutionResult\" in p) {\n return {\n type: \"codeExecutionResult\",\n codeExecutionResult: p.codeExecutionResult,\n };\n }\n return p;\n });\n } else {\n // no content returned - likely due to abnormal stop reason, e.g. malformed function call\n content = [];\n }\n\n let text = \"\";\n if (content && typeof content === \"string\") {\n text = content;\n } else if (Array.isArray(content)) {\n const block = content.find((b) => \"text\" in b) as\n | { text: string }\n | undefined;\n text = block?.text ?? 
\"\";\n }\n\n const toolCallChunks: ToolCallChunk[] = [];\n if (functionCalls) {\n toolCallChunks.push(\n ...functionCalls.map((fc) => ({\n ...fc,\n args: JSON.stringify(fc.functionCall.args),\n index: extra.index,\n type: \"tool_call_chunk\" as const,\n }))\n );\n }\n\n const functionThoughtSignatures = functionCalls?.reduce((acc, fc) => {\n if (\"thoughtSignature\" in fc && typeof fc.thoughtSignature === \"string\") {\n acc[fc.id] = fc.thoughtSignature;\n }\n return acc;\n }, {} as Record<string, string>);\n\n return new ChatGenerationChunk({\n text,\n message: new AIMessageChunk({\n content: content || \"\",\n name: !candidateContent ? undefined : candidateContent.role,\n tool_call_chunks: toolCallChunks,\n // Each chunk can have unique \"generationInfo\", and merging strategy is unclear,\n // so leave blank for now.\n additional_kwargs: {\n [_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY]: functionThoughtSignatures,\n },\n response_metadata: {\n model_provider: \"google-genai\",\n },\n usage_metadata: extra.usageMetadata,\n }),\n generationInfo,\n });\n}\n\nexport function convertToGenerativeAITools(\n tools: GoogleGenerativeAIToolType[]\n): GoogleGenerativeAIFunctionDeclarationsTool[] {\n if (\n tools.every(\n (tool) =>\n \"functionDeclarations\" in tool &&\n Array.isArray(tool.functionDeclarations)\n )\n ) {\n return tools as GoogleGenerativeAIFunctionDeclarationsTool[];\n }\n return [\n {\n functionDeclarations: tools.map(\n (tool): GenerativeAIFunctionDeclaration => {\n if (isLangChainTool(tool)) {\n const jsonSchema = schemaToGenerativeAIParameters(tool.schema);\n if (\n jsonSchema.type === \"object\" &&\n \"properties\" in jsonSchema &&\n Object.keys(jsonSchema.properties).length === 0\n ) {\n return {\n name: tool.name,\n description: tool.description,\n };\n }\n return {\n name: tool.name,\n description: tool.description,\n parameters: jsonSchema,\n };\n }\n if (isOpenAITool(tool)) {\n return {\n name: tool.function.name,\n description:\n tool.function.description ?? 
`A function available to call.`,\n parameters: jsonSchemaToGeminiParameters(\n tool.function.parameters\n ),\n };\n }\n return tool as unknown as GenerativeAIFunctionDeclaration;\n }\n ),\n },\n ];\n}\n"],"mappings":";;;;;;;;;AA6CA,MAAa,4CACX;AACF,MAAM,kBACJ;AAEF,MAAM,OAAO,CAACA,OAAqB,IAAI;AAEvC,SAAgB,iBAAiBC,SAAsB;CACrD,MAAM,OAAO,QAAQ,UAAU;AAC/B,KAAIC,sCAAY,WAAW,QAAQ,CACjC,QAAO,QAAQ;AAEjB,KAAI,SAAS,OACX,QAAO;AAET,QAAO,QAAQ,QAAQ;AACxB;;;;;;;AAQD,SAAgB,oBACdC,QACiC;AACjC,SAAQ,QAAR;EAKE,KAAK;EACL,KAAK;EACL,KAAK,QACH,QAAO;EACT,KAAK,SACH,QAAO;EACT,KAAK,QACH,QAAO;EACT,KAAK;EACL,KAAK,WACH,QAAO;EACT,QACE,OAAM,IAAI,MAAM,CAAC,8BAA8B,EAAE,QAAQ;CAC5D;AACF;AAED,SAAS,oBAAoBC,SAAsC;AACjE,KAAI,cAAc,WAAW,UAAU,QACrC,QAAO,EACL,YAAY;EACV,UAAU,QAAQ;EAClB,MAAM,QAAQ;CACf,EACF;AAEH,KAAI,cAAc,WAAW,aAAa,QACxC,QAAO,EACL,UAAU;EACR,UAAU,QAAQ;EAClB,SAAS,QAAQ;CAClB,EACF;AAGH,OAAM,IAAI,MAAM;AACjB;AAED,SAAS,kCACPC,SACAC,kBACoB;AACpB,QAAO,iBACJ,IAAI,CAAC,QAAQ;AACZ,iDAAgB,IAAI,CAClB,QAAO,IAAI,cAAc,CAAE;AAE7B,SAAO,CAAE;CACV,EAAC,CACD,MAAM,CACN,KAAK,CAAC,aAAa;AAClB,SAAO,SAAS,OAAO,QAAQ;CAChC,EAAC,EAAE;AACP;AAED,SAAS,kCAAkCC,mBAA4B;CACrE,MAAMC,gCAKD;EACH,cAAc;EAEd,sBAAsB,OAAO;AAC3B,UAAO,EACL,MAAM,MAAM,KACb;EACF;EAED,uBAAuB,OAAsC;AAC3D,OAAI,CAAC,kBACH,OAAM,IAAI,MAAM;AAElB,OAAI,MAAM,gBAAgB,OAAO;IAC/B,MAAM,yDAA0B,EAAE,SAAS,MAAM,IAAK,EAAC;AACvD,QAAI,KACF,QAAO,EACL,YAAY;KACV,UAAU,KAAK;KACf,MAAM,KAAK;IACZ,EACF;QAED,QAAO,EACL,UAAU;KACR,UAAU,MAAM,aAAa;KAC7B,SAAS,MAAM;IAChB,EACF;GAEJ;AAED,OAAI,MAAM,gBAAgB,SACxB,QAAO,EACL,YAAY;IACV,UAAU,MAAM,aAAa;IAC7B,MAAM,MAAM;GACb,EACF;AAGH,SAAM,IAAI,MAAM,CAAC,yBAAyB,EAAE,MAAM,aAAa;EAChE;EAED,uBAAuB,OAAsC;AAC3D,OAAI,CAAC,kBACH,OAAM,IAAI,MAAM;AAElB,OAAI,MAAM,gBAAgB,OAAO;IAC/B,MAAM,yDAA0B,EAAE,SAAS,MAAM,IAAK,EAAC;AACvD,QAAI,KACF,QAAO,EACL,YAAY;KACV,UAAU,KAAK;KACf,MAAM,KAAK;IACZ,EACF;QAED,QAAO,EACL,UAAU;KACR,UAAU,MAAM,aAAa;KAC7B,SAAS,MAAM;IAChB,EACF;GAEJ;AAED,OAAI,MAAM,gBAAgB,SACxB,QAAO,EACL,YAAY;IACV,UAAU,MAAM,aAAa;IAC7B,MAAM,MAAM;GACb,EACF;AAGH,SAAM,IAAI,MAAM,CAAC,yBAAyB,EAAE,MAAM,aAAa;EAChE;EAED,sBAAsB,OAAiD;AACrE,OAAI,CAAC,kBACH,OAAM,IAAI,MAAM;AAElB,OAAI,MAAM,gBAAgB,OACxB,QAAO,EACL,MAAM,MAAM,KACb;AAEH,OAAI,MAAM,gBAAgB,OAAO;IAC/B,MAAM,yDAA0B,EAAE,SAAS,MAAM,IAAK,EAAC;AACvD,QAAI,KACF,QAAO,EACL,YAAY;KACV,UAAU,KAAK;KACf,MAAM,KAAK;IACZ,EACF;QAED,QAAO,EACL,UAAU;KACR,UAAU,MAAM,aAAa;KAC7B,SAAS,MAAM;IAChB,EACF;GAEJ;AAED,OAAI,MAAM,gBAAgB,SACxB,QAAO,EACL,YAAY;IACV,UAAU,MAAM,aAAa;IAC7B,MAAM,MAAM;GACb,EACF;AAEH,SAAM,IAAI,MAAM,CAAC,yBAAyB,EAAE,MAAM,aAAa;EAChE;CACF;AACD,QAAO;AACR;AAED,SAAS,+BACPJ,SACAG,mBACkB;AAClB,uDAAuB,QAAQ,CAC7B,qEACE,SACA,kCAAkC,kBAAkB,CACrD;AAGH,KAAI,QAAQ,SAAS,OACnB,QAAO,EAAE,MAAM,QAAQ,KAAM;UACpB,QAAQ,SAAS,iBAC1B,QAAO,EAAE,gBAAgB,QAAQ,eAAgB;UACxC,QAAQ,SAAS,sBAC1B,QAAO,EAAE,qBAAqB,QAAQ,oBAAqB;UAClD,QAAQ,SAAS,aAAa;AACvC,MAAI,CAAC,kBACH,OAAM,IAAI,MAAM,CAAC,kCAAkC,CAAC;EAEtD,IAAI;AACJ,MAAI,OAAO,QAAQ,cAAc,UAC/B,SAAS,QAAQ;WAEjB,OAAO,QAAQ,cAAc,YAC7B,SAAS,QAAQ,WAEjB,SAAS,QAAQ,UAAU;MAE3B,OAAM,IAAI,MAAM;EAElB,MAAM,CAAC,IAAI,KAAK,GAAG,OAAO,MAAM,IAAI;AACpC,MAAI,CAAC,GAAG,WAAW,QAAQ,CACzB,OAAM,IAAI,MAAM;EAGlB,MAAM,CAAC,UAAU,SAAS,GAAG,GAAG,QAAQ,UAAU,GAAG,CAAC,MAAM,IAAI;AAChE,MAAI,aAAa,SACf,OAAM,IAAI,MAAM;AAGlB,SAAO,EACL,YAAY;GACV;GACA;EACD,EACF;CACF,WAAU,QAAQ,SAAS,QAC1B,QAAO,oBAAoB,QAAQ;UAC1B,QAAQ,SAAS,WAC1B,QAAO,EACL,cAAc;EACZ,MAAM,QAAQ;EACd,MAAM,QAAQ;CACf,EACF;UAED,QAAQ,MAAM,SAAS,IAAI,IAE3B,QAAQ,KAAK,MAAM,IAAI,CAAC,WAAW,KACnC,UAAU,WACV,OAAO,QAAQ,SAAS,SAExB,QAAO,EACL,YAAY;EACV,UAAU,QAAQ;EAClB,MAAM,QAAQ;CACf,EACF;UACQ,kBAAkB,QAE3B,QAAO;UAEH,UAAU,QACZ,OAAM,IAAI,MAAM,CAAC,qBAAqB,EAAE,QAAQ,MAAM;KAEtD,OAAM,IAAI,
MAAM,CAAC,gBAAgB,EAAE,KAAK,UAAU,QAAQ,EAAE;AAGjE;AAED,SAAgB,6BACdN,SACAM,mBACAD,kBACAG,OACQ;AACR,kDAAkB,QAAQ,EAAE;EAC1B,MAAM,cACJ,QAAQ,QACR,kCAAkC,SAAS,iBAAiB;AAC9D,MAAI,gBAAgB,OAClB,OAAM,IAAI,MACR,CAAC,oHAAoH,EAAE,QAAQ,GAAG,2FAA2F,CAAC;EAIlO,MAAM,SAAS,MAAM,QAAQ,QAAQ,QAAQ,GACxC,QAAQ,QACN,IAAI,CAAC,MAAM,+BAA+B,GAAG,kBAAkB,CAAC,CAChE,OAAO,CAAC,MAAM,MAAM,OAAU,GACjC,QAAQ;AAEZ,MAAI,QAAQ,WAAW,QACrB,QAAO,CACL,EACE,kBAAkB;GAChB,MAAM;GAGN,UAAU,EAAE,OAAO,EAAE,SAAS,OAAQ,EAAE;EACzC,EACF,CACF;AAGH,SAAO,CACL,EACE,kBAAkB;GAChB,MAAM;GAEN,UAAU,EAAE,OAAQ;EACrB,EACF,CACF;CACF;CAED,IAAIC,gBAAoC,CAAE;CAC1C,MAAMC,eAAuB,CAAE;AAE/B,KAAI,OAAO,QAAQ,YAAY,YAAY,QAAQ,SACjD,aAAa,KAAK,EAAE,MAAM,QAAQ,QAAS,EAAC;AAG9C,KAAI,MAAM,QAAQ,QAAQ,QAAQ,EAChC,aAAa,KACX,GAAI,QAAQ,QACT,IAAI,CAAC,MAAM,+BAA+B,GAAG,kBAAkB,CAAC,CAChE,OAAO,CAAC,MAAM,MAAM,OAAU,CAClC;CAGH,MAAM,4BAA4B,QAAQ,oBACxC;AAGF,gDAAgB,QAAQ,IAAI,QAAQ,YAAY,QAC9C,gBAAgB,QAAQ,WAAW,IAAI,CAAC,OAAO;EAC7C,MAAM,mBAAmB,KAAK,MAAM;AAClC,OAAI,GAAG,IAAI;IACT,MAAM,YAAY,4BAA4B,GAAG;AACjD,QAAI,UACF,QAAO;GAEV;AACD,OAAI,OAAO,SAAS,WAAW,CAC7B,QAAO;AAET,UAAO;EACR,EAAC;AACF,SAAO;GACL,cAAc;IACZ,MAAM,GAAG;IACT,MAAM,GAAG;GACV;GACD,GAAI,mBAAmB,EAAE,iBAAkB,IAAG,CAAE;EACjD;CACF,EAAC;AAGJ,QAAO,CAAC,GAAG,cAAc,GAAG,aAAc;AAC3C;AAED,SAAgB,6BACdC,UACAL,mBACAM,qCAA8C,OAC9CC,OACA;AACA,QAAO,SAAS,OAId,CAAC,KAAK,SAAS,UAAU;AACvB,MAAI,8CAAe,QAAQ,CACzB,OAAM,IAAI,MAAM;EAElB,MAAM,SAAS,iBAAiB,QAAQ;AACxC,MAAI,WAAW,YAAY,UAAU,EACnC,OAAM,IAAI,MAAM;EAElB,MAAM,OAAO,oBAAoB,OAAO;EAExC,MAAM,cAAc,IAAI,QAAQ,IAAI,QAAQ;AAC5C,MACE,CAAC,IAAI,4BACL,eACA,YAAY,SAAS,KAErB,OAAM,IAAI,MACR;EAIJ,MAAM,QAAQ,6BACZ,SACA,mBACA,SAAS,MAAM,GAAG,MAAM,EACxB,MACD;AAED,MAAI,IAAI,0BAA0B;GAChC,MAAMC,gBAAc,IAAI,QAAQ,IAAI,QAAQ,SAAS;AACrD,OAAI,CAACA,cACH,OAAM,IAAI,MACR;GAGJA,cAAY,MAAM,KAAK,GAAG,MAAM;AAEhC,UAAO;IACL,0BAA0B;IAC1B,SAAS,IAAI;GACd;EACF;EACD,IAAI,aAAa;AACjB,MACE,eAAe,cACd,eAAe,YAAY,CAAC,oCAG7B,aAAa;EAEf,MAAMC,UAAmB;GACvB,MAAM;GACN;EACD;AACD,SAAO;GACL,0BACE,WAAW,YAAY,CAAC;GAC1B,SAAS,CAAC,GAAG,IAAI,SAAS,OAAQ;EACnC;CACF,GACD;EAAE,SAAS,CAAE;EAAE,0BAA0B;CAAO,EACjD,CAAC;AACH;AAED,SAAgB,qCACdC,UACAC,OAGY;AAEZ,KACE,CAAC,SAAS,cACV,SAAS,WAAW,WAAW,KAC/B,CAAC,SAAS,WAAW,GAErB,QAAO;EACL,aAAa,CAAE;EACf,WAAW,EACT,SAAS,SAAS,eACnB;CACF;CAEH,MAAM,CAAC,UAAU,GAAG,SAAS;CAC7B,MAAM,EAAE,SAAS,iBAAkB,GAAG,gBAAgB,GAAG;CACzD,MAAM,gBAAgB,iBAAiB,MAAM,OAAO,CAAC,KAAK,MAAM;AAC9D,MAAI,kBAAkB,KAAK,EAAE,cAC3B,IAAI,KAAK;GACP,GAAG;GACH,IACE,QAAQ,EAAE,gBAAgB,OAAO,EAAE,aAAa,OAAO,WACnD,EAAE,aAAa,mBACP;EACf,EAAC;AAEJ,SAAO;CACR,GAAE,CAAE,EAA0C;CAC/C,IAAIC;AAEJ,KACE,MAAM,QAAQ,kBAAkB,MAAM,IACtC,iBAAiB,MAAM,WAAW,KAClC,iBAAiB,MAAM,GAAG,MAE1B,UAAU,iBAAiB,MAAM,GAAG;UAEpC,MAAM,QAAQ,kBAAkB,MAAM,IACtC,iBAAiB,MAAM,SAAS,GAEhC,UAAU,iBAAiB,MAAM,IAAI,CAAC,MAAM;AAC1C,MAAI,UAAU,EACZ,QAAO;GACL,MAAM;GACN,MAAM,EAAE;EACT;WACQ,gBAAgB,EACzB,QAAO;GACL,MAAM;GACN,YAAY,EAAE;EACf;WACQ,kBAAkB,EAC3B,QAAO;GACL,MAAM;GACN,cAAc,EAAE;EACjB;WACQ,sBAAsB,EAC/B,QAAO;GACL,MAAM;GACN,kBAAkB,EAAE;EACrB;WACQ,cAAc,EACvB,QAAO;GACL,MAAM;GACN,UAAU,EAAE;EACb;WACQ,oBAAoB,EAC7B,QAAO;GACL,MAAM;GACN,gBAAgB,EAAE;EACnB;WACQ,yBAAyB,EAClC,QAAO;GACL,MAAM;GACN,qBAAqB,EAAE;EACxB;AAEH,SAAO;CACR,EAAC;MAGF,UAAU,CAAE;CAGd,MAAM,4BAA4B,eAAe,OAAO,CAAC,KAAK,OAAO;AACnE,MAAI,sBAAsB,MAAM,OAAO,GAAG,qBAAqB,UAC7D,IAAI,GAAG,MAAM,GAAG;AAElB,SAAO;CACR,GAAE,CAAE,EAA2B;CAEhC,IAAI,OAAO;AACX,KAAI,OAAO,YAAY,UACrB,OAAO;UACE,MAAM,QAAQ,QAAQ,IAAI,QAAQ,SAAS,GAAG;EACvD,MAAM,QAAQ,QAAQ,KAAK,CAAC,MAAM,UAAU,EAAE;EAG9C,OAAO,OAAO,QAAQ;CACvB;CAED,MAAMC,aAA6B;EACjC;EACA,SAAS,IAAIC,oCAAU;GACrB,SAAS,WAAW;GACpB,YAAY,eAAe,IAAI,CAAC,QAAQ;IACtC,MAAM;IACN,IAAI,GAAG;IACP,MAAM,GAAG,aAAa;IAC
tB,MAAM,GAAG,aAAa;GACvB,GAAE;GACH,mBAAmB;IACjB,GAAG;KACF,4CAA4C;GAC9C;GACD,gBAAgB,OAAO;EACxB;EACD;CACD;AAED,QAAO;EACL,aAAa,CAAC,UAAW;EACzB,WAAW,EACT,YAAY;GACV,cAAc,OAAO,eAAe;GACpC,kBAAkB,OAAO,eAAe;GACxC,aAAa,OAAO,eAAe;EACpC,EACF;CACF;AACF;AAED,SAAgB,4CACdJ,UACAK,OAI4B;AAC5B,KAAI,CAAC,SAAS,cAAc,SAAS,WAAW,WAAW,EACzD,QAAO;CAET,MAAM,CAAC,UAAU,GAAG,SAAS;CAC7B,MAAM,EAAE,SAAS,iBAAkB,GAAG,gBAAgB,GAAG;CACzD,MAAM,gBAAgB,iBAAiB,MAAM,OAAO,CAAC,KAAK,MAAM;AAC9D,MAAI,kBAAkB,KAAK,EAAE,cAC3B,IAAI,KAAK;GACP,GAAG;GACH,IACE,QAAQ,EAAE,gBAAgB,OAAO,EAAE,aAAa,OAAO,WACnD,EAAE,aAAa,mBACP;EACf,EAAC;AAEJ,SAAO;CACR,GAAE,CAAE,EAA0C;CAC/C,IAAIH;AAEJ,KACE,MAAM,QAAQ,kBAAkB,MAAM,IACtC,iBAAiB,MAAM,MAAM,CAAC,MAAM,UAAU,EAAE,EAEhD,UAAU,iBAAiB,MAAM,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC,KAAK,GAAG;UACnD,MAAM,QAAQ,kBAAkB,MAAM,EAC/C,UAAU,iBAAiB,MAAM,IAAI,CAAC,MAAM;AAC1C,MAAI,UAAU,EACZ,QAAO;GACL,MAAM;GACN,MAAM,EAAE;EACT;WACQ,gBAAgB,EACzB,QAAO;GACL,MAAM;GACN,YAAY,EAAE;EACf;WACQ,kBAAkB,EAC3B,QAAO;GACL,MAAM;GACN,cAAc,EAAE;EACjB;WACQ,sBAAsB,EAC/B,QAAO;GACL,MAAM;GACN,kBAAkB,EAAE;EACrB;WACQ,cAAc,EACvB,QAAO;GACL,MAAM;GACN,UAAU,EAAE;EACb;WACQ,oBAAoB,EAC7B,QAAO;GACL,MAAM;GACN,gBAAgB,EAAE;EACnB;WACQ,yBAAyB,EAClC,QAAO;GACL,MAAM;GACN,qBAAqB,EAAE;EACxB;AAEH,SAAO;CACR,EAAC;MAGF,UAAU,CAAE;CAGd,IAAI,OAAO;AACX,KAAI,WAAW,OAAO,YAAY,UAChC,OAAO;UACE,MAAM,QAAQ,QAAQ,EAAE;EACjC,MAAM,QAAQ,QAAQ,KAAK,CAAC,MAAM,UAAU,EAAE;EAG9C,OAAO,OAAO,QAAQ;CACvB;CAED,MAAMI,iBAAkC,CAAE;AAC1C,KAAI,eACF,eAAe,KACb,GAAG,cAAc,IAAI,CAAC,QAAQ;EAC5B,GAAG;EACH,MAAM,KAAK,UAAU,GAAG,aAAa,KAAK;EAC1C,OAAO,MAAM;EACb,MAAM;CACP,GAAE,CACJ;CAGH,MAAM,4BAA4B,eAAe,OAAO,CAAC,KAAK,OAAO;AACnE,MAAI,sBAAsB,MAAM,OAAO,GAAG,qBAAqB,UAC7D,IAAI,GAAG,MAAM,GAAG;AAElB,SAAO;CACR,GAAE,CAAE,EAA2B;AAEhC,QAAO,IAAIC,6CAAoB;EAC7B;EACA,SAAS,IAAIC,yCAAe;GAC1B,SAAS,WAAW;GACpB,MAAM,CAAC,mBAAmB,SAAY,iBAAiB;GACvD,kBAAkB;GAGlB,mBAAmB,GAChB,4CAA4C,0BAC9C;GACD,mBAAmB,EACjB,gBAAgB,eACjB;GACD,gBAAgB,MAAM;EACvB;EACD;CACD;AACF;AAED,SAAgB,2BACdC,OAC8C;AAC9C,KACE,MAAM,MACJ,CAAC,SACC,0BAA0B,QAC1B,MAAM,QAAQ,KAAK,qBAAqB,CAC3C,CAED,QAAO;AAET,QAAO,CACL,EACE,sBAAsB,MAAM,IAC1B,CAAC,SAA0C;AACzC,mEAAoB,KAAK,EAAE;GACzB,MAAM,aAAaC,+DAA+B,KAAK,OAAO;AAC9D,OACE,WAAW,SAAS,YACpB,gBAAgB,cAChB,OAAO,KAAK,WAAW,WAAW,CAAC,WAAW,EAE9C,QAAO;IACL,MAAM,KAAK;IACX,aAAa,KAAK;GACnB;AAEH,UAAO;IACL,MAAM,KAAK;IACX,aAAa,KAAK;IAClB,YAAY;GACb;EACF;AACD,8DAAiB,KAAK,CACpB,QAAO;GACL,MAAM,KAAK,SAAS;GACpB,aACE,KAAK,SAAS,eAAe,CAAC,6BAA6B,CAAC;GAC9D,YAAYC,6DACV,KAAK,SAAS,WACf;EACF;AAEH,SAAO;CACR,EACF,CACF,CACF;AACF"}

package/dist/utils/common.js CHANGED

@@ -6,6 +6,9 @@ import { isOpenAITool } from "@langchain/core/language_models/base";
 import { v4 } from "uuid";
 
 //#region src/utils/common.ts
+const _FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY = "__gemini_function_call_thought_signatures__";
+const DUMMY_SIGNATURE = "ErYCCrMCAdHtim9kOoOkrPiCNVsmlpMIKd7ZMxgiFbVQOkgp7nlLcDMzVsZwIzvuT7nQROivoXA72ccC2lSDvR0Gh7dkWaGuj7ctv6t7ZceHnecx0QYa+ix8tYpRfjhyWozQ49lWiws6+YGjCt10KRTyWsZ2h6O7iHTYJwKIRwGUHRKy/qK/6kFxJm5ML00gLq4D8s5Z6DBpp2ZlR+uF4G8jJgeWQgyHWVdx2wGYElaceVAc66tZdPQRdOHpWtgYSI1YdaXgVI8KHY3/EfNc2YqqMIulvkDBAnuMhkAjV9xmBa54Tq+ih3Im4+r3DzqhGqYdsSkhS0kZMwte4Hjs65dZzCw9lANxIqYi1DJ639WNPYihp/DCJCos7o+/EeSPJaio5sgWDyUnMGkY1atsJZ+m7pj7DD5tvQ==";
+const iife = (fn) => fn();
 function getMessageAuthor(message) {
 const type = message._getType();
 if (ChatMessage.isInstance(message)) return message.role;
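Annotation on the hunk above (a hedged sketch, not part of the package diff): the new map key stores Gemini "thought signatures" on the message, keyed by tool-call id, so they can be echoed back on a later turn. Everything below except the key string and the `AIMessage` class is illustrative.

    import { AIMessage } from "@langchain/core/messages";

    // The key defined in the dist code above; the values here are made up for illustration.
    const SIGNATURES_KEY = "__gemini_function_call_thought_signatures__";

    const msg = new AIMessage({
      content: "",
      tool_calls: [{ type: "tool_call", id: "call-1", name: "get_weather", args: { city: "Paris" } }],
      additional_kwargs: { [SIGNATURES_KEY]: { "call-1": "<signature from Gemini>" } },
    });

    // Later turns can look a signature up by the tool call's id:
    const sigs = msg.additional_kwargs[SIGNATURES_KEY] as Record<string, string> | undefined;
    const signature = sigs?.["call-1"];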
@@ -148,7 +151,7 @@ function _convertLangChainContentToPart(content, isMultimodalModel) {
 else if ("type" in content) throw new Error(`Unknown content type ${content.type}`);
 else throw new Error(`Unknown content ${JSON.stringify(content)}`);
 }
-function convertMessageContentToParts(message, isMultimodalModel, previousMessages) {
+function convertMessageContentToParts(message, isMultimodalModel, previousMessages, model) {
 if (isToolMessage(message)) {
 const messageName = message.name ?? inferToolNameFromPreviousMessages(message, previousMessages);
 if (messageName === void 0) throw new Error(`Google requires a tool name for each tool call response, and we could not infer a called tool name for ToolMessage "${message.id}" from your passed messages. Please populate a "name" field on that ToolMessage explicitly.`);
@@ -166,15 +169,27 @@ function convertMessageContentToParts(message, isMultimodalModel, previousMessag
 const messageParts = [];
 if (typeof message.content === "string" && message.content) messageParts.push({ text: message.content });
 if (Array.isArray(message.content)) messageParts.push(...message.content.map((c) => _convertLangChainContentToPart(c, isMultimodalModel)).filter((p) => p !== void 0));
+const functionThoughtSignatures = message.additional_kwargs?.[_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY];
 if (isAIMessage(message) && message.tool_calls?.length) functionCalls = message.tool_calls.map((tc) => {
-
-
-
-
+const thoughtSignature = iife(() => {
+if (tc.id) {
+const signature = functionThoughtSignatures?.[tc.id];
+if (signature) return signature;
+}
+if (model?.includes("gemini-3")) return DUMMY_SIGNATURE;
+return "";
+});
+return {
+functionCall: {
+name: tc.name,
+args: tc.args
+},
+...thoughtSignature ? { thoughtSignature } : {}
+};
 });
 return [...messageParts, ...functionCalls];
 }
-function convertBaseMessagesToContent(messages, isMultimodalModel, convertSystemMessageToHumanContent = false) {
+function convertBaseMessagesToContent(messages, isMultimodalModel, convertSystemMessageToHumanContent = false, model) {
 return messages.reduce((acc, message, index) => {
 if (!isBaseMessage(message)) throw new Error("Unsupported message input");
 const author = getMessageAuthor(message);
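A hedged reading of the fallback order the hunk above implements, restated as a standalone helper for clarity; `pickThoughtSignature` and the placeholder dummy value are illustrative names, not package exports (the package uses the fixed base64 `DUMMY_SIGNATURE` shown earlier).

    // Illustrative only: mirrors the iife() branch logic added above.
    function pickThoughtSignature(
      toolCallId: string | undefined,
      stored: Record<string, string> | undefined,
      model: string | undefined,
      dummySignature = "<placeholder>" // assumption: stands in for the package's DUMMY_SIGNATURE
    ): string {
      if (toolCallId && stored?.[toolCallId]) return stored[toolCallId]; // prefer the signature saved for this call
      if (model?.includes("gemini-3")) return dummySignature;            // Gemini 3 models expect one, so fall back
      return "";                                                         // otherwise attach nothing
    }

    // e.g. pickThoughtSignature("call-1", { "call-1": "abc" }, "gemini-3-pro") === "abc"  (hypothetical model id)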
@@ -182,7 +197,7 @@ function convertBaseMessagesToContent(messages, isMultimodalModel, convertSystem
 const role = convertAuthorToRole(author);
 const prevContent = acc.content[acc.content.length];
 if (!acc.mergeWithPreviousContent && prevContent && prevContent.role === role) throw new Error("Google Generative AI requires alternate messages between authors");
-const parts = convertMessageContentToParts(message, isMultimodalModel, messages.slice(0, index));
+const parts = convertMessageContentToParts(message, isMultimodalModel, messages.slice(0, index), model);
 if (acc.mergeWithPreviousContent) {
 const prevContent$1 = acc.content[acc.content.length - 1];
 if (!prevContent$1) throw new Error("There was a problem parsing your system message. Please try a prompt without one.");
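The hunk above threads the model id from `convertBaseMessagesToContent` down into `convertMessageContentToParts`. A minimal sketch of calling the updated signature, assuming the helper is reachable from the package's utils module and using a hypothetical model id:

    import { HumanMessage, type BaseMessage } from "@langchain/core/messages";
    import type { Content } from "@google/generative-ai";

    // Assumed to mirror the dist signature above; the real function lives in the package's utils/common module.
    declare function convertBaseMessagesToContent(
      messages: BaseMessage[],
      isMultimodalModel: boolean,
      convertSystemMessageToHumanContent: boolean,
      model: string
    ): Content[];

    const messages: BaseMessage[] = [new HumanMessage("What is the weather in Paris?")];
    // Passing the model id lets nested helpers special-case "gemini-3" models.
    const contents = convertBaseMessagesToContent(messages, true, false, "gemini-3-pro-preview"); // hypothetical id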
@@ -212,9 +227,15 @@ function mapGenerateContentResultToChatResult(response, extra) {
 generations: [],
 llmOutput: { filters: response.promptFeedback }
 };
-const functionCalls = response.functionCalls();
 const [candidate] = response.candidates;
 const { content: candidateContent,...generationInfo } = candidate;
+const functionCalls = candidateContent.parts.reduce((acc, p) => {
+if ("functionCall" in p && p.functionCall) acc.push({
+...p,
+id: "id" in p.functionCall && typeof p.functionCall.id === "string" ? p.functionCall.id : v4()
+});
+return acc;
+}, []);
 let content;
 if (Array.isArray(candidateContent?.parts) && candidateContent.parts.length === 1 && candidateContent.parts[0].text) content = candidateContent.parts[0].text;
 else if (Array.isArray(candidateContent?.parts) && candidateContent.parts.length > 0) content = candidateContent.parts.map((p) => {
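For the hunk above, a hedged sketch of the new id fallback: function-call parts returned by Gemini without an id get a generated UUID so tool calls and their tool responses can still be matched up. The helper name is illustrative, not a package export.

    import { v4 as uuidv4 } from "uuid";

    type FunctionCallPartLike = {
      functionCall: { name: string; args: Record<string, unknown>; id?: string };
    };

    // Illustrative re-statement of the reduce() added above.
    function withStableId<T extends FunctionCallPartLike>(part: T): T & { id: string } {
      const id =
        typeof part.functionCall.id === "string" ? part.functionCall.id : uuidv4();
      return { ...part, id };
    }

    // withStableId({ functionCall: { name: "get_weather", args: { city: "Paris" } } }).id // random UUID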
@@ -249,6 +270,10 @@ function mapGenerateContentResultToChatResult(response, extra) {
 return p;
 });
 else content = [];
+const functionThoughtSignatures = functionCalls?.reduce((acc, fc) => {
+if ("thoughtSignature" in fc && typeof fc.thoughtSignature === "string") acc[fc.id] = fc.thoughtSignature;
+return acc;
+}, {});
 let text = "";
 if (typeof content === "string") text = content;
 else if (Array.isArray(content) && content.length > 0) {
@@ -259,14 +284,16 @@ function mapGenerateContentResultToChatResult(response, extra) {
 text,
 message: new AIMessage({
 content: content ?? "",
-tool_calls: functionCalls?.map((fc) => {
-
-
-
-
-
-
-
+tool_calls: functionCalls?.map((fc) => ({
+type: "tool_call",
+id: fc.id,
+name: fc.functionCall.name,
+args: fc.functionCall.args
+})),
+additional_kwargs: {
+...generationInfo,
+[_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY]: functionThoughtSignatures
+},
 usage_metadata: extra?.usageMetadata
 }),
 generationInfo
@@ -282,9 +309,15 @@ function mapGenerateContentResultToChatResult(response, extra) {
 }
 function convertResponseContentToChatGenerationChunk(response, extra) {
 if (!response.candidates || response.candidates.length === 0) return null;
-const functionCalls = response.functionCalls();
 const [candidate] = response.candidates;
 const { content: candidateContent,...generationInfo } = candidate;
+const functionCalls = candidateContent.parts.reduce((acc, p) => {
+if ("functionCall" in p && p.functionCall) acc.push({
+...p,
+id: "id" in p.functionCall && typeof p.functionCall.id === "string" ? p.functionCall.id : v4()
+});
+return acc;
+}, []);
 let content;
 if (Array.isArray(candidateContent?.parts) && candidateContent.parts.every((p) => "text" in p)) content = candidateContent.parts.map((p) => p.text).join("");
 else if (Array.isArray(candidateContent?.parts)) content = candidateContent.parts.map((p) => {
@@ -328,18 +361,21 @@ function convertResponseContentToChatGenerationChunk(response, extra) {
 const toolCallChunks = [];
 if (functionCalls) toolCallChunks.push(...functionCalls.map((fc) => ({
 ...fc,
-args: JSON.stringify(fc.args),
+args: JSON.stringify(fc.functionCall.args),
 index: extra.index,
-type: "tool_call_chunk"
-id: "id" in fc && typeof fc.id === "string" ? fc.id : v4()
+type: "tool_call_chunk"
 })));
+const functionThoughtSignatures = functionCalls?.reduce((acc, fc) => {
+if ("thoughtSignature" in fc && typeof fc.thoughtSignature === "string") acc[fc.id] = fc.thoughtSignature;
+return acc;
+}, {});
 return new ChatGenerationChunk({
 text,
 message: new AIMessageChunk({
 content: content || "",
 name: !candidateContent ? void 0 : candidateContent.role,
 tool_call_chunks: toolCallChunks,
-additional_kwargs: {},
+additional_kwargs: { [_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY]: functionThoughtSignatures },
 response_metadata: { model_provider: "google-genai" },
 usage_metadata: extra.usageMetadata
 }),