@nocobase/plugin-ai 2.0.0-beta.6 → 2.0.0-beta.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client/6a4c1ee12f864e38.js +10 -0
- package/dist/client/index.js +1 -1
- package/dist/collections/ai-employees.d.ts +1 -0
- package/dist/collections/ai-employees.js +4 -0
- package/dist/externalVersion.js +11 -11
- package/dist/locale/en-US.json +2 -0
- package/dist/locale/zh-CN.json +2 -0
- package/dist/node_modules/@langchain/anthropic/package.json +1 -1
- package/dist/node_modules/@langchain/core/package.json +1 -1
- package/dist/node_modules/@langchain/deepseek/package.json +1 -1
- package/dist/node_modules/@langchain/google-genai/LICENSE +6 -6
- package/dist/node_modules/@langchain/google-genai/dist/_virtual/rolldown_runtime.cjs +25 -0
- package/dist/node_modules/@langchain/google-genai/dist/chat_models.cjs +680 -842
- package/dist/node_modules/@langchain/google-genai/dist/chat_models.d.cts +581 -0
- package/dist/node_modules/@langchain/google-genai/dist/chat_models.d.ts +196 -157
- package/dist/node_modules/@langchain/google-genai/dist/chat_models.js +678 -837
- package/dist/node_modules/@langchain/google-genai/dist/embeddings.cjs +97 -151
- package/dist/node_modules/@langchain/google-genai/dist/embeddings.d.cts +104 -0
- package/dist/node_modules/@langchain/google-genai/dist/embeddings.d.ts +76 -70
- package/dist/node_modules/@langchain/google-genai/dist/embeddings.js +93 -144
- package/dist/node_modules/@langchain/google-genai/dist/index.cjs +252 -18
- package/dist/node_modules/@langchain/google-genai/dist/index.d.cts +3 -0
- package/dist/node_modules/@langchain/google-genai/dist/index.d.ts +3 -2
- package/dist/node_modules/@langchain/google-genai/dist/index.js +4 -2
- package/dist/node_modules/@langchain/google-genai/dist/output_parsers.cjs +47 -75
- package/dist/node_modules/@langchain/google-genai/dist/output_parsers.js +47 -72
- package/dist/node_modules/@langchain/google-genai/dist/profiles.cjs +345 -0
- package/dist/node_modules/@langchain/google-genai/dist/profiles.js +344 -0
- package/dist/node_modules/@langchain/google-genai/dist/types.d.cts +17 -0
- package/dist/node_modules/@langchain/google-genai/dist/types.d.ts +16 -2
- package/dist/node_modules/@langchain/google-genai/dist/utils/common.cjs +419 -551
- package/dist/node_modules/@langchain/google-genai/dist/utils/common.js +417 -545
- package/dist/node_modules/@langchain/google-genai/dist/utils/tools.cjs +65 -102
- package/dist/node_modules/@langchain/google-genai/dist/utils/tools.js +64 -99
- package/dist/node_modules/@langchain/google-genai/dist/utils/zod_to_genai_parameters.cjs +31 -49
- package/dist/node_modules/@langchain/google-genai/dist/utils/zod_to_genai_parameters.js +29 -45
- package/dist/node_modules/@langchain/google-genai/package.json +1 -1
- package/dist/node_modules/@langchain/ollama/package.json +1 -1
- package/dist/node_modules/@langchain/openai/package.json +1 -1
- package/dist/node_modules/nodejs-snowflake/package.json +1 -1
- package/dist/node_modules/zod/package.json +1 -1
- package/dist/node_modules/zod-to-json-schema/package.json +1 -1
- package/dist/server/ai-employees/ai-employee.js +5 -2
- package/dist/server/llm-providers/anthropic.js +4 -5
- package/dist/server/llm-providers/provider.js +5 -1
- package/dist/server/manager/ai-chat-conversation.js +3 -2
- package/dist/server/manager/built-in-manager.js +3 -3
- package/dist/server/migrations/20260107000000-reset-builtin-about.d.ts +14 -0
- package/dist/server/migrations/20260107000000-reset-builtin-about.js +48 -0
- package/dist/server/resource/ai.js +8 -2
- package/dist/server/resource/aiEmployees.d.ts +0 -1
- package/dist/server/resource/aiEmployees.js +0 -15
- package/dist/server/types/ai-message.type.d.ts +2 -0
- package/package.json +3 -3
- package/dist/client/a7f0550514128d80.js +0 -10
- package/dist/node_modules/@langchain/google-genai/dist/output_parsers.d.ts +0 -20
- package/dist/node_modules/@langchain/google-genai/dist/types.cjs +0 -2
- package/dist/node_modules/@langchain/google-genai/dist/types.js +0 -1
- package/dist/node_modules/@langchain/google-genai/dist/utils/common.d.ts +0 -22
- package/dist/node_modules/@langchain/google-genai/dist/utils/tools.d.ts +0 -10
- package/dist/node_modules/@langchain/google-genai/dist/utils/zod_to_genai_parameters.d.ts +0 -14
- package/dist/node_modules/@langchain/google-genai/index.cjs +0 -252
- package/dist/node_modules/@langchain/google-genai/index.d.cts +0 -1
- package/dist/node_modules/@langchain/google-genai/index.d.ts +0 -1
- package/dist/node_modules/@langchain/google-genai/index.js +0 -1
|
@@ -1,572 +1,440 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
const
|
|
11
|
-
const
|
|
12
|
-
const
|
|
13
|
-
const base_1 = require("@langchain/core/language_models/base");
|
|
14
|
-
const uuid_1 = require("uuid");
|
|
15
|
-
const zod_to_genai_parameters_js_1 = require("./zod_to_genai_parameters.cjs");
|
|
1
|
+
const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
|
|
2
|
+
const require_zod_to_genai_parameters = require('./zod_to_genai_parameters.cjs');
|
|
3
|
+
const __langchain_core_messages = require_rolldown_runtime.__toESM(require("@langchain/core/messages"));
|
|
4
|
+
const __langchain_core_outputs = require_rolldown_runtime.__toESM(require("@langchain/core/outputs"));
|
|
5
|
+
const __langchain_core_utils_function_calling = require_rolldown_runtime.__toESM(require("@langchain/core/utils/function_calling"));
|
|
6
|
+
const __langchain_core_language_models_base = require_rolldown_runtime.__toESM(require("@langchain/core/language_models/base"));
|
|
7
|
+
const uuid = require_rolldown_runtime.__toESM(require("uuid"));
|
|
8
|
+
|
|
9
|
+
//#region src/utils/common.ts
|
|
10
|
+
const _FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY = "__gemini_function_call_thought_signatures__";
|
|
11
|
+
const DUMMY_SIGNATURE = "ErYCCrMCAdHtim9kOoOkrPiCNVsmlpMIKd7ZMxgiFbVQOkgp7nlLcDMzVsZwIzvuT7nQROivoXA72ccC2lSDvR0Gh7dkWaGuj7ctv6t7ZceHnecx0QYa+ix8tYpRfjhyWozQ49lWiws6+YGjCt10KRTyWsZ2h6O7iHTYJwKIRwGUHRKy/qK/6kFxJm5ML00gLq4D8s5Z6DBpp2ZlR+uF4G8jJgeWQgyHWVdx2wGYElaceVAc66tZdPQRdOHpWtgYSI1YdaXgVI8KHY3/EfNc2YqqMIulvkDBAnuMhkAjV9xmBa54Tq+ih3Im4+r3DzqhGqYdsSkhS0kZMwte4Hjs65dZzCw9lANxIqYi1DJ639WNPYihp/DCJCos7o+/EeSPJaio5sgWDyUnMGkY1atsJZ+m7pj7DD5tvQ==";
|
|
12
|
+
const iife = (fn) => fn();
|
|
16
13
|
function getMessageAuthor(message) {
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
if (type === "tool") {
|
|
22
|
-
return type;
|
|
23
|
-
}
|
|
24
|
-
return message.name ?? type;
|
|
14
|
+
const type = message._getType();
|
|
15
|
+
if (__langchain_core_messages.ChatMessage.isInstance(message)) return message.role;
|
|
16
|
+
if (type === "tool") return type;
|
|
17
|
+
return message.name ?? type;
|
|
25
18
|
}
|
|
26
19
|
/**
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
20
|
+
* Maps a message type to a Google Generative AI chat author.
|
|
21
|
+
* @param message The message to map.
|
|
22
|
+
* @param model The model to use for mapping.
|
|
23
|
+
* @returns The message type mapped to a Google Generative AI chat author.
|
|
24
|
+
*/
|
|
32
25
|
function convertAuthorToRole(author) {
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
return "system";
|
|
44
|
-
case "human":
|
|
45
|
-
return "user";
|
|
46
|
-
case "tool":
|
|
47
|
-
case "function":
|
|
48
|
-
return "function";
|
|
49
|
-
default:
|
|
50
|
-
throw new Error(`Unknown / unsupported author: ${author}`);
|
|
51
|
-
}
|
|
26
|
+
switch (author) {
|
|
27
|
+
case "supervisor":
|
|
28
|
+
case "ai":
|
|
29
|
+
case "model": return "model";
|
|
30
|
+
case "system": return "system";
|
|
31
|
+
case "human": return "user";
|
|
32
|
+
case "tool":
|
|
33
|
+
case "function": return "function";
|
|
34
|
+
default: throw new Error(`Unknown / unsupported author: ${author}`);
|
|
35
|
+
}
|
|
52
36
|
}
|
|
53
37
|
function messageContentMedia(content) {
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
return {
|
|
64
|
-
fileData: {
|
|
65
|
-
mimeType: content.mimeType,
|
|
66
|
-
fileUri: content.fileUri,
|
|
67
|
-
},
|
|
68
|
-
};
|
|
69
|
-
}
|
|
70
|
-
throw new Error("Invalid media content");
|
|
38
|
+
if ("mimeType" in content && "data" in content) return { inlineData: {
|
|
39
|
+
mimeType: content.mimeType,
|
|
40
|
+
data: content.data
|
|
41
|
+
} };
|
|
42
|
+
if ("mimeType" in content && "fileUri" in content) return { fileData: {
|
|
43
|
+
mimeType: content.mimeType,
|
|
44
|
+
fileUri: content.fileUri
|
|
45
|
+
} };
|
|
46
|
+
throw new Error("Invalid media content");
|
|
71
47
|
}
|
|
72
48
|
function inferToolNameFromPreviousMessages(message, previousMessages) {
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
})
|
|
80
|
-
.flat()
|
|
81
|
-
.find((toolCall) => {
|
|
82
|
-
return toolCall.id === message.tool_call_id;
|
|
83
|
-
})?.name;
|
|
49
|
+
return previousMessages.map((msg) => {
|
|
50
|
+
if ((0, __langchain_core_messages.isAIMessage)(msg)) return msg.tool_calls ?? [];
|
|
51
|
+
return [];
|
|
52
|
+
}).flat().find((toolCall) => {
|
|
53
|
+
return toolCall.id === message.tool_call_id;
|
|
54
|
+
})?.name;
|
|
84
55
|
}
|
|
85
56
|
function _getStandardContentBlockConverter(isMultimodalModel) {
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
inlineData: {
|
|
152
|
-
mimeType: block.mime_type ?? "",
|
|
153
|
-
data: block.data,
|
|
154
|
-
},
|
|
155
|
-
};
|
|
156
|
-
}
|
|
157
|
-
throw new Error(`Unsupported source type: ${block.source_type}`);
|
|
158
|
-
},
|
|
159
|
-
fromStandardFileBlock(block) {
|
|
160
|
-
if (!isMultimodalModel) {
|
|
161
|
-
throw new Error("This model does not support files");
|
|
162
|
-
}
|
|
163
|
-
if (block.source_type === "text") {
|
|
164
|
-
return {
|
|
165
|
-
text: block.text,
|
|
166
|
-
};
|
|
167
|
-
}
|
|
168
|
-
if (block.source_type === "url") {
|
|
169
|
-
const data = (0, messages_1.parseBase64DataUrl)({ dataUrl: block.url });
|
|
170
|
-
if (data) {
|
|
171
|
-
return {
|
|
172
|
-
inlineData: {
|
|
173
|
-
mimeType: data.mime_type,
|
|
174
|
-
data: data.data,
|
|
175
|
-
},
|
|
176
|
-
};
|
|
177
|
-
}
|
|
178
|
-
else {
|
|
179
|
-
return {
|
|
180
|
-
fileData: {
|
|
181
|
-
mimeType: block.mime_type ?? "",
|
|
182
|
-
fileUri: block.url,
|
|
183
|
-
},
|
|
184
|
-
};
|
|
185
|
-
}
|
|
186
|
-
}
|
|
187
|
-
if (block.source_type === "base64") {
|
|
188
|
-
return {
|
|
189
|
-
inlineData: {
|
|
190
|
-
mimeType: block.mime_type ?? "",
|
|
191
|
-
data: block.data,
|
|
192
|
-
},
|
|
193
|
-
};
|
|
194
|
-
}
|
|
195
|
-
throw new Error(`Unsupported source type: ${block.source_type}`);
|
|
196
|
-
},
|
|
197
|
-
};
|
|
198
|
-
return standardContentBlockConverter;
|
|
57
|
+
const standardContentBlockConverter = {
|
|
58
|
+
providerName: "Google Gemini",
|
|
59
|
+
fromStandardTextBlock(block) {
|
|
60
|
+
return { text: block.text };
|
|
61
|
+
},
|
|
62
|
+
fromStandardImageBlock(block) {
|
|
63
|
+
if (!isMultimodalModel) throw new Error("This model does not support images");
|
|
64
|
+
if (block.source_type === "url") {
|
|
65
|
+
const data = (0, __langchain_core_messages.parseBase64DataUrl)({ dataUrl: block.url });
|
|
66
|
+
if (data) return { inlineData: {
|
|
67
|
+
mimeType: data.mime_type,
|
|
68
|
+
data: data.data
|
|
69
|
+
} };
|
|
70
|
+
else return { fileData: {
|
|
71
|
+
mimeType: block.mime_type ?? "",
|
|
72
|
+
fileUri: block.url
|
|
73
|
+
} };
|
|
74
|
+
}
|
|
75
|
+
if (block.source_type === "base64") return { inlineData: {
|
|
76
|
+
mimeType: block.mime_type ?? "",
|
|
77
|
+
data: block.data
|
|
78
|
+
} };
|
|
79
|
+
throw new Error(`Unsupported source type: ${block.source_type}`);
|
|
80
|
+
},
|
|
81
|
+
fromStandardAudioBlock(block) {
|
|
82
|
+
if (!isMultimodalModel) throw new Error("This model does not support audio");
|
|
83
|
+
if (block.source_type === "url") {
|
|
84
|
+
const data = (0, __langchain_core_messages.parseBase64DataUrl)({ dataUrl: block.url });
|
|
85
|
+
if (data) return { inlineData: {
|
|
86
|
+
mimeType: data.mime_type,
|
|
87
|
+
data: data.data
|
|
88
|
+
} };
|
|
89
|
+
else return { fileData: {
|
|
90
|
+
mimeType: block.mime_type ?? "",
|
|
91
|
+
fileUri: block.url
|
|
92
|
+
} };
|
|
93
|
+
}
|
|
94
|
+
if (block.source_type === "base64") return { inlineData: {
|
|
95
|
+
mimeType: block.mime_type ?? "",
|
|
96
|
+
data: block.data
|
|
97
|
+
} };
|
|
98
|
+
throw new Error(`Unsupported source type: ${block.source_type}`);
|
|
99
|
+
},
|
|
100
|
+
fromStandardFileBlock(block) {
|
|
101
|
+
if (!isMultimodalModel) throw new Error("This model does not support files");
|
|
102
|
+
if (block.source_type === "text") return { text: block.text };
|
|
103
|
+
if (block.source_type === "url") {
|
|
104
|
+
const data = (0, __langchain_core_messages.parseBase64DataUrl)({ dataUrl: block.url });
|
|
105
|
+
if (data) return { inlineData: {
|
|
106
|
+
mimeType: data.mime_type,
|
|
107
|
+
data: data.data
|
|
108
|
+
} };
|
|
109
|
+
else return { fileData: {
|
|
110
|
+
mimeType: block.mime_type ?? "",
|
|
111
|
+
fileUri: block.url
|
|
112
|
+
} };
|
|
113
|
+
}
|
|
114
|
+
if (block.source_type === "base64") return { inlineData: {
|
|
115
|
+
mimeType: block.mime_type ?? "",
|
|
116
|
+
data: block.data
|
|
117
|
+
} };
|
|
118
|
+
throw new Error(`Unsupported source type: ${block.source_type}`);
|
|
119
|
+
}
|
|
120
|
+
};
|
|
121
|
+
return standardContentBlockConverter;
|
|
199
122
|
}
|
|
200
123
|
function _convertLangChainContentToPart(content, isMultimodalModel) {
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
}
|
|
232
|
-
const [mimeType, encoding] = dm.replace(/^data:/, "").split(";");
|
|
233
|
-
if (encoding !== "base64") {
|
|
234
|
-
throw new Error("Please provide image as base64 encoded data URL");
|
|
235
|
-
}
|
|
236
|
-
return {
|
|
237
|
-
inlineData: {
|
|
238
|
-
data,
|
|
239
|
-
mimeType,
|
|
240
|
-
},
|
|
241
|
-
};
|
|
242
|
-
}
|
|
243
|
-
else if (content.type === "media") {
|
|
244
|
-
return messageContentMedia(content);
|
|
245
|
-
}
|
|
246
|
-
else if (content.type === "tool_use") {
|
|
247
|
-
return {
|
|
248
|
-
functionCall: {
|
|
249
|
-
name: content.name,
|
|
250
|
-
args: content.input,
|
|
251
|
-
},
|
|
252
|
-
};
|
|
253
|
-
}
|
|
254
|
-
else if (content.type?.includes("/") &&
|
|
255
|
-
// Ensure it's a single slash.
|
|
256
|
-
content.type.split("/").length === 2 &&
|
|
257
|
-
"data" in content &&
|
|
258
|
-
typeof content.data === "string") {
|
|
259
|
-
return {
|
|
260
|
-
inlineData: {
|
|
261
|
-
mimeType: content.type,
|
|
262
|
-
data: content.data,
|
|
263
|
-
},
|
|
264
|
-
};
|
|
265
|
-
}
|
|
266
|
-
else if ("functionCall" in content) {
|
|
267
|
-
// No action needed here — function calls will be added later from message.tool_calls
|
|
268
|
-
return undefined;
|
|
269
|
-
}
|
|
270
|
-
else {
|
|
271
|
-
if ("type" in content) {
|
|
272
|
-
throw new Error(`Unknown content type ${content.type}`);
|
|
273
|
-
}
|
|
274
|
-
else {
|
|
275
|
-
throw new Error(`Unknown content ${JSON.stringify(content)}`);
|
|
276
|
-
}
|
|
277
|
-
}
|
|
124
|
+
if ((0, __langchain_core_messages.isDataContentBlock)(content)) return (0, __langchain_core_messages.convertToProviderContentBlock)(content, _getStandardContentBlockConverter(isMultimodalModel));
|
|
125
|
+
if (content.type === "text") return { text: content.text };
|
|
126
|
+
else if (content.type === "executableCode") return { executableCode: content.executableCode };
|
|
127
|
+
else if (content.type === "codeExecutionResult") return { codeExecutionResult: content.codeExecutionResult };
|
|
128
|
+
else if (content.type === "image_url") {
|
|
129
|
+
if (!isMultimodalModel) throw new Error(`This model does not support images`);
|
|
130
|
+
let source;
|
|
131
|
+
if (typeof content.image_url === "string") source = content.image_url;
|
|
132
|
+
else if (typeof content.image_url === "object" && "url" in content.image_url) source = content.image_url.url;
|
|
133
|
+
else throw new Error("Please provide image as base64 encoded data URL");
|
|
134
|
+
const [dm, data] = source.split(",");
|
|
135
|
+
if (!dm.startsWith("data:")) throw new Error("Please provide image as base64 encoded data URL");
|
|
136
|
+
const [mimeType, encoding] = dm.replace(/^data:/, "").split(";");
|
|
137
|
+
if (encoding !== "base64") throw new Error("Please provide image as base64 encoded data URL");
|
|
138
|
+
return { inlineData: {
|
|
139
|
+
data,
|
|
140
|
+
mimeType
|
|
141
|
+
} };
|
|
142
|
+
} else if (content.type === "media") return messageContentMedia(content);
|
|
143
|
+
else if (content.type === "tool_use") return { functionCall: {
|
|
144
|
+
name: content.name,
|
|
145
|
+
args: content.input
|
|
146
|
+
} };
|
|
147
|
+
else if (content.type?.includes("/") && content.type.split("/").length === 2 && "data" in content && typeof content.data === "string") return { inlineData: {
|
|
148
|
+
mimeType: content.type,
|
|
149
|
+
data: content.data
|
|
150
|
+
} };
|
|
151
|
+
else if ("functionCall" in content) return void 0;
|
|
152
|
+
else if ("type" in content) throw new Error(`Unknown content type ${content.type}`);
|
|
153
|
+
else throw new Error(`Unknown content ${JSON.stringify(content)}`);
|
|
278
154
|
}
|
|
279
|
-
function convertMessageContentToParts(message, isMultimodalModel, previousMessages) {
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
messageParts.push({ text: message.content });
|
|
317
|
-
}
|
|
318
|
-
if (Array.isArray(message.content)) {
|
|
319
|
-
messageParts.push(...message.content
|
|
320
|
-
.map((c) => _convertLangChainContentToPart(c, isMultimodalModel))
|
|
321
|
-
.filter((p) => p !== undefined));
|
|
322
|
-
}
|
|
323
|
-
if ((0, messages_1.isAIMessage)(message) && message.tool_calls?.length) {
|
|
324
|
-
functionCalls = message.tool_calls.map((tc) => {
|
|
325
|
-
return {
|
|
326
|
-
functionCall: {
|
|
327
|
-
name: tc.name,
|
|
328
|
-
args: tc.args,
|
|
329
|
-
},
|
|
330
|
-
};
|
|
331
|
-
});
|
|
332
|
-
}
|
|
333
|
-
return [...messageParts, ...functionCalls];
|
|
155
|
+
function convertMessageContentToParts(message, isMultimodalModel, previousMessages, model) {
|
|
156
|
+
if ((0, __langchain_core_messages.isToolMessage)(message)) {
|
|
157
|
+
const messageName = message.name ?? inferToolNameFromPreviousMessages(message, previousMessages);
|
|
158
|
+
if (messageName === void 0) throw new Error(`Google requires a tool name for each tool call response, and we could not infer a called tool name for ToolMessage "${message.id}" from your passed messages. Please populate a "name" field on that ToolMessage explicitly.`);
|
|
159
|
+
const result = Array.isArray(message.content) ? message.content.map((c) => _convertLangChainContentToPart(c, isMultimodalModel)).filter((p) => p !== void 0) : message.content;
|
|
160
|
+
if (message.status === "error") return [{ functionResponse: {
|
|
161
|
+
name: messageName,
|
|
162
|
+
response: { error: { details: result } }
|
|
163
|
+
} }];
|
|
164
|
+
return [{ functionResponse: {
|
|
165
|
+
name: messageName,
|
|
166
|
+
response: { result }
|
|
167
|
+
} }];
|
|
168
|
+
}
|
|
169
|
+
let functionCalls = [];
|
|
170
|
+
const messageParts = [];
|
|
171
|
+
if (typeof message.content === "string" && message.content) messageParts.push({ text: message.content });
|
|
172
|
+
if (Array.isArray(message.content)) messageParts.push(...message.content.map((c) => _convertLangChainContentToPart(c, isMultimodalModel)).filter((p) => p !== void 0));
|
|
173
|
+
const functionThoughtSignatures = message.additional_kwargs?.[_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY];
|
|
174
|
+
if ((0, __langchain_core_messages.isAIMessage)(message) && message.tool_calls?.length) functionCalls = message.tool_calls.map((tc) => {
|
|
175
|
+
const thoughtSignature = iife(() => {
|
|
176
|
+
if (tc.id) {
|
|
177
|
+
const signature = functionThoughtSignatures?.[tc.id];
|
|
178
|
+
if (signature) return signature;
|
|
179
|
+
}
|
|
180
|
+
if (model?.includes("gemini-3")) return DUMMY_SIGNATURE;
|
|
181
|
+
return "";
|
|
182
|
+
});
|
|
183
|
+
return {
|
|
184
|
+
functionCall: {
|
|
185
|
+
name: tc.name,
|
|
186
|
+
args: tc.args
|
|
187
|
+
},
|
|
188
|
+
...thoughtSignature ? { thoughtSignature } : {}
|
|
189
|
+
};
|
|
190
|
+
});
|
|
191
|
+
return [...messageParts, ...functionCalls];
|
|
334
192
|
}
|
|
335
|
-
function convertBaseMessagesToContent(messages, isMultimodalModel, convertSystemMessageToHumanContent = false) {
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
367
|
-
actualRole = "user";
|
|
368
|
-
}
|
|
369
|
-
const content = {
|
|
370
|
-
role: actualRole,
|
|
371
|
-
parts,
|
|
372
|
-
};
|
|
373
|
-
return {
|
|
374
|
-
mergeWithPreviousContent: author === "system" && !convertSystemMessageToHumanContent,
|
|
375
|
-
content: [...acc.content, content],
|
|
376
|
-
};
|
|
377
|
-
}, { content: [], mergeWithPreviousContent: false }).content;
|
|
193
|
+
function convertBaseMessagesToContent(messages, isMultimodalModel, convertSystemMessageToHumanContent = false, model) {
|
|
194
|
+
return messages.reduce((acc, message, index) => {
|
|
195
|
+
if (!(0, __langchain_core_messages.isBaseMessage)(message)) throw new Error("Unsupported message input");
|
|
196
|
+
const author = getMessageAuthor(message);
|
|
197
|
+
if (author === "system" && index !== 0) throw new Error("System message should be the first one");
|
|
198
|
+
const role = convertAuthorToRole(author);
|
|
199
|
+
const prevContent = acc.content[acc.content.length];
|
|
200
|
+
if (!acc.mergeWithPreviousContent && prevContent && prevContent.role === role) throw new Error("Google Generative AI requires alternate messages between authors");
|
|
201
|
+
const parts = convertMessageContentToParts(message, isMultimodalModel, messages.slice(0, index), model);
|
|
202
|
+
if (acc.mergeWithPreviousContent) {
|
|
203
|
+
const prevContent$1 = acc.content[acc.content.length - 1];
|
|
204
|
+
if (!prevContent$1) throw new Error("There was a problem parsing your system message. Please try a prompt without one.");
|
|
205
|
+
prevContent$1.parts.push(...parts);
|
|
206
|
+
return {
|
|
207
|
+
mergeWithPreviousContent: false,
|
|
208
|
+
content: acc.content
|
|
209
|
+
};
|
|
210
|
+
}
|
|
211
|
+
let actualRole = role;
|
|
212
|
+
if (actualRole === "function" || actualRole === "system" && !convertSystemMessageToHumanContent) actualRole = "user";
|
|
213
|
+
const content = {
|
|
214
|
+
role: actualRole,
|
|
215
|
+
parts
|
|
216
|
+
};
|
|
217
|
+
return {
|
|
218
|
+
mergeWithPreviousContent: author === "system" && !convertSystemMessageToHumanContent,
|
|
219
|
+
content: [...acc.content, content]
|
|
220
|
+
};
|
|
221
|
+
}, {
|
|
222
|
+
content: [],
|
|
223
|
+
mergeWithPreviousContent: false
|
|
224
|
+
}).content;
|
|
378
225
|
}
|
|
379
226
|
function mapGenerateContentResultToChatResult(response, extra) {
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
|
|
395
|
-
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
462
|
-
|
|
463
|
-
};
|
|
227
|
+
if (!response.candidates || response.candidates.length === 0 || !response.candidates[0]) return {
|
|
228
|
+
generations: [],
|
|
229
|
+
llmOutput: { filters: response.promptFeedback }
|
|
230
|
+
};
|
|
231
|
+
const [candidate] = response.candidates;
|
|
232
|
+
const { content: candidateContent,...generationInfo } = candidate;
|
|
233
|
+
const functionCalls = candidateContent.parts.reduce((acc, p) => {
|
|
234
|
+
if ("functionCall" in p && p.functionCall) acc.push({
|
|
235
|
+
...p,
|
|
236
|
+
id: "id" in p.functionCall && typeof p.functionCall.id === "string" ? p.functionCall.id : (0, uuid.v4)()
|
|
237
|
+
});
|
|
238
|
+
return acc;
|
|
239
|
+
}, []);
|
|
240
|
+
let content;
|
|
241
|
+
if (Array.isArray(candidateContent?.parts) && candidateContent.parts.length === 1 && candidateContent.parts[0].text) content = candidateContent.parts[0].text;
|
|
242
|
+
else if (Array.isArray(candidateContent?.parts) && candidateContent.parts.length > 0) content = candidateContent.parts.map((p) => {
|
|
243
|
+
if ("text" in p) return {
|
|
244
|
+
type: "text",
|
|
245
|
+
text: p.text
|
|
246
|
+
};
|
|
247
|
+
else if ("inlineData" in p) return {
|
|
248
|
+
type: "inlineData",
|
|
249
|
+
inlineData: p.inlineData
|
|
250
|
+
};
|
|
251
|
+
else if ("functionCall" in p) return {
|
|
252
|
+
type: "functionCall",
|
|
253
|
+
functionCall: p.functionCall
|
|
254
|
+
};
|
|
255
|
+
else if ("functionResponse" in p) return {
|
|
256
|
+
type: "functionResponse",
|
|
257
|
+
functionResponse: p.functionResponse
|
|
258
|
+
};
|
|
259
|
+
else if ("fileData" in p) return {
|
|
260
|
+
type: "fileData",
|
|
261
|
+
fileData: p.fileData
|
|
262
|
+
};
|
|
263
|
+
else if ("executableCode" in p) return {
|
|
264
|
+
type: "executableCode",
|
|
265
|
+
executableCode: p.executableCode
|
|
266
|
+
};
|
|
267
|
+
else if ("codeExecutionResult" in p) return {
|
|
268
|
+
type: "codeExecutionResult",
|
|
269
|
+
codeExecutionResult: p.codeExecutionResult
|
|
270
|
+
};
|
|
271
|
+
return p;
|
|
272
|
+
});
|
|
273
|
+
else content = [];
|
|
274
|
+
const functionThoughtSignatures = functionCalls?.reduce((acc, fc) => {
|
|
275
|
+
if ("thoughtSignature" in fc && typeof fc.thoughtSignature === "string") acc[fc.id] = fc.thoughtSignature;
|
|
276
|
+
return acc;
|
|
277
|
+
}, {});
|
|
278
|
+
let text = "";
|
|
279
|
+
if (typeof content === "string") text = content;
|
|
280
|
+
else if (Array.isArray(content) && content.length > 0) {
|
|
281
|
+
const block = content.find((b) => "text" in b);
|
|
282
|
+
text = block?.text ?? text;
|
|
283
|
+
}
|
|
284
|
+
const generation = {
|
|
285
|
+
text,
|
|
286
|
+
message: new __langchain_core_messages.AIMessage({
|
|
287
|
+
content: content ?? "",
|
|
288
|
+
tool_calls: functionCalls?.map((fc) => ({
|
|
289
|
+
type: "tool_call",
|
|
290
|
+
id: fc.id,
|
|
291
|
+
name: fc.functionCall.name,
|
|
292
|
+
args: fc.functionCall.args
|
|
293
|
+
})),
|
|
294
|
+
additional_kwargs: {
|
|
295
|
+
...generationInfo,
|
|
296
|
+
[_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY]: functionThoughtSignatures
|
|
297
|
+
},
|
|
298
|
+
usage_metadata: extra?.usageMetadata
|
|
299
|
+
}),
|
|
300
|
+
generationInfo
|
|
301
|
+
};
|
|
302
|
+
return {
|
|
303
|
+
generations: [generation],
|
|
304
|
+
llmOutput: { tokenUsage: {
|
|
305
|
+
promptTokens: extra?.usageMetadata?.input_tokens,
|
|
306
|
+
completionTokens: extra?.usageMetadata?.output_tokens,
|
|
307
|
+
totalTokens: extra?.usageMetadata?.total_tokens
|
|
308
|
+
} }
|
|
309
|
+
};
|
|
464
310
|
}
|
|
465
311
|
function convertResponseContentToChatGenerationChunk(response, extra) {
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
|
|
476
|
-
|
|
477
|
-
|
|
478
|
-
|
|
479
|
-
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
|
|
484
|
-
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
|
|
489
|
-
|
|
490
|
-
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
508
|
-
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
518
|
-
|
|
519
|
-
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
312
|
+
if (!response.candidates || response.candidates.length === 0) return null;
|
|
313
|
+
const [candidate] = response.candidates;
|
|
314
|
+
const { content: candidateContent,...generationInfo } = candidate;
|
|
315
|
+
const functionCalls = candidateContent.parts.reduce((acc, p) => {
|
|
316
|
+
if ("functionCall" in p && p.functionCall) acc.push({
|
|
317
|
+
...p,
|
|
318
|
+
id: "id" in p.functionCall && typeof p.functionCall.id === "string" ? p.functionCall.id : (0, uuid.v4)()
|
|
319
|
+
});
|
|
320
|
+
return acc;
|
|
321
|
+
}, []);
|
|
322
|
+
let content;
|
|
323
|
+
if (Array.isArray(candidateContent?.parts) && candidateContent.parts.every((p) => "text" in p)) content = candidateContent.parts.map((p) => p.text).join("");
|
|
324
|
+
else if (Array.isArray(candidateContent?.parts)) content = candidateContent.parts.map((p) => {
|
|
325
|
+
if ("text" in p) return {
|
|
326
|
+
type: "text",
|
|
327
|
+
text: p.text
|
|
328
|
+
};
|
|
329
|
+
else if ("inlineData" in p) return {
|
|
330
|
+
type: "inlineData",
|
|
331
|
+
inlineData: p.inlineData
|
|
332
|
+
};
|
|
333
|
+
else if ("functionCall" in p) return {
|
|
334
|
+
type: "functionCall",
|
|
335
|
+
functionCall: p.functionCall
|
|
336
|
+
};
|
|
337
|
+
else if ("functionResponse" in p) return {
|
|
338
|
+
type: "functionResponse",
|
|
339
|
+
functionResponse: p.functionResponse
|
|
340
|
+
};
|
|
341
|
+
else if ("fileData" in p) return {
|
|
342
|
+
type: "fileData",
|
|
343
|
+
fileData: p.fileData
|
|
344
|
+
};
|
|
345
|
+
else if ("executableCode" in p) return {
|
|
346
|
+
type: "executableCode",
|
|
347
|
+
executableCode: p.executableCode
|
|
348
|
+
};
|
|
349
|
+
else if ("codeExecutionResult" in p) return {
|
|
350
|
+
type: "codeExecutionResult",
|
|
351
|
+
codeExecutionResult: p.codeExecutionResult
|
|
352
|
+
};
|
|
353
|
+
return p;
|
|
354
|
+
});
|
|
355
|
+
else content = [];
|
|
356
|
+
let text = "";
|
|
357
|
+
if (content && typeof content === "string") text = content;
|
|
358
|
+
else if (Array.isArray(content)) {
|
|
359
|
+
const block = content.find((b) => "text" in b);
|
|
360
|
+
text = block?.text ?? "";
|
|
361
|
+
}
|
|
362
|
+
const toolCallChunks = [];
|
|
363
|
+
if (functionCalls) toolCallChunks.push(...functionCalls.map((fc) => ({
|
|
364
|
+
type: "tool_call_chunk",
|
|
365
|
+
id: fc.id,
|
|
366
|
+
name: fc.functionCall.name,
|
|
367
|
+
args: JSON.stringify(fc.functionCall.args)
|
|
368
|
+
})));
|
|
369
|
+
const functionThoughtSignatures = functionCalls?.reduce((acc, fc) => {
|
|
370
|
+
if ("thoughtSignature" in fc && typeof fc.thoughtSignature === "string") acc[fc.id] = fc.thoughtSignature;
|
|
371
|
+
return acc;
|
|
372
|
+
}, {});
|
|
373
|
+
return new __langchain_core_outputs.ChatGenerationChunk({
|
|
374
|
+
text,
|
|
375
|
+
message: new __langchain_core_messages.AIMessageChunk({
|
|
376
|
+
content: content || "",
|
|
377
|
+
name: !candidateContent ? void 0 : candidateContent.role,
|
|
378
|
+
tool_call_chunks: toolCallChunks,
|
|
379
|
+
additional_kwargs: { [_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY]: functionThoughtSignatures },
|
|
380
|
+
response_metadata: { model_provider: "google-genai" },
|
|
381
|
+
usage_metadata: extra.usageMetadata
|
|
382
|
+
}),
|
|
383
|
+
generationInfo
|
|
384
|
+
});
|
|
536
385
|
}
|
|
537
386
|
/**
 * Convert a heterogeneous tool list (LangChain tools, OpenAI-format tools, or
 * raw Google GenAI declarations) into the Google GenAI `Tool[]` shape.
 * If every entry already carries a `functionDeclarations` array the input is
 * returned unchanged (same reference); otherwise all entries are folded into
 * a single `{ functionDeclarations }` tool.
 */
function convertToGenerativeAITools(tools) {
	const alreadyGenerativeAI = tools.every(
		(t) => "functionDeclarations" in t && Array.isArray(t.functionDeclarations)
	);
	if (alreadyGenerativeAI) return tools;
	// Map one tool entry to a Gemini function declaration.
	const toDeclaration = (tool) => {
		if ((0, __langchain_core_utils_function_calling.isLangChainTool)(tool)) {
			const jsonSchema = require_zod_to_genai_parameters.schemaToGenerativeAIParameters(tool.schema);
			const hasNoProperties =
				jsonSchema.type === "object" &&
				"properties" in jsonSchema &&
				Object.keys(jsonSchema.properties).length === 0;
			// An empty object schema gets no `parameters` field at all.
			return hasNoProperties
				? { name: tool.name, description: tool.description }
				: { name: tool.name, description: tool.description, parameters: jsonSchema };
		}
		if ((0, __langchain_core_language_models_base.isOpenAITool)(tool)) {
			return {
				name: tool.function.name,
				description: tool.function.description ?? `A function available to call.`,
				parameters: require_zod_to_genai_parameters.jsonSchemaToGeminiParameters(tool.function.parameters)
			};
		}
		// Unknown shape: pass through untouched.
		return tool;
	};
	return [{ functionDeclarations: tools.map((tool) => toDeclaration(tool)) }];
}
|
|
409
|
+
/**
 * Normalize Google GenAI `usageMetadata` into LangChain's usage shape.
 *
 * @param {object|undefined} usageMetadata - Raw `usageMetadata` from a
 *   GenerateContent response; may be null/undefined (all counts default to 0).
 * @param {string} [model] - Model id; "gemini-3-pro-preview" additionally gets
 *   tiered-pricing detail buckets.
 * @returns {object} `{ input_tokens, output_tokens, total_tokens }` plus an
 *   optional `input_token_details` carrying `cache_read`, `over_200k`, and
 *   `cache_read_over_200k` when applicable.
 */
function convertUsageMetadata(usageMetadata, model) {
	const output = {
		input_tokens: usageMetadata?.promptTokenCount ?? 0,
		output_tokens: usageMetadata?.candidatesTokenCount ?? 0,
		total_tokens: usageMetadata?.totalTokenCount ?? 0
	};
	// Tokens served from the context cache are billed separately; surface them.
	if (usageMetadata?.cachedContentTokenCount) {
		output.input_token_details ??= {};
		output.input_token_details.cache_read = usageMetadata.cachedContentTokenCount;
	}
	if (model === "gemini-3-pro-preview") {
		// Report only the portion of the prompt (and of the cached prefix)
		// beyond the 200k-token tier threshold.
		// BUGFIX: the previous `count ?? 0 - 2e5` parsed as `count ?? (0 - 2e5)`
		// because `-` binds tighter than `??`, so `over_200k` reported the FULL
		// token count whenever it was present instead of the excess over 200,000.
		const over200k = Math.max(0, (usageMetadata?.promptTokenCount ?? 0) - 2e5);
		const cachedOver200k = Math.max(0, (usageMetadata?.cachedContentTokenCount ?? 0) - 2e5);
		if (over200k) output.input_token_details = {
			...output.input_token_details,
			over_200k: over200k
		};
		if (cachedOver200k) output.input_token_details = {
			...output.input_token_details,
			cache_read_over_200k: cachedOver200k
		};
	}
	return output;
}
|
|
433
|
+
|
|
434
|
+
//#endregion
// CommonJS public surface of this bundled module: message/content converters
// shared by the Google GenAI chat model and output parsers.
exports.convertBaseMessagesToContent = convertBaseMessagesToContent;
exports.convertResponseContentToChatGenerationChunk = convertResponseContentToChatGenerationChunk;
exports.convertToGenerativeAITools = convertToGenerativeAITools;
exports.convertUsageMetadata = convertUsageMetadata;
exports.mapGenerateContentResultToChatResult = mapGenerateContentResultToChatResult;
//# sourceMappingURL=common.cjs.map