@langchain/google-common 0.2.18 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +17 -0
- package/LICENSE +6 -6
- package/dist/_virtual/rolldown_runtime.cjs +25 -0
- package/dist/auth.cjs +82 -116
- package/dist/auth.cjs.map +1 -0
- package/dist/auth.d.cts +46 -0
- package/dist/auth.d.cts.map +1 -0
- package/dist/auth.d.ts +41 -36
- package/dist/auth.d.ts.map +1 -0
- package/dist/auth.js +80 -110
- package/dist/auth.js.map +1 -0
- package/dist/chat_models.cjs +251 -466
- package/dist/chat_models.cjs.map +1 -0
- package/dist/chat_models.d.cts +98 -0
- package/dist/chat_models.d.cts.map +1 -0
- package/dist/chat_models.d.ts +87 -73
- package/dist/chat_models.d.ts.map +1 -0
- package/dist/chat_models.js +245 -457
- package/dist/chat_models.js.map +1 -0
- package/dist/connection.cjs +321 -466
- package/dist/connection.cjs.map +1 -0
- package/dist/connection.d.cts +109 -0
- package/dist/connection.d.cts.map +1 -0
- package/dist/connection.d.ts +98 -91
- package/dist/connection.d.ts.map +1 -0
- package/dist/connection.js +317 -459
- package/dist/connection.js.map +1 -0
- package/dist/embeddings.cjs +135 -186
- package/dist/embeddings.cjs.map +1 -0
- package/dist/embeddings.d.cts +44 -0
- package/dist/embeddings.d.cts.map +1 -0
- package/dist/embeddings.d.ts +38 -32
- package/dist/embeddings.d.ts.map +1 -0
- package/dist/embeddings.js +133 -181
- package/dist/embeddings.js.map +1 -0
- package/dist/experimental/media.cjs +380 -482
- package/dist/experimental/media.cjs.map +1 -0
- package/dist/experimental/media.d.cts +198 -0
- package/dist/experimental/media.d.cts.map +1 -0
- package/dist/experimental/media.d.ts +190 -202
- package/dist/experimental/media.d.ts.map +1 -0
- package/dist/experimental/media.js +369 -468
- package/dist/experimental/media.js.map +1 -0
- package/dist/experimental/utils/media_core.cjs +403 -517
- package/dist/experimental/utils/media_core.cjs.map +1 -0
- package/dist/experimental/utils/media_core.d.cts +215 -0
- package/dist/experimental/utils/media_core.d.cts.map +1 -0
- package/dist/experimental/utils/media_core.d.ts +171 -165
- package/dist/experimental/utils/media_core.d.ts.map +1 -0
- package/dist/experimental/utils/media_core.js +395 -506
- package/dist/experimental/utils/media_core.js.map +1 -0
- package/dist/index.cjs +58 -27
- package/dist/index.d.cts +13 -0
- package/dist/index.d.ts +13 -11
- package/dist/index.js +13 -11
- package/dist/llms.cjs +157 -244
- package/dist/llms.cjs.map +1 -0
- package/dist/llms.d.cts +72 -0
- package/dist/llms.d.cts.map +1 -0
- package/dist/llms.d.ts +64 -54
- package/dist/llms.d.ts.map +1 -0
- package/dist/llms.js +154 -238
- package/dist/llms.js.map +1 -0
- package/dist/output_parsers.cjs +148 -173
- package/dist/output_parsers.cjs.map +1 -0
- package/dist/output_parsers.d.cts +53 -0
- package/dist/output_parsers.d.cts.map +1 -0
- package/dist/output_parsers.d.ts +46 -42
- package/dist/output_parsers.d.ts.map +1 -0
- package/dist/output_parsers.js +146 -168
- package/dist/output_parsers.js.map +1 -0
- package/dist/types-anthropic.d.cts +229 -0
- package/dist/types-anthropic.d.cts.map +1 -0
- package/dist/types-anthropic.d.ts +221 -215
- package/dist/types-anthropic.d.ts.map +1 -0
- package/dist/types.cjs +51 -62
- package/dist/types.cjs.map +1 -0
- package/dist/types.d.cts +748 -0
- package/dist/types.d.cts.map +1 -0
- package/dist/types.d.ts +669 -656
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +46 -45
- package/dist/types.js.map +1 -0
- package/dist/utils/anthropic.cjs +598 -821
- package/dist/utils/anthropic.cjs.map +1 -0
- package/dist/utils/anthropic.js +597 -818
- package/dist/utils/anthropic.js.map +1 -0
- package/dist/utils/common.cjs +130 -211
- package/dist/utils/common.cjs.map +1 -0
- package/dist/utils/common.d.cts +13 -0
- package/dist/utils/common.d.cts.map +1 -0
- package/dist/utils/common.d.ts +12 -7
- package/dist/utils/common.d.ts.map +1 -0
- package/dist/utils/common.js +128 -207
- package/dist/utils/common.js.map +1 -0
- package/dist/utils/failed_handler.cjs +28 -30
- package/dist/utils/failed_handler.cjs.map +1 -0
- package/dist/utils/failed_handler.d.cts +9 -0
- package/dist/utils/failed_handler.d.cts.map +1 -0
- package/dist/utils/failed_handler.d.ts +8 -2
- package/dist/utils/failed_handler.d.ts.map +1 -0
- package/dist/utils/failed_handler.js +28 -28
- package/dist/utils/failed_handler.js.map +1 -0
- package/dist/utils/gemini.cjs +1020 -1488
- package/dist/utils/gemini.cjs.map +1 -0
- package/dist/utils/gemini.d.cts +51 -0
- package/dist/utils/gemini.d.cts.map +1 -0
- package/dist/utils/gemini.d.ts +51 -48
- package/dist/utils/gemini.d.ts.map +1 -0
- package/dist/utils/gemini.js +1015 -1479
- package/dist/utils/gemini.js.map +1 -0
- package/dist/utils/index.cjs +38 -23
- package/dist/utils/index.d.cts +8 -0
- package/dist/utils/index.d.ts +8 -7
- package/dist/utils/index.js +8 -7
- package/dist/utils/palm.d.cts +11 -0
- package/dist/utils/palm.d.cts.map +1 -0
- package/dist/utils/palm.d.ts +9 -4
- package/dist/utils/palm.d.ts.map +1 -0
- package/dist/utils/safety.cjs +13 -22
- package/dist/utils/safety.cjs.map +1 -0
- package/dist/utils/safety.d.cts +12 -0
- package/dist/utils/safety.d.cts.map +1 -0
- package/dist/utils/safety.d.ts +10 -4
- package/dist/utils/safety.d.ts.map +1 -0
- package/dist/utils/safety.js +13 -19
- package/dist/utils/safety.js.map +1 -0
- package/dist/utils/stream.cjs +296 -475
- package/dist/utils/stream.cjs.map +1 -0
- package/dist/utils/stream.d.cts +165 -0
- package/dist/utils/stream.d.cts.map +1 -0
- package/dist/utils/stream.d.ts +156 -131
- package/dist/utils/stream.d.ts.map +1 -0
- package/dist/utils/stream.js +293 -469
- package/dist/utils/stream.js.map +1 -0
- package/dist/utils/zod_to_gemini_parameters.cjs +43 -81
- package/dist/utils/zod_to_gemini_parameters.cjs.map +1 -0
- package/dist/utils/zod_to_gemini_parameters.d.cts +22 -0
- package/dist/utils/zod_to_gemini_parameters.d.cts.map +1 -0
- package/dist/utils/zod_to_gemini_parameters.d.ts +21 -6
- package/dist/utils/zod_to_gemini_parameters.d.ts.map +1 -0
- package/dist/utils/zod_to_gemini_parameters.js +40 -76
- package/dist/utils/zod_to_gemini_parameters.js.map +1 -0
- package/package.json +69 -85
- package/dist/types-anthropic.cjs +0 -2
- package/dist/types-anthropic.js +0 -1
- package/dist/utils/anthropic.d.ts +0 -4
- package/dist/utils/palm.cjs +0 -2
- package/dist/utils/palm.js +0 -1
- package/experimental/media.cjs +0 -1
- package/experimental/media.d.cts +0 -1
- package/experimental/media.d.ts +0 -1
- package/experimental/media.js +0 -1
- package/experimental/utils/media_core.cjs +0 -1
- package/experimental/utils/media_core.d.cts +0 -1
- package/experimental/utils/media_core.d.ts +0 -1
- package/experimental/utils/media_core.js +0 -1
- package/index.cjs +0 -1
- package/index.d.cts +0 -1
- package/index.d.ts +0 -1
- package/index.js +0 -1
- package/types.cjs +0 -1
- package/types.d.cts +0 -1
- package/types.d.ts +0 -1
- package/types.js +0 -1
- package/utils.cjs +0 -1
- package/utils.d.cts +0 -1
- package/utils.d.ts +0 -1
- package/utils.js +0 -1
package/dist/chat_models.js
CHANGED
@@ -1,463 +1,251 @@
+import { removeAdditionalProperties, schemaToGeminiParameters } from "./utils/zod_to_gemini_parameters.js";
+import { DefaultGeminiSafetyHandler, getGeminiAPI } from "./utils/gemini.js";
+import { convertToGeminiTools, copyAIModelParams, copyAndValidateModelParamsInto } from "./utils/common.js";
+import { ensureParams } from "./utils/failed_handler.js";
+import { AbstractGoogleLLMConnection } from "./connection.js";
+import { ApiKeyGoogleAuth } from "./auth.js";
 import { getEnvironmentVariable } from "@langchain/core/utils/env";
-import { BaseChatModel
+import { BaseChatModel } from "@langchain/core/language_models/chat_models";
 import { ChatGenerationChunk } from "@langchain/core/outputs";
 import { AIMessageChunk } from "@langchain/core/messages";
-import { RunnablePassthrough, RunnableSequence
+import { RunnablePassthrough, RunnableSequence } from "@langchain/core/runnables";
 import { JsonOutputKeyToolsParser } from "@langchain/core/output_parsers/openai_tools";
 import { concat } from "@langchain/core/utils/stream";
-import { isInteropZodSchema
[content of removed lines 9–55 of the previous version was not captured in this diff view]
-}
-return true;
-}
-computeGoogleSearchToolAdjustmentFromModel() {
-if (this.modelName.startsWith("gemini-1.0")) {
-return "googleSearchRetrieval";
-}
-else if (this.modelName.startsWith("gemini-1.5")) {
-return "googleSearchRetrieval";
-}
-else {
-return "googleSearch";
-}
-}
-computeGoogleSearchToolAdjustment(apiConfig) {
-const adj = apiConfig.googleSearchToolAdjustment;
-if (adj === undefined || adj === true) {
-return this.computeGoogleSearchToolAdjustmentFromModel();
-}
-else {
-return adj;
-}
-}
-buildGeminiAPI() {
-const apiConfig = this.apiConfig ?? {};
-const googleSearchToolAdjustment = this.computeGoogleSearchToolAdjustment(apiConfig);
-const geminiConfig = {
-useSystemInstruction: this.useSystemInstruction,
-googleSearchToolAdjustment,
-...apiConfig,
-};
-return getGeminiAPI(geminiConfig);
-}
-get api() {
-switch (this.apiName) {
-case "google":
-return this.buildGeminiAPI();
-default:
-return super.api;
-}
-}
-}
+import { isInteropZodSchema } from "@langchain/core/utils/types";
+
+//#region src/chat_models.ts
+var ChatConnection = class extends AbstractGoogleLLMConnection {
+convertSystemMessageToHumanContent;
+constructor(fields, caller, client, streaming) {
+super(fields, caller, client, streaming);
+this.convertSystemMessageToHumanContent = fields?.convertSystemMessageToHumanContent;
+}
+get useSystemInstruction() {
+return typeof this.convertSystemMessageToHumanContent === "boolean" ? !this.convertSystemMessageToHumanContent : this.computeUseSystemInstruction;
+}
+get computeUseSystemInstruction() {
+if (this.modelFamily === "palm") return false;
+else if (this.modelName === "gemini-1.0-pro-001") return false;
+else if (this.modelName.startsWith("gemini-pro-vision")) return false;
+else if (this.modelName.startsWith("gemini-1.0-pro-vision")) return false;
+else if (this.modelName === "gemini-pro" && this.platform === "gai") return false;
+else if (this.modelFamily === "gemma") return false;
+return true;
+}
+computeGoogleSearchToolAdjustmentFromModel() {
+if (this.modelName.startsWith("gemini-1.0")) return "googleSearchRetrieval";
+else if (this.modelName.startsWith("gemini-1.5")) return "googleSearchRetrieval";
+else return "googleSearch";
+}
+computeGoogleSearchToolAdjustment(apiConfig) {
+const adj = apiConfig.googleSearchToolAdjustment;
+if (adj === void 0 || adj === true) return this.computeGoogleSearchToolAdjustmentFromModel();
+else return adj;
+}
+buildGeminiAPI() {
+const apiConfig = this.apiConfig ?? {};
+const googleSearchToolAdjustment = this.computeGoogleSearchToolAdjustment(apiConfig);
+const geminiConfig = {
+useSystemInstruction: this.useSystemInstruction,
+googleSearchToolAdjustment,
+...apiConfig
+};
+return getGeminiAPI(geminiConfig);
+}
+get api() {
+switch (this.apiName) {
+case "google": return this.buildGeminiAPI();
+default: return super.api;
+}
+}
+};
 /**
[content of removed lines 99–287 of the previous version was not captured in this diff view]
-else {
-return this.buildAbstractedClient(fields);
-}
-}
-buildConnection(fields, client) {
-this.connection = new ChatConnection({ ...fields, ...this }, this.caller, client, false);
-this.streamedConnection = new ChatConnection({ ...fields, ...this }, this.caller, client, true);
-}
-get platform() {
-return this.connection.platform;
-}
-bindTools(tools, kwargs) {
-return this.withConfig({ tools: convertToGeminiTools(tools), ...kwargs });
-}
-// Replace
-_llmType() {
-return "chat_integration";
-}
-/**
-* Get the parameters used to invoke the model
-*/
-invocationParams(options) {
-return copyAIModelParams(this, options);
-}
-async _generate(messages, options, runManager) {
-const parameters = this.invocationParams(options);
-if (this.streaming) {
-const stream = this._streamResponseChunks(messages, options, runManager);
-let finalChunk = null;
-for await (const chunk of stream) {
-finalChunk = !finalChunk ? chunk : concat(finalChunk, chunk);
-}
-if (!finalChunk) {
-throw new Error("No chunks were returned from the stream.");
-}
-return {
-generations: [finalChunk],
-};
-}
-const response = await this.connection.request(messages, parameters, options, runManager);
-const ret = this.connection.api.responseToChatResult(response);
-const chunk = ret?.generations?.[0];
-if (chunk) {
-await runManager?.handleLLMNewToken(chunk.text || "");
-}
-return ret;
-}
-async *_streamResponseChunks(_messages, options, runManager) {
-// Make the call as a streaming request
-const parameters = this.invocationParams(options);
-const response = await this.streamedConnection.request(_messages, parameters, options, runManager);
-// Get the streaming parser of the response
-const stream = response.data;
-let usageMetadata;
-// Loop until the end of the stream
-// During the loop, yield each time we get a chunk from the streaming parser
-// that is either available or added to the queue
-while (!stream.streamDone) {
-const output = await stream.nextChunk();
-await runManager?.handleCustomEvent(`google-chunk-${this.constructor.name}`, {
-output,
-});
-if (output &&
-output.usageMetadata &&
-this.streamUsage !== false &&
-options.streamUsage !== false) {
-usageMetadata = {
-input_tokens: output.usageMetadata.promptTokenCount,
-output_tokens: output.usageMetadata.candidatesTokenCount,
-total_tokens: output.usageMetadata.totalTokenCount,
-};
-}
-const chunk = output !== null
-? this.connection.api.responseToChatGeneration({ data: output })
-: new ChatGenerationChunk({
-text: "",
-generationInfo: { finishReason: "stop" },
-message: new AIMessageChunk({
-content: "",
-usage_metadata: usageMetadata,
-}),
-});
-if (chunk) {
-yield chunk;
-await runManager?.handleLLMNewToken(chunk.text ?? "", undefined, undefined, undefined, undefined, { chunk });
-}
-}
-}
-/** @ignore */
-_combineLLMOutput() {
-return [];
-}
-withStructuredOutput(outputSchema, config) {
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-const schema = outputSchema;
-const name = config?.name;
-const method = config?.method;
-const includeRaw = config?.includeRaw;
-if (method === "jsonMode") {
-throw new Error(`Google only supports "functionCalling" as a method.`);
-}
-let functionName = name ?? "extract";
-let outputParser;
-let tools;
-if (isInteropZodSchema(schema)) {
-const jsonSchema = schemaToGeminiParameters(schema);
-tools = [
-{
-functionDeclarations: [
-{
-name: functionName,
-description: jsonSchema.description ?? "A function available to call.",
-parameters: jsonSchema,
-},
-],
-},
-];
-outputParser = new JsonOutputKeyToolsParser({
-returnSingle: true,
-keyName: functionName,
-zodSchema: schema,
-});
-}
-else {
-let geminiFunctionDefinition;
-if (typeof schema.name === "string" &&
-typeof schema.parameters === "object" &&
-schema.parameters != null) {
-geminiFunctionDefinition = schema;
-functionName = schema.name;
-}
-else {
-// We are providing the schema for *just* the parameters, probably
-const parameters = removeAdditionalProperties(schema);
-geminiFunctionDefinition = {
-name: functionName,
-description: schema.description ?? "",
-parameters,
-};
-}
-tools = [
-{
-functionDeclarations: [geminiFunctionDefinition],
-},
-];
-outputParser = new JsonOutputKeyToolsParser({
-returnSingle: true,
-keyName: functionName,
-});
-}
-const llm = this.bindTools(tools).withConfig({ tool_choice: functionName });
-if (!includeRaw) {
-return llm.pipe(outputParser).withConfig({
-runName: "ChatGoogleStructuredOutput",
-});
-}
-const parserAssign = RunnablePassthrough.assign({
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-parsed: (input, config) => outputParser.invoke(input.raw, config),
-});
-const parserNone = RunnablePassthrough.assign({
-parsed: () => null,
-});
-const parsedWithFallback = parserAssign.withFallbacks({
-fallbacks: [parserNone],
-});
-return RunnableSequence.from([
-{
-raw: llm,
-},
-parsedWithFallback,
-]).withConfig({
-runName: "StructuredOutputRunnable",
-});
-}
-}
+* Integration with a Google chat model.
+*/
+var ChatGoogleBase = class extends BaseChatModel {
+static lc_name() {
+return "ChatGoogle";
+}
+get lc_secrets() {
+return { authOptions: "GOOGLE_AUTH_OPTIONS" };
+}
+lc_serializable = true;
+model;
+modelName = "gemini-pro";
+temperature;
+maxOutputTokens;
+maxReasoningTokens;
+topP;
+topK;
+seed;
+presencePenalty;
+frequencyPenalty;
+stopSequences = [];
+logprobs;
+topLogprobs = 0;
+safetySettings = [];
+responseModalities;
+convertSystemMessageToHumanContent;
+safetyHandler;
+speechConfig;
+streamUsage = true;
+streaming = false;
+labels;
+connection;
+streamedConnection;
+constructor(fields) {
+super(ensureParams(fields));
+copyAndValidateModelParamsInto(fields, this);
+this.safetyHandler = fields?.safetyHandler ?? new DefaultGeminiSafetyHandler();
+this.streamUsage = fields?.streamUsage ?? this.streamUsage;
+const client = this.buildClient(fields);
+this.buildConnection(fields ?? {}, client);
+}
+getLsParams(options) {
+const params = this.invocationParams(options);
+return {
+ls_provider: "google_vertexai",
+ls_model_name: this.model,
+ls_model_type: "chat",
+ls_temperature: params.temperature ?? void 0,
+ls_max_tokens: params.maxOutputTokens ?? void 0,
+ls_stop: options.stop
+};
+}
+buildApiKeyClient(apiKey) {
+return new ApiKeyGoogleAuth(apiKey);
+}
+buildApiKey(fields) {
+return fields?.apiKey ?? getEnvironmentVariable("GOOGLE_API_KEY");
+}
+buildClient(fields) {
+const apiKey = this.buildApiKey(fields);
+if (apiKey) return this.buildApiKeyClient(apiKey);
+else return this.buildAbstractedClient(fields);
+}
+buildConnection(fields, client) {
+this.connection = new ChatConnection({
+...fields,
+...this
+}, this.caller, client, false);
+this.streamedConnection = new ChatConnection({
+...fields,
+...this
+}, this.caller, client, true);
+}
+get platform() {
+return this.connection.platform;
+}
+bindTools(tools, kwargs) {
+return this.withConfig({
+tools: convertToGeminiTools(tools),
+...kwargs
+});
+}
+_llmType() {
+return "chat_integration";
+}
+/**
+* Get the parameters used to invoke the model
+*/
+invocationParams(options) {
+return copyAIModelParams(this, options);
+}
+async _generate(messages, options, runManager) {
+const parameters = this.invocationParams(options);
+if (this.streaming) {
+const stream = this._streamResponseChunks(messages, options, runManager);
+let finalChunk = null;
+for await (const chunk$1 of stream) finalChunk = !finalChunk ? chunk$1 : concat(finalChunk, chunk$1);
+if (!finalChunk) throw new Error("No chunks were returned from the stream.");
+return { generations: [finalChunk] };
+}
+const response = await this.connection.request(messages, parameters, options, runManager);
+const ret = this.connection.api.responseToChatResult(response);
+const chunk = ret?.generations?.[0];
+if (chunk) await runManager?.handleLLMNewToken(chunk.text || "");
+return ret;
+}
+async *_streamResponseChunks(_messages, options, runManager) {
+const parameters = this.invocationParams(options);
+const response = await this.streamedConnection.request(_messages, parameters, options, runManager);
+const stream = response.data;
+let usageMetadata;
+while (!stream.streamDone) {
+const output = await stream.nextChunk();
+await runManager?.handleCustomEvent(`google-chunk-${this.constructor.name}`, { output });
+if (output && output.usageMetadata && this.streamUsage !== false && options.streamUsage !== false) usageMetadata = {
+input_tokens: output.usageMetadata.promptTokenCount,
+output_tokens: output.usageMetadata.candidatesTokenCount,
+total_tokens: output.usageMetadata.totalTokenCount
+};
+const chunk = output !== null ? this.connection.api.responseToChatGeneration({ data: output }) : new ChatGenerationChunk({
+text: "",
+generationInfo: { finishReason: "stop" },
+message: new AIMessageChunk({
+content: "",
+usage_metadata: usageMetadata
+})
+});
+if (chunk) {
+yield chunk;
+await runManager?.handleLLMNewToken(chunk.text ?? "", void 0, void 0, void 0, void 0, { chunk });
+}
+}
+}
+/** @ignore */
+_combineLLMOutput() {
+return [];
+}
+withStructuredOutput(outputSchema, config) {
+const schema = outputSchema;
+const name = config?.name;
+const method = config?.method;
+const includeRaw = config?.includeRaw;
+if (method === "jsonMode") throw new Error(`Google only supports "functionCalling" as a method.`);
+let functionName = name ?? "extract";
+let outputParser;
+let tools;
+if (isInteropZodSchema(schema)) {
+const jsonSchema = schemaToGeminiParameters(schema);
+tools = [{ functionDeclarations: [{
+name: functionName,
+description: jsonSchema.description ?? "A function available to call.",
+parameters: jsonSchema
+}] }];
+outputParser = new JsonOutputKeyToolsParser({
+returnSingle: true,
+keyName: functionName,
+zodSchema: schema
+});
+} else {
+let geminiFunctionDefinition;
+if (typeof schema.name === "string" && typeof schema.parameters === "object" && schema.parameters != null) {
+geminiFunctionDefinition = schema;
+functionName = schema.name;
+} else {
+const parameters = removeAdditionalProperties(schema);
+geminiFunctionDefinition = {
+name: functionName,
+description: schema.description ?? "",
+parameters
+};
+}
+tools = [{ functionDeclarations: [geminiFunctionDefinition] }];
+outputParser = new JsonOutputKeyToolsParser({
+returnSingle: true,
+keyName: functionName
+});
+}
+const llm = this.bindTools(tools).withConfig({ tool_choice: functionName });
+if (!includeRaw) return llm.pipe(outputParser).withConfig({ runName: "ChatGoogleStructuredOutput" });
+const parserAssign = RunnablePassthrough.assign({ parsed: (input, config$1) => outputParser.invoke(input.raw, config$1) });
+const parserNone = RunnablePassthrough.assign({ parsed: () => null });
+const parsedWithFallback = parserAssign.withFallbacks({ fallbacks: [parserNone] });
+return RunnableSequence.from([{ raw: llm }, parsedWithFallback]).withConfig({ runName: "StructuredOutputRunnable" });
+}
+};
+
+//#endregion
+export { ChatConnection, ChatGoogleBase };
+//# sourceMappingURL=chat_models.js.map