@langchain/google-common 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chat_models.cjs +27 -48
- package/dist/chat_models.d.ts +5 -5
- package/dist/chat_models.js +28 -49
- package/dist/connection.cjs +178 -98
- package/dist/connection.d.ts +47 -16
- package/dist/connection.js +174 -97
- package/dist/llms.cjs +2 -2
- package/dist/llms.js +2 -2
- package/dist/types-anthropic.cjs +2 -0
- package/dist/types-anthropic.d.ts +159 -0
- package/dist/types-anthropic.js +1 -0
- package/dist/types.cjs +54 -0
- package/dist/types.d.ts +68 -6
- package/dist/types.js +39 -1
- package/dist/utils/anthropic.cjs +541 -0
- package/dist/utils/anthropic.d.ts +4 -0
- package/dist/utils/anthropic.js +535 -0
- package/dist/utils/common.cjs +20 -1
- package/dist/utils/common.d.ts +3 -2
- package/dist/utils/common.js +18 -0
- package/dist/utils/gemini.cjs +303 -127
- package/dist/utils/gemini.d.ts +4 -14
- package/dist/utils/gemini.js +300 -124
- package/dist/utils/stream.cjs +184 -4
- package/dist/utils/stream.d.ts +73 -3
- package/dist/utils/stream.js +178 -3
- package/package.json +1 -1
package/dist/chat_models.cjs
CHANGED
@@ -53,49 +53,20 @@ class ChatConnection extends connection_js_1.AbstractGoogleLLMConnection {
         }
         return true;
     }
-
-        const
-
-
-
-
-                return acc;
-            }
-            // Combine adjacent function messages
-            if (cur[0]?.role === "function" &&
-                acc.length > 0 &&
-                acc[acc.length - 1].role === "function") {
-                acc[acc.length - 1].parts = [
-                    ...acc[acc.length - 1].parts,
-                    ...cur[0].parts,
-                ];
-            }
-            else {
-                acc.push(...cur);
-            }
-            return acc;
-        }, []);
+    buildGeminiAPI() {
+        const geminiConfig = {
+            useSystemInstruction: this.useSystemInstruction,
+            ...this.apiConfig,
+        };
+        return (0, gemini_js_1.getGeminiAPI)(geminiConfig);
     }
-
-
-
+    get api() {
+        switch (this.apiName) {
+            case "google":
+                return this.buildGeminiAPI();
+            default:
+                return super.api;
         }
-        let ret = {};
-        for (let index = 0; index < input.length; index += 1) {
-            const message = input[index];
-            if (message._getType() === "system") {
-                // For system types, we only want it if it is the first message,
-                // if it appears anywhere else, it should be an error.
-                if (index === 0) {
-                    // eslint-disable-next-line prefer-destructuring
-                    ret = (await this.api.baseMessageToContent(message, undefined, true))[0];
-                }
-                else {
-                    throw new Error("System messages are only permitted as the first passed message.");
-                }
-            }
-        }
-        return ret;
     }
 }
 exports.ChatConnection = ChatConnection;
@@ -274,15 +245,18 @@ class ChatGoogleBase extends chat_models_1.BaseChatModel {
                 generations: [finalChunk],
             };
         }
-        const response = await this.connection.request(messages, parameters, options);
-        const ret = this.connection.api.
-
+        const response = await this.connection.request(messages, parameters, options, runManager);
+        const ret = this.connection.api.responseToChatResult(response);
+        const chunk = ret?.generations?.[0];
+        if (chunk) {
+            await runManager?.handleLLMNewToken(chunk.text || "");
+        }
         return ret;
     }
     async *_streamResponseChunks(_messages, options, runManager) {
         // Make the call as a streaming request
         const parameters = this.invocationParams(options);
-        const response = await this.streamedConnection.request(_messages, parameters, options);
+        const response = await this.streamedConnection.request(_messages, parameters, options, runManager);
         // Get the streaming parser of the response
         const stream = response.data;
         let usageMetadata;
@@ -291,6 +265,9 @@ class ChatGoogleBase extends chat_models_1.BaseChatModel {
         // that is either available or added to the queue
         while (!stream.streamDone) {
             const output = await stream.nextChunk();
+            await runManager?.handleCustomEvent(`google-chunk-${this.constructor.name}`, {
+                output,
+            });
             if (output &&
                 output.usageMetadata &&
                 this.streamUsage !== false &&
@@ -302,7 +279,7 @@ class ChatGoogleBase extends chat_models_1.BaseChatModel {
                 };
             }
             const chunk = output !== null
-                ? this.connection.api.
+                ? this.connection.api.responseToChatGeneration({ data: output })
                 : new outputs_1.ChatGenerationChunk({
                     text: "",
                     generationInfo: { finishReason: "stop" },
@@ -311,8 +288,10 @@ class ChatGoogleBase extends chat_models_1.BaseChatModel {
                         usage_metadata: usageMetadata,
                     }),
                 });
-
-
+            if (chunk) {
+                yield chunk;
+                await runManager?.handleLLMNewToken(chunk.text ?? "", undefined, undefined, undefined, undefined, { chunk });
+            }
         }
     }
     /** @ignore */
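Note: with the chat_models.cjs changes above, the non-streaming `_generate` path now runs the response through `responseToChatResult` and reports the resulting text once via `runManager?.handleLLMNewToken`, and the streaming path reports each parsed chunk the same way. A minimal sketch of a token callback that benefits from this (the handler class and its name are illustrative; `BaseCallbackHandler` and `handleLLMNewToken` are standard @langchain/core APIs):

```ts
import { BaseCallbackHandler } from "@langchain/core/callbacks/base";

// Illustrative handler: receives the text reported through handleLLMNewToken,
// including the single final-text report made by the non-streaming path.
class TokenLogger extends BaseCallbackHandler {
  name = "TokenLogger";

  async handleLLMNewToken(token: string): Promise<void> {
    process.stdout.write(token);
  }
}

// Usage sketch (model construction omitted; any chat model built on this
// package accepts callbacks in its call options):
// await model.invoke("Hello", { callbacks: [new TokenLogger()] });
```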
package/dist/chat_models.d.ts
CHANGED
@@ -7,22 +7,22 @@ import { BaseLanguageModelInput, StructuredOutputMethodOptions } from "@langchai
 import type { z } from "zod";
 import { Runnable } from "@langchain/core/runnables";
 import { AsyncCaller } from "@langchain/core/utils/async_caller";
-import { GoogleAIBaseLLMInput, GoogleAIModelParams, GoogleAISafetySetting, GoogleConnectionParams, GooglePlatformType,
+import { GoogleAIBaseLLMInput, GoogleAIModelParams, GoogleAISafetySetting, GoogleConnectionParams, GooglePlatformType, GoogleAIBaseLanguageModelCallOptions, GoogleAIAPI, GoogleAIAPIParams } from "./types.js";
 import { AbstractGoogleLLMConnection } from "./connection.js";
 import { GoogleAbstractedClient } from "./auth.js";
-import type { GoogleBaseLLMInput, GoogleAISafetyHandler, GoogleAISafetyParams, GoogleAIToolType
+import type { GoogleBaseLLMInput, GoogleAISafetyHandler, GoogleAISafetyParams, GoogleAIToolType } from "./types.js";
 export declare class ChatConnection<AuthOptions> extends AbstractGoogleLLMConnection<BaseMessage[], AuthOptions> {
     convertSystemMessageToHumanContent: boolean | undefined;
     constructor(fields: GoogleAIBaseLLMInput<AuthOptions> | undefined, caller: AsyncCaller, client: GoogleAbstractedClient, streaming: boolean);
     get useSystemInstruction(): boolean;
     get computeUseSystemInstruction(): boolean;
-
-
+    buildGeminiAPI(): GoogleAIAPI;
+    get api(): GoogleAIAPI;
 }
 /**
  * Input to chat model class.
  */
-export interface ChatGoogleBaseInput<AuthOptions> extends BaseChatModelParams, GoogleConnectionParams<AuthOptions>, GoogleAIModelParams, GoogleAISafetyParams,
+export interface ChatGoogleBaseInput<AuthOptions> extends BaseChatModelParams, GoogleConnectionParams<AuthOptions>, GoogleAIModelParams, GoogleAISafetyParams, GoogleAIAPIParams, Pick<GoogleAIBaseLanguageModelCallOptions, "streamUsage"> {
 }
 /**
  * Integration with a Google chat model.
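Note: `GoogleAIAPI`, now referenced by `buildGeminiAPI()`, `get api()`, and `GoogleAIAPIParams`, is declared in `./types.js`, which is not shown in this hunk. Inferred purely from the calls that appear elsewhere in this diff, its surface includes at least the members sketched below; this is an assumption for orientation, not the package's actual declaration:

```ts
// Inferred sketch of the GoogleAIAPI surface, based only on calls visible in
// this diff (chat_models.* and connection.cjs). Parameter and return types are
// placeholders.
interface GoogleAIAPISketch {
  // connection.cjs: this.api.formatData(input, parameters)
  formatData(input: unknown, parameters: unknown): unknown;
  // chat_models.*: this.connection.api.responseToChatResult(response)
  responseToChatResult(response: unknown): unknown;
  // chat_models.*: this.connection.api.responseToChatGeneration({ data: output })
  responseToChatGeneration(response: { data: unknown }): unknown;
}
```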
package/dist/chat_models.js
CHANGED
@@ -7,7 +7,7 @@ import { JsonOutputKeyToolsParser } from "@langchain/core/output_parsers/openai_
 import { concat } from "@langchain/core/utils/stream";
 import { convertToGeminiTools, copyAIModelParams, copyAndValidateModelParamsInto, } from "./utils/common.js";
 import { AbstractGoogleLLMConnection } from "./connection.js";
-import { DefaultGeminiSafetyHandler } from "./utils/gemini.js";
+import { DefaultGeminiSafetyHandler, getGeminiAPI } from "./utils/gemini.js";
 import { ApiKeyGoogleAuth } from "./auth.js";
 import { ensureParams } from "./utils/failed_handler.js";
 import { zodToGeminiParameters } from "./utils/zod_to_gemini_parameters.js";
@@ -50,49 +50,20 @@ export class ChatConnection extends AbstractGoogleLLMConnection {
         }
         return true;
     }
-
-        const
-
-
-
-
-                return acc;
-            }
-            // Combine adjacent function messages
-            if (cur[0]?.role === "function" &&
-                acc.length > 0 &&
-                acc[acc.length - 1].role === "function") {
-                acc[acc.length - 1].parts = [
-                    ...acc[acc.length - 1].parts,
-                    ...cur[0].parts,
-                ];
-            }
-            else {
-                acc.push(...cur);
-            }
-            return acc;
-        }, []);
+    buildGeminiAPI() {
+        const geminiConfig = {
+            useSystemInstruction: this.useSystemInstruction,
+            ...this.apiConfig,
+        };
+        return getGeminiAPI(geminiConfig);
     }
-
-
-
+    get api() {
+        switch (this.apiName) {
+            case "google":
+                return this.buildGeminiAPI();
+            default:
+                return super.api;
         }
-        let ret = {};
-        for (let index = 0; index < input.length; index += 1) {
-            const message = input[index];
-            if (message._getType() === "system") {
-                // For system types, we only want it if it is the first message,
-                // if it appears anywhere else, it should be an error.
-                if (index === 0) {
-                    // eslint-disable-next-line prefer-destructuring
-                    ret = (await this.api.baseMessageToContent(message, undefined, true))[0];
-                }
-                else {
-                    throw new Error("System messages are only permitted as the first passed message.");
-                }
-            }
-        }
-        return ret;
     }
 }
 /**
@@ -270,15 +241,18 @@ export class ChatGoogleBase extends BaseChatModel {
                 generations: [finalChunk],
             };
         }
-        const response = await this.connection.request(messages, parameters, options);
-        const ret = this.connection.api.
-
+        const response = await this.connection.request(messages, parameters, options, runManager);
+        const ret = this.connection.api.responseToChatResult(response);
+        const chunk = ret?.generations?.[0];
+        if (chunk) {
+            await runManager?.handleLLMNewToken(chunk.text || "");
+        }
         return ret;
     }
     async *_streamResponseChunks(_messages, options, runManager) {
         // Make the call as a streaming request
         const parameters = this.invocationParams(options);
-        const response = await this.streamedConnection.request(_messages, parameters, options);
+        const response = await this.streamedConnection.request(_messages, parameters, options, runManager);
         // Get the streaming parser of the response
         const stream = response.data;
         let usageMetadata;
@@ -287,6 +261,9 @@ export class ChatGoogleBase extends BaseChatModel {
         // that is either available or added to the queue
         while (!stream.streamDone) {
             const output = await stream.nextChunk();
+            await runManager?.handleCustomEvent(`google-chunk-${this.constructor.name}`, {
+                output,
+            });
             if (output &&
                 output.usageMetadata &&
                 this.streamUsage !== false &&
@@ -298,7 +275,7 @@ export class ChatGoogleBase extends BaseChatModel {
                 };
             }
             const chunk = output !== null
-                ? this.connection.api.
+                ? this.connection.api.responseToChatGeneration({ data: output })
                 : new ChatGenerationChunk({
                     text: "",
                     generationInfo: { finishReason: "stop" },
@@ -307,8 +284,10 @@ export class ChatGoogleBase extends BaseChatModel {
                         usage_metadata: usageMetadata,
                     }),
                 });
-
-
+            if (chunk) {
+                yield chunk;
+                await runManager?.handleLLMNewToken(chunk.text ?? "", undefined, undefined, undefined, undefined, { chunk });
+            }
         }
     }
     /** @ignore */
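Note: the streaming loop in both builds of chat_models now fires a custom callback event named `google-chunk-<ConnectionClassName>` with a `{ output }` payload for every raw chunk read from the stream. A hedged sketch of observing those events with a plain @langchain/core callback handler (the class below is illustrative; only the event name pattern and payload shape come from this diff):

```ts
import { BaseCallbackHandler } from "@langchain/core/callbacks/base";

// Illustrative observer for the google-chunk-* custom events emitted in
// _streamResponseChunks; data.output is the value returned by stream.nextChunk().
class ChunkObserver extends BaseCallbackHandler {
  name = "ChunkObserver";

  async handleCustomEvent(eventName: string, data: { output?: unknown }): Promise<void> {
    if (eventName.startsWith("google-chunk-")) {
      console.log(`received ${eventName}`, data.output);
    }
  }
}

// Usage sketch: pass the handler in the call options of a streaming invocation.
// for await (const chunk of await model.stream(input, { callbacks: [new ChunkObserver()] })) { ... }
```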
package/dist/connection.cjs
CHANGED
@@ -1,10 +1,10 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.AbstractGoogleLLMConnection = exports.GoogleAIConnection = exports.GoogleRawConnection = exports.GoogleHostConnection = exports.GoogleConnection = void 0;
+exports.GoogleRequestRecorder = exports.GoogleRequestLogger = exports.GoogleRequestCallbackHandler = exports.AbstractGoogleLLMConnection = exports.GoogleAIConnection = exports.GoogleRawConnection = exports.GoogleHostConnection = exports.GoogleConnection = void 0;
 const env_1 = require("@langchain/core/utils/env");
-const
-const zod_to_gemini_parameters_js_1 = require("./utils/zod_to_gemini_parameters.cjs");
+const base_1 = require("@langchain/core/callbacks/base");
 const index_js_1 = require("./utils/index.cjs");
+const anthropic_js_1 = require("./utils/anthropic.cjs");
 class GoogleConnection {
     constructor(caller, client, streaming) {
         Object.defineProperty(this, "caller", {
@@ -102,17 +102,17 @@ class GoogleHostConnection extends GoogleConnection {
             writable: true,
             value: void 0
         });
-        Object.defineProperty(this, "
+        Object.defineProperty(this, "_endpoint", {
             enumerable: true,
             configurable: true,
             writable: true,
-            value:
+            value: void 0
         });
-        Object.defineProperty(this, "
+        Object.defineProperty(this, "_location", {
             enumerable: true,
             configurable: true,
             writable: true,
-            value:
+            value: void 0
         });
         Object.defineProperty(this, "apiVersion", {
             enumerable: true,
@@ -122,8 +122,8 @@ class GoogleHostConnection extends GoogleConnection {
         });
         this.caller = caller;
         this.platformType = fields?.platformType;
-        this.
-        this.
+        this._endpoint = fields?.endpoint;
+        this._location = fields?.location;
         this.apiVersion = fields?.apiVersion ?? this.apiVersion;
         this.client = client;
     }
@@ -133,6 +133,18 @@ class GoogleHostConnection extends GoogleConnection {
     get computedPlatformType() {
         return "gcp";
     }
+    get location() {
+        return this._location ?? this.computedLocation;
+    }
+    get computedLocation() {
+        return "us-central1";
+    }
+    get endpoint() {
+        return this._endpoint ?? this.computedEndpoint;
+    }
+    get computedEndpoint() {
+        return `${this.location}-aiplatform.googleapis.com`;
+    }
     buildMethod() {
         return "POST";
     }
@@ -167,24 +179,48 @@ class GoogleAIConnection extends GoogleHostConnection {
             writable: true,
             value: void 0
         });
-
-
+        Object.defineProperty(this, "_apiName", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "apiConfig", {
             enumerable: true,
             configurable: true,
             writable: true,
             value: void 0
-        });
+        });
         this.client = client;
         this.modelName = fields?.model ?? fields?.modelName ?? this.model;
         this.model = this.modelName;
-        this.
+        this._apiName = fields?.apiName;
+        this.apiConfig = {
+            safetyHandler: fields?.safetyHandler,
+            ...fields?.apiConfig,
+        };
     }
     get modelFamily() {
-
-
-
-
-
+        return (0, index_js_1.modelToFamily)(this.model);
+    }
+    get modelPublisher() {
+        return (0, index_js_1.modelToPublisher)(this.model);
+    }
+    get computedAPIName() {
+        // At least at the moment, model publishers and APIs map the same
+        return this.modelPublisher;
+    }
+    get apiName() {
+        return this._apiName ?? this.computedAPIName;
+    }
+    get api() {
+        switch (this.apiName) {
+            case "google":
+                return (0, index_js_1.getGeminiAPI)(this.apiConfig);
+            case "anthropic":
+                return (0, anthropic_js_1.getAnthropicAPI)(this.apiConfig);
+            default:
+                throw new Error(`Unknown API: ${this.apiName}`);
         }
     }
     get computedPlatformType() {
@@ -195,6 +231,16 @@ class GoogleAIConnection extends GoogleHostConnection {
             return "gcp";
         }
     }
+    get computedLocation() {
+        switch (this.apiName) {
+            case "google":
+                return super.computedLocation;
+            case "anthropic":
+                return "us-east5";
+            default:
+                throw new Error(`Unknown apiName: ${this.apiName}. Can't get location.`);
+        }
+    }
     async buildUrlGenerativeLanguage() {
         const method = await this.buildUrlMethod();
         const url = `https://generativelanguage.googleapis.com/${this.apiVersion}/models/${this.model}:${method}`;
@@ -203,7 +249,8 @@ class GoogleAIConnection extends GoogleHostConnection {
     async buildUrlVertex() {
         const projectId = await this.client.getProjectId();
         const method = await this.buildUrlMethod();
-        const
+        const publisher = this.modelPublisher;
+        const url = `https://${this.endpoint}/${this.apiVersion}/projects/${projectId}/locations/${this.location}/publishers/${publisher}/models/${this.model}:${method}`;
         return url;
     }
     async buildUrl() {
@@ -214,9 +261,30 @@ class GoogleAIConnection extends GoogleHostConnection {
             return this.buildUrlVertex();
         }
     }
-    async request(input, parameters, options) {
-        const
+    async request(input, parameters, options, runManager) {
+        const moduleName = this.constructor.name;
+        const streamingParameters = {
+            ...parameters,
+            streaming: this.streaming,
+        };
+        const data = await this.formatData(input, streamingParameters);
+        await runManager?.handleCustomEvent(`google-request-${moduleName}`, {
+            data,
+            parameters: streamingParameters,
+            options,
+            connection: {
+                ...this,
+                url: await this.buildUrl(),
+                urlMethod: await this.buildUrlMethod(),
+                modelFamily: this.modelFamily,
+                modelPublisher: this.modelPublisher,
+                computedPlatformType: this.computedPlatformType,
+            },
+        });
         const response = await this._request(data, options);
+        await runManager?.handleCustomEvent(`google-response-${moduleName}`, {
+            response,
+        });
         return response;
     }
 }
@@ -225,99 +293,111 @@ class AbstractGoogleLLMConnection extends GoogleAIConnection {
     async buildUrlMethodGemini() {
         return this.streaming ? "streamGenerateContent" : "generateContent";
     }
+    async buildUrlMethodClaude() {
+        return this.streaming ? "streamRawPredict" : "rawPredict";
+    }
     async buildUrlMethod() {
         switch (this.modelFamily) {
             case "gemini":
                 return this.buildUrlMethodGemini();
+            case "claude":
+                return this.buildUrlMethodClaude();
             default:
                 throw new Error(`Unknown model family: ${this.modelFamily}`);
         }
     }
-
+    async formatData(input, parameters) {
+        return this.api.formatData(input, parameters);
+    }
+}
+exports.AbstractGoogleLLMConnection = AbstractGoogleLLMConnection;
+class GoogleRequestCallbackHandler extends base_1.BaseCallbackHandler {
+    customEventInfo(eventName) {
+        const names = eventName.split("-");
         return {
-
-
-            topP: parameters.topP,
-            maxOutputTokens: parameters.maxOutputTokens,
-            stopSequences: parameters.stopSequences,
-            responseMimeType: parameters.responseMimeType,
+            subEvent: names[1],
+            module: names[2],
         };
     }
-
-
+    handleCustomEvent(eventName, data, runId, tags, metadata) {
+        if (!eventName) {
+            return undefined;
+        }
+        const eventInfo = this.customEventInfo(eventName);
+        switch (eventInfo.subEvent) {
+            case "request":
+                return this.handleCustomRequestEvent(eventName, eventInfo, data, runId, tags, metadata);
+            case "response":
+                return this.handleCustomResponseEvent(eventName, eventInfo, data, runId, tags, metadata);
+            case "chunk":
+                return this.handleCustomChunkEvent(eventName, eventInfo, data, runId, tags, metadata);
+            default:
+                console.error(`Unexpected eventInfo for ${eventName} ${JSON.stringify(eventInfo, null, 1)}`);
+        }
     }
-
-
+}
+exports.GoogleRequestCallbackHandler = GoogleRequestCallbackHandler;
+class GoogleRequestLogger extends GoogleRequestCallbackHandler {
+    constructor() {
+        super(...arguments);
+        Object.defineProperty(this, "name", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "GoogleRequestLogger"
+        });
     }
-
-        const
-
-            name: tool.name,
-            description: tool.description ?? `A function available to call.`,
-            parameters: jsonSchema,
-        };
+    log(eventName, data, tags) {
+        const tagStr = tags ? `[${tags}]` : "[]";
+        console.log(`${eventName} ${tagStr} ${JSON.stringify(data, null, 1)}`);
     }
-
-
-            {
-                functionDeclarations: tools.map(this.structuredToolToFunctionDeclaration),
-            },
-        ];
+    handleCustomRequestEvent(eventName, _eventInfo, data, _runId, tags, _metadata) {
+        this.log(eventName, data, tags);
     }
-
-
-        if (!tools || tools.length === 0) {
-            return [];
-        }
-        if (tools.every(function_calling_1.isLangChainTool)) {
-            return this.structuredToolsToGeminiTools(tools);
-        }
-        else {
-            if (tools.length === 1 &&
-                (!("functionDeclarations" in tools[0]) ||
-                    !tools[0].functionDeclarations?.length)) {
-                return [];
-            }
-            return tools;
-        }
+    handleCustomResponseEvent(eventName, _eventInfo, data, _runId, tags, _metadata) {
+        this.log(eventName, data, tags);
     }
-
-
-            return undefined;
-        }
-        return {
-            functionCallingConfig: {
-                mode: parameters.tool_choice,
-                allowedFunctionNames: parameters.allowed_function_names,
-            },
-        };
+    handleCustomChunkEvent(eventName, _eventInfo, data, _runId, tags, _metadata) {
+        this.log(eventName, data, tags);
     }
-
-
-
-
-
-
-
-
-
-
-        };
-
-
-
-
-
-    }
-
-
-
-
-
-
-
-
-
+}
+exports.GoogleRequestLogger = GoogleRequestLogger;
+class GoogleRequestRecorder extends GoogleRequestCallbackHandler {
+    constructor() {
+        super(...arguments);
+        Object.defineProperty(this, "name", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "GoogleRequestRecorder"
+        });
+        Object.defineProperty(this, "request", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: {}
+        });
+        Object.defineProperty(this, "response", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: {}
+        });
+        Object.defineProperty(this, "chunk", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: []
+        });
+    }
+    handleCustomRequestEvent(_eventName, _eventInfo, data, _runId, _tags, _metadata) {
+        this.request = data;
+    }
+    handleCustomResponseEvent(_eventName, _eventInfo, data, _runId, _tags, _metadata) {
+        this.response = data;
+    }
+    handleCustomChunkEvent(_eventName, _eventInfo, data, _runId, _tags, _metadata) {
+        this.chunk.push(data);
+    }
 }
-exports.
+exports.GoogleRequestRecorder = GoogleRequestRecorder;
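Note: `GoogleRequestLogger` and `GoogleRequestRecorder`, added above, are ready-made consumers of the `google-request-*`, `google-response-*`, and `google-chunk-*` custom events fired by `GoogleAIConnection.request()`: the logger prints each event, the recorder stores the last request and response plus every chunk. A hedged usage sketch (the model variable and the package-root import path are assumptions; the class names and recorded fields come from this diff):

```ts
// Assumption: the package root re-exports ./connection.js; otherwise import
// these classes from the connection module of @langchain/google-common.
import { GoogleRequestLogger, GoogleRequestRecorder } from "@langchain/google-common";

// Placeholder for whatever concrete chat model is built on this package.
declare const model: {
  invoke(input: string, options?: { callbacks?: unknown[] }): Promise<unknown>;
};

async function main(): Promise<void> {
  const recorder = new GoogleRequestRecorder();
  const logger = new GoogleRequestLogger();

  await model.invoke("What is 1 + 1?", { callbacks: [recorder, logger] });

  console.log(recorder.request);  // { data, parameters, options, connection } from google-request-*
  console.log(recorder.response); // { response } from google-response-*
  console.log(recorder.chunk);    // array of { output } payloads when streaming
}

main().catch(console.error);
```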