@langchain/google-common 0.0.4 → 0.0.5
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- package/dist/auth.cjs +2 -1
- package/dist/auth.js +2 -1
- package/dist/chat_models.cjs +30 -4
- package/dist/chat_models.d.ts +9 -3
- package/dist/chat_models.js +30 -4
- package/dist/connection.cjs +5 -5
- package/dist/connection.d.ts +0 -1
- package/dist/connection.js +5 -5
- package/dist/llms.cjs +6 -0
- package/dist/llms.d.ts +1 -0
- package/dist/llms.js +6 -0
- package/dist/types.d.ts +5 -2
- package/dist/utils/common.cjs +6 -2
- package/dist/utils/common.js +6 -2
- package/dist/utils/gemini.cjs +61 -16
- package/dist/utils/gemini.d.ts +4 -4
- package/dist/utils/gemini.js +60 -15
- package/dist/utils/zod_to_gemini_parameters.cjs +1 -0
- package/dist/utils/zod_to_gemini_parameters.js +1 -0
- package/package.json +3 -2
package/dist/auth.cjs
CHANGED
@@ -19,7 +19,8 @@ class GoogleAbstractedFetchClient {
         }
         const res = await fetch(url, fetchOptions);
         if (!res.ok) {
-            const
+            const resText = await res.text();
+            const error = new Error(`Google request failed with status code ${res.status}: ${resText}`);
             // eslint-disable-next-line @typescript-eslint/no-explicit-any
             error.response = res;
             throw error;
package/dist/auth.js
CHANGED
@@ -16,7 +16,8 @@ export class GoogleAbstractedFetchClient {
         }
         const res = await fetch(url, fetchOptions);
         if (!res.ok) {
-            const
+            const resText = await res.text();
+            const error = new Error(`Google request failed with status code ${res.status}: ${resText}`);
             // eslint-disable-next-line @typescript-eslint/no-explicit-any
             error.response = res;
             throw error;
package/dist/chat_models.cjs
CHANGED
@@ -7,6 +7,7 @@ const outputs_1 = require("@langchain/core/outputs");
 const messages_1 = require("@langchain/core/messages");
 const runnables_1 = require("@langchain/core/runnables");
 const openai_tools_1 = require("@langchain/core/output_parsers/openai_tools");
+const function_calling_1 = require("@langchain/core/utils/function_calling");
 const common_js_1 = require("./utils/common.cjs");
 const connection_js_1 = require("./connection.cjs");
 const gemini_js_1 = require("./utils/gemini.cjs");
@@ -16,10 +17,27 @@ const zod_to_gemini_parameters_js_1 = require("./utils/zod_to_gemini_parameters.
 class ChatConnection extends connection_js_1.AbstractGoogleLLMConnection {
     formatContents(input, _parameters) {
         return input
-            .map((msg) => (0, gemini_js_1.baseMessageToContent)(msg))
+            .map((msg, i) => (0, gemini_js_1.baseMessageToContent)(msg, input[i - 1]))
             .reduce((acc, cur) => [...acc, ...cur]);
     }
 }
+function convertToGeminiTools(structuredTools) {
+    return [
+        {
+            functionDeclarations: structuredTools.map((structuredTool) => {
+                if ((0, function_calling_1.isStructuredTool)(structuredTool)) {
+                    const jsonSchema = (0, zod_to_gemini_parameters_js_1.zodToGeminiParameters)(structuredTool.schema);
+                    return {
+                        name: structuredTool.name,
+                        description: structuredTool.description,
+                        parameters: jsonSchema,
+                    };
+                }
+                return structuredTool;
+            }),
+        },
+    ];
+}
 /**
  * Integration with a chat model.
  */
@@ -36,7 +54,6 @@ class ChatGoogleBase extends chat_models_1.BaseChatModel {
             writable: true,
             value: true
         });
-        /** @deprecated Prefer `modelName` */
         Object.defineProperty(this, "model", {
             enumerable: true,
             configurable: true,
@@ -131,19 +148,28 @@ class ChatGoogleBase extends chat_models_1.BaseChatModel {
     get platform() {
         return this.connection.platform;
     }
+    bindTools(tools, kwargs) {
+        return this.bind({ tools: convertToGeminiTools(tools), ...kwargs });
+    }
     // Replace
     _llmType() {
         return "chat_integration";
     }
+    /**
+     * Get the parameters used to invoke the model
+     */
+    invocationParams(options) {
+        return (0, common_js_1.copyAIModelParams)(this, options);
+    }
     async _generate(messages, options, _runManager) {
-        const parameters =
+        const parameters = this.invocationParams(options);
         const response = await this.connection.request(messages, parameters, options);
         const ret = (0, gemini_js_1.safeResponseToChatResult)(response, this.safetyHandler);
         return ret;
     }
     async *_streamResponseChunks(_messages, options, _runManager) {
         // Make the call as a streaming request
-        const parameters =
+        const parameters = this.invocationParams(options);
         const response = await this.streamedConnection.request(_messages, parameters, options);
         // Get the streaming parser of the response
         const stream = response.data;
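For orientation, the new `convertToGeminiTools` helper above wraps LangChain structured tools into Gemini `functionDeclarations`. A minimal sketch of the output shape, assuming a hypothetical `get_weather` tool whose Zod schema converts to the parameters object shown (in practice that object comes from `zodToGeminiParameters`):

// Sketch only: illustrative result of convertToGeminiTools([getWeatherTool]).
const geminiTools = [
  {
    functionDeclarations: [
      {
        name: "get_weather",                                    // structuredTool.name
        description: "Look up the current weather for a city",  // structuredTool.description
        parameters: {                                           // zodToGeminiParameters(structuredTool.schema)
          type: "object",
          properties: { city: { type: "string" } },
          required: ["city"],
        },
      },
    ],
  },
];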
package/dist/chat_models.d.ts
CHANGED
@@ -2,9 +2,11 @@ import { type BaseMessage } from "@langchain/core/messages";
 import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
 import { BaseChatModel, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
 import { ChatGenerationChunk, ChatResult } from "@langchain/core/outputs";
+import { AIMessageChunk } from "@langchain/core/messages";
 import { BaseLanguageModelInput, StructuredOutputMethodOptions } from "@langchain/core/language_models/base";
 import type { z } from "zod";
-import { Runnable } from "@langchain/core/runnables";
+import { Runnable, RunnableInterface } from "@langchain/core/runnables";
+import { StructuredToolInterface } from "@langchain/core/tools";
 import { GoogleAIBaseLLMInput, GoogleAIModelParams, GoogleAISafetySetting, GoogleConnectionParams, GooglePlatformType, GeminiContent, GoogleAIBaseLanguageModelCallOptions } from "./types.js";
 import { AbstractGoogleLLMConnection } from "./connection.js";
 import { GoogleAbstractedClient } from "./auth.js";
@@ -20,10 +22,9 @@ export interface ChatGoogleBaseInput<AuthOptions> extends BaseChatModelParams, G
 /**
  * Integration with a chat model.
  */
-export declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<GoogleAIBaseLanguageModelCallOptions> implements ChatGoogleBaseInput<AuthOptions> {
+export declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<GoogleAIBaseLanguageModelCallOptions, AIMessageChunk> implements ChatGoogleBaseInput<AuthOptions> {
     static lc_name(): string;
     lc_serializable: boolean;
-    /** @deprecated Prefer `modelName` */
     model: string;
     modelName: string;
     temperature: number;
@@ -42,7 +43,12 @@ export declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<
     buildClient(fields?: GoogleAIBaseLLMInput<AuthOptions>): GoogleAbstractedClient;
     buildConnection(fields: GoogleBaseLLMInput<AuthOptions>, client: GoogleAbstractedClient): void;
     get platform(): GooglePlatformType;
+    bindTools(tools: (StructuredToolInterface | Record<string, unknown>)[], kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>): RunnableInterface<BaseLanguageModelInput, AIMessageChunk, GoogleAIBaseLanguageModelCallOptions>;
     _llmType(): string;
+    /**
+     * Get the parameters used to invoke the model
+     */
+    invocationParams(options?: this["ParsedCallOptions"]): import("./types.js").GoogleAIModelRequestParams;
     _generate(messages: BaseMessage[], options: this["ParsedCallOptions"], _runManager: CallbackManagerForLLMRun | undefined): Promise<ChatResult>;
     _streamResponseChunks(_messages: BaseMessage[], options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
     /** @ignore */
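The declaration above gives `ChatGoogleBase` a typed `bindTools` method. A minimal usage sketch, assuming `model` is an instance of some concrete `ChatGoogleBase` subclass supplied by a platform package, and using a hypothetical weather tool:

import { z } from "zod";
import { DynamicStructuredTool } from "@langchain/core/tools";
import { ChatGoogleBase } from "@langchain/google-common";

// Hypothetical tool, for illustration only.
const getWeather = new DynamicStructuredTool({
  name: "get_weather",
  description: "Look up the current weather for a city",
  schema: z.object({ city: z.string() }),
  func: async ({ city }) => JSON.stringify({ city, tempC: 21 }),
});

// Assumed: a concrete ChatGoogleBase subclass instance from a platform package.
declare const model: ChatGoogleBase<unknown>;

const modelWithTools = model.bindTools([getWeather]);
const aiMessage = await modelWithTools.invoke("What is the weather in Paris?");
console.log(aiMessage.tool_calls); // populated via partsToBaseMessageFields (see utils/gemini below)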
package/dist/chat_models.js
CHANGED
@@ -4,6 +4,7 @@ import { ChatGenerationChunk } from "@langchain/core/outputs";
 import { AIMessageChunk } from "@langchain/core/messages";
 import { RunnablePassthrough, RunnableSequence, } from "@langchain/core/runnables";
 import { JsonOutputKeyToolsParser } from "@langchain/core/output_parsers/openai_tools";
+import { isStructuredTool } from "@langchain/core/utils/function_calling";
 import { copyAIModelParams, copyAndValidateModelParamsInto, } from "./utils/common.js";
 import { AbstractGoogleLLMConnection } from "./connection.js";
 import { baseMessageToContent, safeResponseToChatGeneration, safeResponseToChatResult, DefaultGeminiSafetyHandler, } from "./utils/gemini.js";
@@ -13,10 +14,27 @@ import { zodToGeminiParameters } from "./utils/zod_to_gemini_parameters.js";
 class ChatConnection extends AbstractGoogleLLMConnection {
     formatContents(input, _parameters) {
         return input
-            .map((msg) => baseMessageToContent(msg))
+            .map((msg, i) => baseMessageToContent(msg, input[i - 1]))
             .reduce((acc, cur) => [...acc, ...cur]);
     }
 }
+function convertToGeminiTools(structuredTools) {
+    return [
+        {
+            functionDeclarations: structuredTools.map((structuredTool) => {
+                if (isStructuredTool(structuredTool)) {
+                    const jsonSchema = zodToGeminiParameters(structuredTool.schema);
+                    return {
+                        name: structuredTool.name,
+                        description: structuredTool.description,
+                        parameters: jsonSchema,
+                    };
+                }
+                return structuredTool;
+            }),
+        },
+    ];
+}
 /**
  * Integration with a chat model.
  */
@@ -33,7 +51,6 @@ export class ChatGoogleBase extends BaseChatModel {
             writable: true,
             value: true
         });
-        /** @deprecated Prefer `modelName` */
         Object.defineProperty(this, "model", {
             enumerable: true,
             configurable: true,
@@ -128,19 +145,28 @@ export class ChatGoogleBase extends BaseChatModel {
     get platform() {
         return this.connection.platform;
     }
+    bindTools(tools, kwargs) {
+        return this.bind({ tools: convertToGeminiTools(tools), ...kwargs });
+    }
     // Replace
     _llmType() {
         return "chat_integration";
     }
+    /**
+     * Get the parameters used to invoke the model
+     */
+    invocationParams(options) {
+        return copyAIModelParams(this, options);
+    }
     async _generate(messages, options, _runManager) {
-        const parameters =
+        const parameters = this.invocationParams(options);
         const response = await this.connection.request(messages, parameters, options);
         const ret = safeResponseToChatResult(response, this.safetyHandler);
         return ret;
     }
     async *_streamResponseChunks(_messages, options, _runManager) {
         // Make the call as a streaming request
-        const parameters =
+        const parameters = this.invocationParams(options);
         const response = await this.streamedConnection.request(_messages, parameters, options);
         // Get the streaming parser of the response
         const stream = response.data;
package/dist/connection.cjs
CHANGED
@@ -124,7 +124,6 @@ exports.GoogleHostConnection = GoogleHostConnection;
 class GoogleAIConnection extends GoogleHostConnection {
     constructor(fields, caller, client, streaming) {
         super(fields, caller, client, streaming);
-        /** @deprecated Prefer `modelName` */
         Object.defineProperty(this, "model", {
             enumerable: true,
             configurable: true,
@@ -144,10 +143,11 @@ class GoogleAIConnection extends GoogleHostConnection {
             value: void 0
         });
         this.client = client;
-        this.modelName = fields?.
+        this.modelName = fields?.model ?? fields?.modelName ?? this.model;
+        this.model = this.modelName;
     }
     get modelFamily() {
-        if (this.
+        if (this.model.startsWith("gemini")) {
             return "gemini";
         }
         else {
@@ -164,13 +164,13 @@ class GoogleAIConnection extends GoogleHostConnection {
     }
     async buildUrlGenerativeLanguage() {
         const method = await this.buildUrlMethod();
-        const url = `https://generativelanguage.googleapis.com/${this.apiVersion}/models/${this.
+        const url = `https://generativelanguage.googleapis.com/${this.apiVersion}/models/${this.model}:${method}`;
         return url;
     }
     async buildUrlVertex() {
         const projectId = await this.client.getProjectId();
         const method = await this.buildUrlMethod();
-        const url = `https://${this.endpoint}/${this.apiVersion}/projects/${projectId}/locations/${this.location}/publishers/google/models/${this.
+        const url = `https://${this.endpoint}/${this.apiVersion}/projects/${projectId}/locations/${this.location}/publishers/google/models/${this.model}:${method}`;
         return url;
     }
     async buildUrl() {
package/dist/connection.d.ts
CHANGED
@@ -26,7 +26,6 @@ export declare abstract class GoogleHostConnection<CallOptions extends AsyncCall
     buildMethod(): GoogleAbstractedClientOpsMethod;
 }
 export declare abstract class GoogleAIConnection<CallOptions extends BaseLanguageModelCallOptions, MessageType, AuthOptions> extends GoogleHostConnection<CallOptions, GoogleLLMResponse, AuthOptions> implements GoogleAIBaseLLMInput<AuthOptions> {
-    /** @deprecated Prefer `modelName` */
     model: string;
     modelName: string;
     client: GoogleAbstractedClient;
package/dist/connection.js
CHANGED
@@ -119,7 +119,6 @@ export class GoogleHostConnection extends GoogleConnection {
 export class GoogleAIConnection extends GoogleHostConnection {
     constructor(fields, caller, client, streaming) {
         super(fields, caller, client, streaming);
-        /** @deprecated Prefer `modelName` */
         Object.defineProperty(this, "model", {
             enumerable: true,
             configurable: true,
@@ -139,10 +138,11 @@ export class GoogleAIConnection extends GoogleHostConnection {
             value: void 0
         });
         this.client = client;
-        this.modelName = fields?.
+        this.modelName = fields?.model ?? fields?.modelName ?? this.model;
+        this.model = this.modelName;
     }
     get modelFamily() {
-        if (this.
+        if (this.model.startsWith("gemini")) {
             return "gemini";
         }
         else {
@@ -159,13 +159,13 @@ export class GoogleAIConnection extends GoogleHostConnection {
     }
     async buildUrlGenerativeLanguage() {
         const method = await this.buildUrlMethod();
-        const url = `https://generativelanguage.googleapis.com/${this.apiVersion}/models/${this.
+        const url = `https://generativelanguage.googleapis.com/${this.apiVersion}/models/${this.model}:${method}`;
         return url;
     }
     async buildUrlVertex() {
         const projectId = await this.client.getProjectId();
         const method = await this.buildUrlMethod();
-        const url = `https://${this.endpoint}/${this.apiVersion}/projects/${projectId}/locations/${this.location}/publishers/google/models/${this.
+        const url = `https://${this.endpoint}/${this.apiVersion}/projects/${projectId}/locations/${this.location}/publishers/google/models/${this.model}:${method}`;
         return url;
     }
     async buildUrl() {
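As the connection changes above show, `model` and `modelName` now act as aliases: `model` wins when both are supplied, the two fields end up holding the same value, and that value is interpolated into the request URL. A small sketch of the resolution order (the `v1` API version and `:generateContent` method name are assumptions for illustration):

// Mirror of the aliasing done in the GoogleAIConnection constructor (sketch only).
function resolveModel(fields: { model?: string; modelName?: string } | undefined, fallback: string): string {
  return fields?.model ?? fields?.modelName ?? fallback;
}

const model = resolveModel({ modelName: "gemini-pro" }, "gemini-pro"); // "gemini-pro"
// Example URL built from the resolved name (apiVersion and method assumed):
const url = `https://generativelanguage.googleapis.com/v1/models/${model}:generateContent`;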
package/dist/llms.cjs
CHANGED
@@ -59,6 +59,12 @@ class GoogleBaseLLM extends llms_1.LLM {
             writable: true,
             value: "gemini-pro"
         });
+        Object.defineProperty(this, "model", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "gemini-pro"
+        });
         Object.defineProperty(this, "temperature", {
             enumerable: true,
             configurable: true,
package/dist/llms.d.ts
CHANGED
@@ -19,6 +19,7 @@ export declare abstract class GoogleBaseLLM<AuthOptions> extends LLM<BaseLanguag
     originalFields?: GoogleBaseLLMInput<AuthOptions>;
     lc_serializable: boolean;
     modelName: string;
+    model: string;
     temperature: number;
     maxOutputTokens: number;
     topP: number;
package/dist/llms.js
CHANGED
@@ -56,6 +56,12 @@ export class GoogleBaseLLM extends LLM {
             writable: true,
             value: "gemini-pro"
         });
+        Object.defineProperty(this, "model", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "gemini-pro"
+        });
         Object.defineProperty(this, "temperature", {
             enumerable: true,
             configurable: true,
package/dist/types.d.ts
CHANGED
@@ -38,9 +38,12 @@ export interface GoogleAISafetySetting {
     threshold: string;
 }
 export interface GoogleAIModelParams {
-    /** @deprecated Prefer `modelName` */
-    model?: string;
     /** Model to use */
+    model?: string;
+    /**
+     * Model to use
+     * Alias for `model`
+     */
     modelName?: string;
     /** Sampling temperature to use */
     temperature?: number;
package/dist/utils/common.cjs
CHANGED
@@ -8,7 +8,9 @@ function copyAIModelParams(params, options) {
 exports.copyAIModelParams = copyAIModelParams;
 function copyAIModelParamsInto(params, options, target) {
     const ret = target || {};
-
+    const model = options?.model ?? params?.model ?? target.model;
+    ret.modelName =
+        model ?? options?.modelName ?? params?.modelName ?? target.modelName;
     ret.temperature =
         options?.temperature ?? params?.temperature ?? target.temperature;
     ret.maxOutputTokens =
@@ -74,6 +76,7 @@ function copyAIModelParamsInto(params, options, target) {
         throw new Error(`Cannot mix structured tools with Gemini tools.\nReceived ${structuredOutputTools.length} structured tools and ${geminiTools.length} Gemini tools.`);
     }
     ret.tools = geminiTools ?? structuredOutputTools;
+    console.log(ret);
     return ret;
 }
 exports.copyAIModelParamsInto = copyAIModelParamsInto;
@@ -91,7 +94,8 @@ function modelToFamily(modelName) {
 exports.modelToFamily = modelToFamily;
 function validateModelParams(params) {
     const testParams = params ?? {};
-
+    const model = testParams.model ?? testParams.modelName;
+    switch (modelToFamily(model)) {
         case "gemini":
             return (0, gemini_js_1.validateGeminiParams)(testParams);
         default:
package/dist/utils/common.js
CHANGED
@@ -4,7 +4,9 @@ export function copyAIModelParams(params, options) {
 }
 export function copyAIModelParamsInto(params, options, target) {
     const ret = target || {};
-
+    const model = options?.model ?? params?.model ?? target.model;
+    ret.modelName =
+        model ?? options?.modelName ?? params?.modelName ?? target.modelName;
     ret.temperature =
         options?.temperature ?? params?.temperature ?? target.temperature;
     ret.maxOutputTokens =
@@ -70,6 +72,7 @@ export function copyAIModelParamsInto(params, options, target) {
         throw new Error(`Cannot mix structured tools with Gemini tools.\nReceived ${structuredOutputTools.length} structured tools and ${geminiTools.length} Gemini tools.`);
     }
     ret.tools = geminiTools ?? structuredOutputTools;
+    console.log(ret);
     return ret;
 }
 export function modelToFamily(modelName) {
@@ -85,7 +88,8 @@ export function modelToFamily(modelName) {
 }
 export function validateModelParams(params) {
     const testParams = params ?? {};
-
+    const model = testParams.model ?? testParams.modelName;
+    switch (modelToFamily(model)) {
         case "gemini":
             return validateGeminiParams(testParams);
         default:
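Each field copied by `copyAIModelParamsInto` follows the same precedence: per-call options first, then constructor params, then whatever the target already holds, with `model` now taking priority over `modelName`. A small sketch of that precedence with hypothetical values:

// Hypothetical values; the real function copies many more fields.
interface ModelParams { model?: string; modelName?: string; temperature?: number }

const target: ModelParams = { modelName: "gemini-pro", temperature: 0.2 };
const params: ModelParams = { temperature: 0.7 };          // constructor-level params
const options: ModelParams = { model: "gemini-1.0-pro" };  // per-call options

// Same ?? chains as in copyAIModelParamsInto above.
const model = options?.model ?? params?.model ?? target.model;
const modelName = model ?? options?.modelName ?? params?.modelName ?? target.modelName;
const temperature = options?.temperature ?? params?.temperature ?? target.temperature;

console.log({ modelName, temperature }); // { modelName: "gemini-1.0-pro", temperature: 0.7 }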
package/dist/utils/gemini.cjs
CHANGED
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.MessageGeminiSafetyHandler = exports.DefaultGeminiSafetyHandler = exports.isModelGemini = exports.validateGeminiParams = exports.safeResponseToChatResult = exports.responseToChatResult = exports.safeResponseToBaseMessage = exports.responseToBaseMessage = exports.partsToBaseMessageFields = exports.responseToBaseMessageFields = exports.responseToChatGenerations = exports.partToChatGeneration = exports.
+exports.MessageGeminiSafetyHandler = exports.DefaultGeminiSafetyHandler = exports.isModelGemini = exports.validateGeminiParams = exports.safeResponseToChatResult = exports.responseToChatResult = exports.safeResponseToBaseMessage = exports.responseToBaseMessage = exports.partsToBaseMessageFields = exports.responseToBaseMessageFields = exports.responseToChatGenerations = exports.partToChatGeneration = exports.partToMessageChunk = exports.chunkToString = exports.safeResponseToChatGeneration = exports.responseToChatGeneration = exports.safeResponseToGeneration = exports.responseToGeneration = exports.safeResponseToString = exports.responseToString = exports.partToText = exports.responseToParts = exports.responseToGenerateContentResponseData = exports.toolsRawToTools = exports.partsToToolsRaw = exports.partsToMessageContent = exports.baseMessageToContent = exports.messageContentToParts = void 0;
+const uuid_1 = require("uuid");
 const messages_1 = require("@langchain/core/messages");
 const outputs_1 = require("@langchain/core/outputs");
 const safety_js_1 = require("./safety.cjs");
@@ -107,7 +108,18 @@ function messageKwargsToParts(kwargs) {
 }
 function roleMessageToContent(role, message) {
     const contentParts = messageContentToParts(message.content);
-
+    let toolParts;
+    if ((0, messages_1.isAIMessage)(message) && !!message.tool_calls?.length) {
+        toolParts = message.tool_calls.map((toolCall) => ({
+            functionCall: {
+                name: toolCall.name,
+                args: toolCall.args,
+            },
+        }));
+    }
+    else {
+        toolParts = messageKwargsToParts(message.additional_kwargs);
+    }
     const parts = [...contentParts, ...toolParts];
     return [
         {
@@ -122,7 +134,7 @@ function systemMessageToContent(message) {
         ...roleMessageToContent("model", new messages_1.AIMessage("Ok")),
     ];
 }
-function toolMessageToContent(message) {
+function toolMessageToContent(message, prevMessage) {
     const contentStr = typeof message.content === "string"
         ? message.content
         : message.content.reduce((acc, content) => {
@@ -133,6 +145,11 @@ function toolMessageToContent(message) {
                 return acc;
            }
        }, "");
+    // Hacky :(
+    const responseName = ((0, messages_1.isAIMessage)(prevMessage) && !!prevMessage.tool_calls?.length
+        ? prevMessage.tool_calls[0].name
+        : prevMessage.name) ?? message.tool_call_id;
+    console.log(contentStr);
     try {
         const content = JSON.parse(contentStr);
         return [
@@ -141,8 +158,8 @@ function toolMessageToContent(message) {
                 parts: [
                     {
                         functionResponse: {
-                            name:
-                            response: content,
+                            name: responseName,
+                            response: { content },
                         },
                     },
                 ],
@@ -156,10 +173,8 @@ function toolMessageToContent(message) {
                 parts: [
                     {
                         functionResponse: {
-                            name:
-                            response: {
-                                response: contentStr,
-                            },
+                            name: responseName,
+                            response: { content: contentStr },
                         },
                     },
                 ],
@@ -167,7 +182,7 @@ function toolMessageToContent(message) {
         ];
     }
 }
-function baseMessageToContent(message) {
+function baseMessageToContent(message, prevMessage) {
     const type = message._getType();
     switch (type) {
         case "system":
@@ -177,7 +192,10 @@ function baseMessageToContent(message) {
         case "ai":
             return roleMessageToContent("model", message);
         case "tool":
-
+            if (!prevMessage) {
+                throw new Error("Tool messages cannot be the first message passed to the model.");
+            }
+            return toolMessageToContent(message, prevMessage);
         default:
             console.log(`Unsupported message type: ${type}`);
             return [];
@@ -241,7 +259,7 @@ function toolRawToTool(raw) {
 }
 function functionCallPartToToolRaw(part) {
     return {
-        id:
+        id: (0, uuid_1.v4)().replace(/-/g, ""),
         type: "function",
         function: {
             name: part.functionCall.name,
@@ -346,7 +364,7 @@ exports.safeResponseToGeneration = safeResponseToGeneration;
 function responseToChatGeneration(response) {
     return new outputs_1.ChatGenerationChunk({
         text: responseToString(response),
-        message:
+        message: partToMessageChunk(responseToParts(response)[0]),
         generationInfo: response,
     });
 }
@@ -373,7 +391,7 @@ function chunkToString(chunk) {
     }
 }
 exports.chunkToString = chunkToString;
-function
+function partToMessageChunk(part) {
     const fields = partsToBaseMessageFields([part]);
     if (typeof fields.content === "string") {
         return new messages_1.AIMessageChunk(fields);
@@ -389,9 +407,9 @@ function partToMessage(part) {
     }
     return new messages_1.AIMessageChunk(fields);
 }
-exports.
+exports.partToMessageChunk = partToMessageChunk;
 function partToChatGeneration(part) {
-    const message =
+    const message = partToMessageChunk(part);
     const text = partToText(part);
     return new outputs_1.ChatGenerationChunk({
         text,
@@ -405,11 +423,18 @@ function responseToChatGenerations(response) {
     if (ret.every((item) => typeof item.message.content === "string")) {
         const combinedContent = ret.map((item) => item.message.content).join("");
         const combinedText = ret.map((item) => item.text).join("");
+        const toolCallChunks = ret[ret.length - 1].message.additional_kwargs?.tool_calls?.map((toolCall, i) => ({
+            name: toolCall.function.name,
+            args: toolCall.function.arguments,
+            id: toolCall.id,
+            index: i,
+        }));
         ret = [
             new outputs_1.ChatGenerationChunk({
                 message: new messages_1.AIMessageChunk({
                     content: combinedContent,
                     additional_kwargs: ret[ret.length - 1].message.additional_kwargs,
+                    tool_call_chunks: toolCallChunks,
                 }),
                 text: combinedText,
                 generationInfo: ret[ret.length - 1].generationInfo,
@@ -427,10 +452,30 @@ exports.responseToBaseMessageFields = responseToBaseMessageFields;
 function partsToBaseMessageFields(parts) {
     const fields = {
         content: partsToMessageContent(parts),
+        tool_calls: [],
+        invalid_tool_calls: [],
     };
     const rawTools = partsToToolsRaw(parts);
     if (rawTools.length > 0) {
         const tools = toolsRawToTools(rawTools);
+        for (const tool of tools) {
+            try {
+                fields.tool_calls?.push({
+                    name: tool.function.name,
+                    args: JSON.parse(tool.function.arguments),
+                    id: tool.id,
+                });
+                // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            }
+            catch (e) {
+                fields.invalid_tool_calls?.push({
+                    name: tool.function.name,
+                    args: JSON.parse(tool.function.arguments),
+                    id: tool.id,
+                    error: e.message,
+                });
+            }
+        }
         fields.additional_kwargs = {
             tool_calls: tools,
         };
package/dist/utils/gemini.d.ts
CHANGED
@@ -1,8 +1,8 @@
-import { BaseMessage, BaseMessageChunk, BaseMessageFields, MessageContent } from "@langchain/core/messages";
+import { AIMessageFields, BaseMessage, BaseMessageChunk, BaseMessageFields, MessageContent } from "@langchain/core/messages";
 import { ChatGeneration, ChatGenerationChunk, ChatResult, Generation } from "@langchain/core/outputs";
 import type { GoogleLLMResponse, GoogleAIModelParams, GeminiPart, GeminiContent, GenerateContentResponseData, GoogleAISafetyHandler } from "../types.js";
 export declare function messageContentToParts(content: MessageContent): GeminiPart[];
-export declare function baseMessageToContent(message: BaseMessage): GeminiContent[];
+export declare function baseMessageToContent(message: BaseMessage, prevMessage?: BaseMessage): GeminiContent[];
 export declare function partsToMessageContent(parts: GeminiPart[]): MessageContent;
 interface FunctionCall {
     name: string;
@@ -34,11 +34,11 @@ export declare function safeResponseToGeneration(response: GoogleLLMResponse, sa
 export declare function responseToChatGeneration(response: GoogleLLMResponse): ChatGenerationChunk;
 export declare function safeResponseToChatGeneration(response: GoogleLLMResponse, safetyHandler: GoogleAISafetyHandler): ChatGenerationChunk;
 export declare function chunkToString(chunk: BaseMessageChunk): string;
-export declare function
+export declare function partToMessageChunk(part: GeminiPart): BaseMessageChunk;
 export declare function partToChatGeneration(part: GeminiPart): ChatGeneration;
 export declare function responseToChatGenerations(response: GoogleLLMResponse): ChatGeneration[];
 export declare function responseToBaseMessageFields(response: GoogleLLMResponse): BaseMessageFields;
-export declare function partsToBaseMessageFields(parts: GeminiPart[]):
+export declare function partsToBaseMessageFields(parts: GeminiPart[]): AIMessageFields;
 export declare function responseToBaseMessage(response: GoogleLLMResponse): BaseMessage;
 export declare function safeResponseToBaseMessage(response: GoogleLLMResponse, safetyHandler: GoogleAISafetyHandler): BaseMessage;
 export declare function responseToChatResult(response: GoogleLLMResponse): ChatResult;
package/dist/utils/gemini.js
CHANGED
@@ -1,4 +1,5 @@
-import {
+import { v4 as uuidv4 } from "uuid";
+import { AIMessage, AIMessageChunk, isAIMessage, } from "@langchain/core/messages";
 import { ChatGenerationChunk, } from "@langchain/core/outputs";
 import { GoogleAISafetyError } from "./safety.js";
 function messageContentText(content) {
@@ -103,7 +104,18 @@ function messageKwargsToParts(kwargs) {
 }
 function roleMessageToContent(role, message) {
     const contentParts = messageContentToParts(message.content);
-
+    let toolParts;
+    if (isAIMessage(message) && !!message.tool_calls?.length) {
+        toolParts = message.tool_calls.map((toolCall) => ({
+            functionCall: {
+                name: toolCall.name,
+                args: toolCall.args,
+            },
+        }));
+    }
+    else {
+        toolParts = messageKwargsToParts(message.additional_kwargs);
+    }
     const parts = [...contentParts, ...toolParts];
     return [
         {
@@ -118,7 +130,7 @@ function systemMessageToContent(message) {
         ...roleMessageToContent("model", new AIMessage("Ok")),
     ];
 }
-function toolMessageToContent(message) {
+function toolMessageToContent(message, prevMessage) {
     const contentStr = typeof message.content === "string"
         ? message.content
         : message.content.reduce((acc, content) => {
@@ -129,6 +141,11 @@ function toolMessageToContent(message) {
                 return acc;
            }
        }, "");
+    // Hacky :(
+    const responseName = (isAIMessage(prevMessage) && !!prevMessage.tool_calls?.length
+        ? prevMessage.tool_calls[0].name
+        : prevMessage.name) ?? message.tool_call_id;
+    console.log(contentStr);
     try {
         const content = JSON.parse(contentStr);
         return [
@@ -137,8 +154,8 @@ function toolMessageToContent(message) {
                 parts: [
                     {
                         functionResponse: {
-                            name:
-                            response: content,
+                            name: responseName,
+                            response: { content },
                         },
                     },
                 ],
@@ -152,10 +169,8 @@ function toolMessageToContent(message) {
                 parts: [
                     {
                         functionResponse: {
-                            name:
-                            response: {
-                                response: contentStr,
-                            },
+                            name: responseName,
+                            response: { content: contentStr },
                         },
                     },
                 ],
@@ -163,7 +178,7 @@ function toolMessageToContent(message) {
         ];
     }
 }
-export function baseMessageToContent(message) {
+export function baseMessageToContent(message, prevMessage) {
     const type = message._getType();
     switch (type) {
         case "system":
@@ -173,7 +188,10 @@ export function baseMessageToContent(message) {
         case "ai":
             return roleMessageToContent("model", message);
         case "tool":
-
+            if (!prevMessage) {
+                throw new Error("Tool messages cannot be the first message passed to the model.");
+            }
+            return toolMessageToContent(message, prevMessage);
         default:
             console.log(`Unsupported message type: ${type}`);
             return [];
@@ -235,7 +253,7 @@ function toolRawToTool(raw) {
 }
 function functionCallPartToToolRaw(part) {
     return {
-        id:
+        id: uuidv4().replace(/-/g, ""),
         type: "function",
         function: {
             name: part.functionCall.name,
@@ -331,7 +349,7 @@ export function safeResponseToGeneration(response, safetyHandler) {
 export function responseToChatGeneration(response) {
     return new ChatGenerationChunk({
         text: responseToString(response),
-        message:
+        message: partToMessageChunk(responseToParts(response)[0]),
         generationInfo: response,
     });
 }
@@ -355,7 +373,7 @@ export function chunkToString(chunk) {
         throw new Error(`Unexpected chunk: ${chunk}`);
     }
 }
-export function
+export function partToMessageChunk(part) {
     const fields = partsToBaseMessageFields([part]);
     if (typeof fields.content === "string") {
         return new AIMessageChunk(fields);
@@ -372,7 +390,7 @@ export function partToMessage(part) {
     return new AIMessageChunk(fields);
 }
 export function partToChatGeneration(part) {
-    const message =
+    const message = partToMessageChunk(part);
     const text = partToText(part);
     return new ChatGenerationChunk({
         text,
@@ -385,11 +403,18 @@ export function responseToChatGenerations(response) {
     if (ret.every((item) => typeof item.message.content === "string")) {
         const combinedContent = ret.map((item) => item.message.content).join("");
         const combinedText = ret.map((item) => item.text).join("");
+        const toolCallChunks = ret[ret.length - 1].message.additional_kwargs?.tool_calls?.map((toolCall, i) => ({
+            name: toolCall.function.name,
+            args: toolCall.function.arguments,
+            id: toolCall.id,
+            index: i,
+        }));
         ret = [
             new ChatGenerationChunk({
                 message: new AIMessageChunk({
                     content: combinedContent,
                     additional_kwargs: ret[ret.length - 1].message.additional_kwargs,
+                    tool_call_chunks: toolCallChunks,
                 }),
                 text: combinedText,
                 generationInfo: ret[ret.length - 1].generationInfo,
@@ -405,10 +430,30 @@ export function responseToBaseMessageFields(response) {
 export function partsToBaseMessageFields(parts) {
     const fields = {
         content: partsToMessageContent(parts),
+        tool_calls: [],
+        invalid_tool_calls: [],
     };
     const rawTools = partsToToolsRaw(parts);
     if (rawTools.length > 0) {
         const tools = toolsRawToTools(rawTools);
+        for (const tool of tools) {
+            try {
+                fields.tool_calls?.push({
+                    name: tool.function.name,
+                    args: JSON.parse(tool.function.arguments),
+                    id: tool.id,
+                });
+                // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            }
+            catch (e) {
+                fields.invalid_tool_calls?.push({
+                    name: tool.function.name,
+                    args: JSON.parse(tool.function.arguments),
+                    id: tool.id,
+                    error: e.message,
+                });
+            }
+        }
         fields.additional_kwargs = {
             tool_calls: tools,
        };
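With the changes above, a Gemini `functionCall` part is first turned into an OpenAI-style raw tool call (with a generated id) and then parsed into the message's `tool_calls` field, falling back to `invalid_tool_calls` when the serialized arguments cannot be parsed. A sketch of that round trip for a single hypothetical part (the exact shape of the raw tool call is inferred from the hunks above):

import { v4 as uuidv4 } from "uuid";

// Hypothetical Gemini response part, for illustration only.
const part = { functionCall: { name: "get_weather", args: { city: "Paris" } } };

// Step 1: raw tool call, roughly as built by functionCallPartToToolRaw / toolRawToTool.
const raw = {
  id: uuidv4().replace(/-/g, ""),
  type: "function" as const,
  function: {
    name: part.functionCall.name,
    arguments: JSON.stringify(part.functionCall.args),
  },
};

// Step 2: structured tool call, as pushed onto fields.tool_calls.
const toolCall = {
  name: raw.function.name,
  args: JSON.parse(raw.function.arguments), // a parse failure would land in invalid_tool_calls instead
  id: raw.id,
};
console.log(toolCall); // { name: "get_weather", args: { city: "Paris" }, id: "<32 hex chars>" }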
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/google-common",
-  "version": "0.0.4",
+  "version": "0.0.5",
   "description": "Core types and classes for Google services.",
   "type": "module",
   "engines": {
@@ -39,7 +39,8 @@
   "author": "LangChain",
   "license": "MIT",
   "dependencies": {
-    "@langchain/core": "~0.1.
+    "@langchain/core": "~0.1.56",
+    "uuid": "^9.0.0",
     "zod-to-json-schema": "^3.22.4"
   },
   "devDependencies": {