@langchain/google-common 0.0.4 → 0.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/auth.cjs +2 -1
- package/dist/auth.js +2 -1
- package/dist/chat_models.cjs +30 -4
- package/dist/chat_models.d.ts +9 -3
- package/dist/chat_models.js +30 -4
- package/dist/connection.cjs +5 -5
- package/dist/connection.d.ts +0 -1
- package/dist/connection.js +5 -5
- package/dist/llms.cjs +6 -0
- package/dist/llms.d.ts +1 -0
- package/dist/llms.js +6 -0
- package/dist/types.d.ts +5 -2
- package/dist/utils/common.cjs +7 -2
- package/dist/utils/common.js +7 -2
- package/dist/utils/gemini.cjs +96 -21
- package/dist/utils/gemini.d.ts +4 -4
- package/dist/utils/gemini.js +95 -20
- package/dist/utils/zod_to_gemini_parameters.cjs +1 -0
- package/dist/utils/zod_to_gemini_parameters.js +1 -0
- package/package.json +3 -2

package/dist/auth.cjs CHANGED
@@ -19,7 +19,8 @@ class GoogleAbstractedFetchClient {
         }
         const res = await fetch(url, fetchOptions);
         if (!res.ok) {
-            const
+            const resText = await res.text();
+            const error = new Error(`Google request failed with status code ${res.status}: ${resText}`);
             // eslint-disable-next-line @typescript-eslint/no-explicit-any
             error.response = res;
             throw error;

package/dist/auth.js CHANGED
@@ -16,7 +16,8 @@ export class GoogleAbstractedFetchClient {
         }
         const res = await fetch(url, fetchOptions);
         if (!res.ok) {
-            const
+            const resText = await res.text();
+            const error = new Error(`Google request failed with status code ${res.status}: ${resText}`);
             // eslint-disable-next-line @typescript-eslint/no-explicit-any
             error.response = res;
             throw error;
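
With this change, failed fetches include the response body text in the thrown error's message, while the raw `Response` is still attached as `error.response`. A minimal sketch of how a caller might surface that detail; `doGoogleCall` is a hypothetical stand-in for any call that ultimately goes through `GoogleAbstractedFetchClient`:

    // Sketch only: doGoogleCall is hypothetical, not part of this package's API.
    declare function doGoogleCall(): Promise<unknown>;

    async function main() {
      try {
        await doGoogleCall();
      } catch (e) {
        const err = e as Error & { response?: Response };
        // Message now looks like:
        // "Google request failed with status code 404: <body text returned by the API>"
        console.error(err.message);
        if (err.response) {
          console.error("HTTP status:", err.response.status);
        }
      }
    }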

package/dist/chat_models.cjs CHANGED
@@ -7,6 +7,7 @@ const outputs_1 = require("@langchain/core/outputs");
 const messages_1 = require("@langchain/core/messages");
 const runnables_1 = require("@langchain/core/runnables");
 const openai_tools_1 = require("@langchain/core/output_parsers/openai_tools");
+const function_calling_1 = require("@langchain/core/utils/function_calling");
 const common_js_1 = require("./utils/common.cjs");
 const connection_js_1 = require("./connection.cjs");
 const gemini_js_1 = require("./utils/gemini.cjs");
@@ -16,10 +17,27 @@ const zod_to_gemini_parameters_js_1 = require("./utils/zod_to_gemini_parameters.
 class ChatConnection extends connection_js_1.AbstractGoogleLLMConnection {
     formatContents(input, _parameters) {
         return input
-            .map((msg) => (0, gemini_js_1.baseMessageToContent)(msg))
+            .map((msg, i) => (0, gemini_js_1.baseMessageToContent)(msg, input[i - 1]))
             .reduce((acc, cur) => [...acc, ...cur]);
     }
 }
+function convertToGeminiTools(structuredTools) {
+    return [
+        {
+            functionDeclarations: structuredTools.map((structuredTool) => {
+                if ((0, function_calling_1.isStructuredTool)(structuredTool)) {
+                    const jsonSchema = (0, zod_to_gemini_parameters_js_1.zodToGeminiParameters)(structuredTool.schema);
+                    return {
+                        name: structuredTool.name,
+                        description: structuredTool.description,
+                        parameters: jsonSchema,
+                    };
+                }
+                return structuredTool;
+            }),
+        },
+    ];
+}
 /**
  * Integration with a chat model.
  */
@@ -36,7 +54,6 @@ class ChatGoogleBase extends chat_models_1.BaseChatModel {
             writable: true,
             value: true
         });
-        /** @deprecated Prefer `modelName` */
         Object.defineProperty(this, "model", {
             enumerable: true,
             configurable: true,
@@ -131,19 +148,28 @@ class ChatGoogleBase extends chat_models_1.BaseChatModel {
     get platform() {
         return this.connection.platform;
     }
+    bindTools(tools, kwargs) {
+        return this.bind({ tools: convertToGeminiTools(tools), ...kwargs });
+    }
     // Replace
     _llmType() {
         return "chat_integration";
     }
+    /**
+     * Get the parameters used to invoke the model
+     */
+    invocationParams(options) {
+        return (0, common_js_1.copyAIModelParams)(this, options);
+    }
     async _generate(messages, options, _runManager) {
-        const parameters =
+        const parameters = this.invocationParams(options);
         const response = await this.connection.request(messages, parameters, options);
         const ret = (0, gemini_js_1.safeResponseToChatResult)(response, this.safetyHandler);
         return ret;
     }
     async *_streamResponseChunks(_messages, options, _runManager) {
         // Make the call as a streaming request
-        const parameters =
+        const parameters = this.invocationParams(options);
         const response = await this.streamedConnection.request(_messages, parameters, options);
         // Get the streaming parser of the response
         const stream = response.data;
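
The new convertToGeminiTools helper wraps bound tools into a single Gemini tools entry: structured (Zod-schema) tools are converted through zodToGeminiParameters into a function declaration, while plain records are passed through unchanged. A sketch of the resulting shape for one structured tool; the JSON-schema output shown for the parameters field is illustrative, not copied from zodToGeminiParameters:

    // Approximate shape of the array bindTools passes as the "tools" call option.
    const geminiTools = [
      {
        functionDeclarations: [
          {
            name: "get_weather",
            description: "Look up the current weather for a city",
            parameters: {
              type: "object",
              properties: { city: { type: "string" } },
              required: ["city"],
            },
          },
        ],
      },
    ];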

package/dist/chat_models.d.ts CHANGED
@@ -2,9 +2,11 @@ import { type BaseMessage } from "@langchain/core/messages";
 import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
 import { BaseChatModel, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
 import { ChatGenerationChunk, ChatResult } from "@langchain/core/outputs";
+import { AIMessageChunk } from "@langchain/core/messages";
 import { BaseLanguageModelInput, StructuredOutputMethodOptions } from "@langchain/core/language_models/base";
 import type { z } from "zod";
-import { Runnable } from "@langchain/core/runnables";
+import { Runnable, RunnableInterface } from "@langchain/core/runnables";
+import { StructuredToolInterface } from "@langchain/core/tools";
 import { GoogleAIBaseLLMInput, GoogleAIModelParams, GoogleAISafetySetting, GoogleConnectionParams, GooglePlatformType, GeminiContent, GoogleAIBaseLanguageModelCallOptions } from "./types.js";
 import { AbstractGoogleLLMConnection } from "./connection.js";
 import { GoogleAbstractedClient } from "./auth.js";
@@ -20,10 +22,9 @@ export interface ChatGoogleBaseInput<AuthOptions> extends BaseChatModelParams, G
 /**
  * Integration with a chat model.
  */
-export declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<GoogleAIBaseLanguageModelCallOptions> implements ChatGoogleBaseInput<AuthOptions> {
+export declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<GoogleAIBaseLanguageModelCallOptions, AIMessageChunk> implements ChatGoogleBaseInput<AuthOptions> {
     static lc_name(): string;
     lc_serializable: boolean;
-    /** @deprecated Prefer `modelName` */
     model: string;
     modelName: string;
     temperature: number;
@@ -42,7 +43,12 @@ export declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<
     buildClient(fields?: GoogleAIBaseLLMInput<AuthOptions>): GoogleAbstractedClient;
     buildConnection(fields: GoogleBaseLLMInput<AuthOptions>, client: GoogleAbstractedClient): void;
     get platform(): GooglePlatformType;
+    bindTools(tools: (StructuredToolInterface | Record<string, unknown>)[], kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>): RunnableInterface<BaseLanguageModelInput, AIMessageChunk, GoogleAIBaseLanguageModelCallOptions>;
     _llmType(): string;
+    /**
+     * Get the parameters used to invoke the model
+     */
+    invocationParams(options?: this["ParsedCallOptions"]): import("./types.js").GoogleAIModelRequestParams;
     _generate(messages: BaseMessage[], options: this["ParsedCallOptions"], _runManager: CallbackManagerForLLMRun | undefined): Promise<ChatResult>;
     _streamResponseChunks(_messages: BaseMessage[], options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
     /** @ignore */
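
Against this declared signature, tool binding now follows the usual LangChain pattern. A hedged usage sketch: `chatModel` stands in for an instance of whatever concrete ChatGoogleBase subclass you actually use (this package only ships the abstract base), and running it requires configured credentials:

    import { z } from "zod";
    import { DynamicStructuredTool } from "@langchain/core/tools";
    import type { ChatGoogleBase } from "@langchain/google-common";

    // Assumption: some platform-specific subclass instance is available.
    declare const chatModel: ChatGoogleBase<unknown>;

    const weatherTool = new DynamicStructuredTool({
      name: "get_weather",
      description: "Look up the current weather for a city",
      schema: z.object({ city: z.string() }),
      func: async ({ city }) => `It is sunny in ${city}.`,
    });

    const modelWithTools = chatModel.bindTools([weatherTool]);
    const aiMessage = await modelWithTools.invoke("What's the weather in Paris?");
    console.log(aiMessage.tool_calls); // populated from the Gemini functionCall parts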

package/dist/chat_models.js CHANGED
@@ -4,6 +4,7 @@ import { ChatGenerationChunk } from "@langchain/core/outputs";
 import { AIMessageChunk } from "@langchain/core/messages";
 import { RunnablePassthrough, RunnableSequence, } from "@langchain/core/runnables";
 import { JsonOutputKeyToolsParser } from "@langchain/core/output_parsers/openai_tools";
+import { isStructuredTool } from "@langchain/core/utils/function_calling";
 import { copyAIModelParams, copyAndValidateModelParamsInto, } from "./utils/common.js";
 import { AbstractGoogleLLMConnection } from "./connection.js";
 import { baseMessageToContent, safeResponseToChatGeneration, safeResponseToChatResult, DefaultGeminiSafetyHandler, } from "./utils/gemini.js";
@@ -13,10 +14,27 @@ import { zodToGeminiParameters } from "./utils/zod_to_gemini_parameters.js";
 class ChatConnection extends AbstractGoogleLLMConnection {
     formatContents(input, _parameters) {
         return input
-            .map((msg) => baseMessageToContent(msg))
+            .map((msg, i) => baseMessageToContent(msg, input[i - 1]))
             .reduce((acc, cur) => [...acc, ...cur]);
     }
 }
+function convertToGeminiTools(structuredTools) {
+    return [
+        {
+            functionDeclarations: structuredTools.map((structuredTool) => {
+                if (isStructuredTool(structuredTool)) {
+                    const jsonSchema = zodToGeminiParameters(structuredTool.schema);
+                    return {
+                        name: structuredTool.name,
+                        description: structuredTool.description,
+                        parameters: jsonSchema,
+                    };
+                }
+                return structuredTool;
+            }),
+        },
+    ];
+}
 /**
  * Integration with a chat model.
  */
@@ -33,7 +51,6 @@ export class ChatGoogleBase extends BaseChatModel {
             writable: true,
             value: true
         });
-        /** @deprecated Prefer `modelName` */
         Object.defineProperty(this, "model", {
             enumerable: true,
             configurable: true,
@@ -128,19 +145,28 @@ export class ChatGoogleBase extends BaseChatModel {
     get platform() {
         return this.connection.platform;
     }
+    bindTools(tools, kwargs) {
+        return this.bind({ tools: convertToGeminiTools(tools), ...kwargs });
+    }
     // Replace
     _llmType() {
         return "chat_integration";
     }
+    /**
+     * Get the parameters used to invoke the model
+     */
+    invocationParams(options) {
+        return copyAIModelParams(this, options);
+    }
     async _generate(messages, options, _runManager) {
-        const parameters =
+        const parameters = this.invocationParams(options);
         const response = await this.connection.request(messages, parameters, options);
         const ret = safeResponseToChatResult(response, this.safetyHandler);
         return ret;
     }
     async *_streamResponseChunks(_messages, options, _runManager) {
         // Make the call as a streaming request
-        const parameters =
+        const parameters = this.invocationParams(options);
         const response = await this.streamedConnection.request(_messages, parameters, options);
         // Get the streaming parser of the response
         const stream = response.data;

package/dist/connection.cjs CHANGED
@@ -124,7 +124,6 @@ exports.GoogleHostConnection = GoogleHostConnection;
 class GoogleAIConnection extends GoogleHostConnection {
     constructor(fields, caller, client, streaming) {
         super(fields, caller, client, streaming);
-        /** @deprecated Prefer `modelName` */
         Object.defineProperty(this, "model", {
             enumerable: true,
             configurable: true,
@@ -144,10 +143,11 @@ class GoogleAIConnection extends GoogleHostConnection {
             value: void 0
         });
         this.client = client;
-        this.modelName = fields?.
+        this.modelName = fields?.model ?? fields?.modelName ?? this.model;
+        this.model = this.modelName;
     }
     get modelFamily() {
-        if (this.
+        if (this.model.startsWith("gemini")) {
             return "gemini";
         }
         else {
@@ -164,13 +164,13 @@ class GoogleAIConnection extends GoogleHostConnection {
     }
     async buildUrlGenerativeLanguage() {
         const method = await this.buildUrlMethod();
-        const url = `https://generativelanguage.googleapis.com/${this.apiVersion}/models/${this.
+        const url = `https://generativelanguage.googleapis.com/${this.apiVersion}/models/${this.model}:${method}`;
         return url;
     }
     async buildUrlVertex() {
         const projectId = await this.client.getProjectId();
         const method = await this.buildUrlMethod();
-        const url = `https://${this.endpoint}/${this.apiVersion}/projects/${projectId}/locations/${this.location}/publishers/google/models/${this.
+        const url = `https://${this.endpoint}/${this.apiVersion}/projects/${projectId}/locations/${this.location}/publishers/google/models/${this.model}:${method}`;
         return url;
     }
     async buildUrl() {

package/dist/connection.d.ts CHANGED
@@ -26,7 +26,6 @@ export declare abstract class GoogleHostConnection<CallOptions extends AsyncCall
     buildMethod(): GoogleAbstractedClientOpsMethod;
 }
 export declare abstract class GoogleAIConnection<CallOptions extends BaseLanguageModelCallOptions, MessageType, AuthOptions> extends GoogleHostConnection<CallOptions, GoogleLLMResponse, AuthOptions> implements GoogleAIBaseLLMInput<AuthOptions> {
-    /** @deprecated Prefer `modelName` */
     model: string;
     modelName: string;
     client: GoogleAbstractedClient;

package/dist/connection.js CHANGED
@@ -119,7 +119,6 @@ export class GoogleHostConnection extends GoogleConnection {
 export class GoogleAIConnection extends GoogleHostConnection {
     constructor(fields, caller, client, streaming) {
         super(fields, caller, client, streaming);
-        /** @deprecated Prefer `modelName` */
        Object.defineProperty(this, "model", {
             enumerable: true,
             configurable: true,
@@ -139,10 +138,11 @@ export class GoogleAIConnection extends GoogleHostConnection {
             value: void 0
         });
         this.client = client;
-        this.modelName = fields?.
+        this.modelName = fields?.model ?? fields?.modelName ?? this.model;
+        this.model = this.modelName;
     }
     get modelFamily() {
-        if (this.
+        if (this.model.startsWith("gemini")) {
             return "gemini";
         }
         else {
@@ -159,13 +159,13 @@ export class GoogleAIConnection extends GoogleHostConnection {
     }
     async buildUrlGenerativeLanguage() {
         const method = await this.buildUrlMethod();
-        const url = `https://generativelanguage.googleapis.com/${this.apiVersion}/models/${this.
+        const url = `https://generativelanguage.googleapis.com/${this.apiVersion}/models/${this.model}:${method}`;
         return url;
     }
     async buildUrlVertex() {
         const projectId = await this.client.getProjectId();
         const method = await this.buildUrlMethod();
-        const url = `https://${this.endpoint}/${this.apiVersion}/projects/${projectId}/locations/${this.location}/publishers/google/models/${this.
+        const url = `https://${this.endpoint}/${this.apiVersion}/projects/${projectId}/locations/${this.location}/publishers/google/models/${this.model}:${method}`;
        return url;
     }
     async buildUrl() {
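
The connection now accepts either `model` or `modelName`, keeps the two fields in sync, and builds request URLs from `model`. A sketch of the resulting Generative Language (AI Studio) URL; the api version, model id, and method shown here are illustrative values, since they depend on `apiVersion` and whether the request is streamed:

    const apiVersion = "v1";
    const model = "gemini-pro";
    const method = "generateContent";
    const url = `https://generativelanguage.googleapis.com/${apiVersion}/models/${model}:${method}`;
    // => https://generativelanguage.googleapis.com/v1/models/gemini-pro:generateContent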

package/dist/llms.cjs CHANGED
@@ -59,6 +59,12 @@ class GoogleBaseLLM extends llms_1.LLM {
             writable: true,
             value: "gemini-pro"
         });
+        Object.defineProperty(this, "model", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "gemini-pro"
+        });
         Object.defineProperty(this, "temperature", {
             enumerable: true,
             configurable: true,

package/dist/llms.d.ts CHANGED
@@ -19,6 +19,7 @@ export declare abstract class GoogleBaseLLM<AuthOptions> extends LLM<BaseLanguag
     originalFields?: GoogleBaseLLMInput<AuthOptions>;
     lc_serializable: boolean;
     modelName: string;
+    model: string;
     temperature: number;
     maxOutputTokens: number;
     topP: number;

package/dist/llms.js CHANGED
@@ -56,6 +56,12 @@ export class GoogleBaseLLM extends LLM {
             writable: true,
             value: "gemini-pro"
         });
+        Object.defineProperty(this, "model", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "gemini-pro"
+        });
         Object.defineProperty(this, "temperature", {
             enumerable: true,
             configurable: true,

package/dist/types.d.ts CHANGED
@@ -38,9 +38,12 @@ export interface GoogleAISafetySetting {
     threshold: string;
 }
 export interface GoogleAIModelParams {
-    /** @deprecated Prefer `modelName` */
-    model?: string;
     /** Model to use */
+    model?: string;
+    /**
+     * Model to use
+     * Alias for `model`
+     */
     modelName?: string;
     /** Sampling temperature to use */
     temperature?: number;
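
`model` is now the primary field and `modelName` its documented alias, so either spelling selects the model. A small sketch; the type import assumes the package re-exports its types from the root entry point:

    import type { GoogleAIModelParams } from "@langchain/google-common";

    // Both forms resolve to the same model at request time.
    const byModel: GoogleAIModelParams = { model: "gemini-pro", temperature: 0.2 };
    const byModelName: GoogleAIModelParams = { modelName: "gemini-pro", temperature: 0.2 };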

package/dist/utils/common.cjs CHANGED
@@ -8,7 +8,10 @@ function copyAIModelParams(params, options) {
 exports.copyAIModelParams = copyAIModelParams;
 function copyAIModelParamsInto(params, options, target) {
     const ret = target || {};
-
+    const model = options?.model ?? params?.model ?? target.model;
+    ret.modelName =
+        model ?? options?.modelName ?? params?.modelName ?? target.modelName;
+    ret.model = model;
     ret.temperature =
         options?.temperature ?? params?.temperature ?? target.temperature;
     ret.maxOutputTokens =
@@ -74,6 +77,7 @@ function copyAIModelParamsInto(params, options, target) {
         throw new Error(`Cannot mix structured tools with Gemini tools.\nReceived ${structuredOutputTools.length} structured tools and ${geminiTools.length} Gemini tools.`);
     }
     ret.tools = geminiTools ?? structuredOutputTools;
+    console.log(ret);
     return ret;
 }
 exports.copyAIModelParamsInto = copyAIModelParamsInto;
@@ -91,7 +95,8 @@ function modelToFamily(modelName) {
 exports.modelToFamily = modelToFamily;
 function validateModelParams(params) {
     const testParams = params ?? {};
-
+    const model = testParams.model ?? testParams.modelName;
+    switch (modelToFamily(model)) {
         case "gemini":
             return (0, gemini_js_1.validateGeminiParams)(testParams);
         default:

package/dist/utils/common.js CHANGED
@@ -4,7 +4,10 @@ export function copyAIModelParams(params, options) {
 }
 export function copyAIModelParamsInto(params, options, target) {
     const ret = target || {};
-
+    const model = options?.model ?? params?.model ?? target.model;
+    ret.modelName =
+        model ?? options?.modelName ?? params?.modelName ?? target.modelName;
+    ret.model = model;
     ret.temperature =
         options?.temperature ?? params?.temperature ?? target.temperature;
     ret.maxOutputTokens =
@@ -70,6 +73,7 @@ export function copyAIModelParamsInto(params, options, target) {
         throw new Error(`Cannot mix structured tools with Gemini tools.\nReceived ${structuredOutputTools.length} structured tools and ${geminiTools.length} Gemini tools.`);
     }
     ret.tools = geminiTools ?? structuredOutputTools;
+    console.log(ret);
     return ret;
 }
 export function modelToFamily(modelName) {
@@ -85,7 +89,8 @@ export function modelToFamily(modelName) {
 }
 export function validateModelParams(params) {
     const testParams = params ?? {};
-
+    const model = testParams.model ?? testParams.modelName;
+    switch (modelToFamily(model)) {
         case "gemini":
             return validateGeminiParams(testParams);
         default:
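
The parameter merge resolves each field as call options first, then constructor params, then the existing target, and `model` additionally wins over `modelName`. A worked sketch of that precedence with illustrative values:

    // options > params > target for each field; model takes priority over modelName.
    type ModelParams = { model?: string; modelName?: string; temperature?: number };

    const target: ModelParams = { model: "gemini-pro", temperature: 0.7 };
    const params: ModelParams = { modelName: "gemini-1.0-pro", temperature: 0.2 };
    const options: ModelParams = { temperature: 0.9 };

    // Mirrors the resolution in copyAIModelParamsInto:
    const model = options.model ?? params.model ?? target.model;                          // "gemini-pro"
    const modelName = model ?? options.modelName ?? params.modelName ?? target.modelName; // "gemini-pro"
    const temperature = options.temperature ?? params.temperature ?? target.temperature;  // 0.9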

package/dist/utils/gemini.cjs CHANGED
@@ -1,9 +1,19 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.MessageGeminiSafetyHandler = exports.DefaultGeminiSafetyHandler = exports.isModelGemini = exports.validateGeminiParams = exports.safeResponseToChatResult = exports.responseToChatResult = exports.safeResponseToBaseMessage = exports.responseToBaseMessage = exports.partsToBaseMessageFields = exports.responseToBaseMessageFields = exports.responseToChatGenerations = exports.partToChatGeneration = exports.
+exports.MessageGeminiSafetyHandler = exports.DefaultGeminiSafetyHandler = exports.isModelGemini = exports.validateGeminiParams = exports.safeResponseToChatResult = exports.responseToChatResult = exports.safeResponseToBaseMessage = exports.responseToBaseMessage = exports.partsToBaseMessageFields = exports.responseToBaseMessageFields = exports.responseToChatGenerations = exports.partToChatGeneration = exports.partToMessageChunk = exports.chunkToString = exports.safeResponseToChatGeneration = exports.responseToChatGeneration = exports.safeResponseToGeneration = exports.responseToGeneration = exports.safeResponseToString = exports.responseToString = exports.partToText = exports.responseToParts = exports.responseToGenerateContentResponseData = exports.toolsRawToTools = exports.partsToToolsRaw = exports.partsToMessageContent = exports.baseMessageToContent = exports.messageContentToParts = void 0;
+const uuid_1 = require("uuid");
 const messages_1 = require("@langchain/core/messages");
 const outputs_1 = require("@langchain/core/outputs");
 const safety_js_1 = require("./safety.cjs");
+const extractMimeType = (str) => {
+    if (str.startsWith("data:")) {
+        return {
+            mimeType: str.split(":")[1].split(";")[0],
+            data: str.split(",")[1],
+        };
+    }
+    return null;
+};
 function messageContentText(content) {
     if (content?.text && content?.text.length > 0) {
         return {
@@ -21,12 +31,10 @@ function messageContentImageUrl(content) {
     if (!url) {
         throw new Error("Missing Image URL");
     }
-
+    const mineTypeAndData = extractMimeType(url);
+    if (mineTypeAndData) {
         return {
-            inlineData:
-                mimeType: url.split(":")[1].split(";")[0],
-                data: url.split(",")[1],
-            },
+            inlineData: mineTypeAndData,
         };
     }
     else {
@@ -39,6 +47,27 @@ function messageContentImageUrl(content) {
         };
     }
 }
+function messageContentMedia(
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+content) {
+    if ("mimeType" in content && "data" in content) {
+        return {
+            inlineData: {
+                mimeType: content.mimeType,
+                data: content.data,
+            },
+        };
+    }
+    else if ("mimeType" in content && "fileUri" in content) {
+        return {
+            fileData: {
+                mimeType: content.mimeType,
+                fileUri: content.fileUri,
+            },
+        };
+    }
+    throw new Error("Invalid media content");
+}
 function messageContentToParts(content) {
     // Convert a string to a text type MessageContent if needed
     const messageContent = typeof content === "string"
@@ -64,6 +93,8 @@ function messageContentToParts(content) {
                 return messageContentImageUrl(content);
             }
             break;
+        case "media":
+            return messageContentMedia(content);
         default:
             throw new Error(`Unsupported type received while converting message to message parts`);
     }
@@ -107,7 +138,18 @@ function messageKwargsToParts(kwargs) {
 }
 function roleMessageToContent(role, message) {
     const contentParts = messageContentToParts(message.content);
-
+    let toolParts;
+    if ((0, messages_1.isAIMessage)(message) && !!message.tool_calls?.length) {
+        toolParts = message.tool_calls.map((toolCall) => ({
+            functionCall: {
+                name: toolCall.name,
+                args: toolCall.args,
+            },
+        }));
+    }
+    else {
+        toolParts = messageKwargsToParts(message.additional_kwargs);
+    }
     const parts = [...contentParts, ...toolParts];
     return [
         {
@@ -122,7 +164,7 @@ function systemMessageToContent(message) {
         ...roleMessageToContent("model", new messages_1.AIMessage("Ok")),
     ];
 }
-function toolMessageToContent(message) {
+function toolMessageToContent(message, prevMessage) {
     const contentStr = typeof message.content === "string"
         ? message.content
         : message.content.reduce((acc, content) => {
@@ -133,6 +175,11 @@ function toolMessageToContent(message) {
                 return acc;
             }
         }, "");
+    // Hacky :(
+    const responseName = ((0, messages_1.isAIMessage)(prevMessage) && !!prevMessage.tool_calls?.length
+        ? prevMessage.tool_calls[0].name
+        : prevMessage.name) ?? message.tool_call_id;
+    console.log(contentStr);
     try {
         const content = JSON.parse(contentStr);
         return [
@@ -141,8 +188,8 @@ function toolMessageToContent(message) {
                 parts: [
                     {
                         functionResponse: {
-                            name:
-                            response: content,
+                            name: responseName,
+                            response: { content },
                         },
                     },
                 ],
@@ -156,10 +203,8 @@ function toolMessageToContent(message) {
                 parts: [
                     {
                         functionResponse: {
-                            name:
-                            response: {
-                                response: contentStr,
-                            },
+                            name: responseName,
+                            response: { content: contentStr },
                         },
                     },
                 ],
@@ -167,7 +212,7 @@
         ];
     }
 }
-function baseMessageToContent(message) {
+function baseMessageToContent(message, prevMessage) {
     const type = message._getType();
     switch (type) {
         case "system":
@@ -177,7 +222,10 @@ function baseMessageToContent(message) {
         case "ai":
             return roleMessageToContent("model", message);
         case "tool":
-
+            if (!prevMessage) {
+                throw new Error("Tool messages cannot be the first message passed to the model.");
+            }
+            return toolMessageToContent(message, prevMessage);
         default:
             console.log(`Unsupported message type: ${type}`);
             return [];
@@ -241,7 +289,7 @@ function toolRawToTool(raw) {
 }
 function functionCallPartToToolRaw(part) {
     return {
-        id:
+        id: (0, uuid_1.v4)().replace(/-/g, ""),
         type: "function",
         function: {
             name: part.functionCall.name,
@@ -346,7 +394,7 @@ exports.safeResponseToGeneration = safeResponseToGeneration;
 function responseToChatGeneration(response) {
     return new outputs_1.ChatGenerationChunk({
         text: responseToString(response),
-        message:
+        message: partToMessageChunk(responseToParts(response)[0]),
         generationInfo: response,
     });
 }
@@ -373,7 +421,7 @@ function chunkToString(chunk) {
     }
 }
 exports.chunkToString = chunkToString;
-function
+function partToMessageChunk(part) {
     const fields = partsToBaseMessageFields([part]);
     if (typeof fields.content === "string") {
         return new messages_1.AIMessageChunk(fields);
@@ -389,9 +437,9 @@ function partToMessage(part) {
     }
     return new messages_1.AIMessageChunk(fields);
 }
-exports.
+exports.partToMessageChunk = partToMessageChunk;
 function partToChatGeneration(part) {
-    const message =
+    const message = partToMessageChunk(part);
     const text = partToText(part);
     return new outputs_1.ChatGenerationChunk({
         text,
@@ -405,11 +453,18 @@ function responseToChatGenerations(response) {
     if (ret.every((item) => typeof item.message.content === "string")) {
         const combinedContent = ret.map((item) => item.message.content).join("");
         const combinedText = ret.map((item) => item.text).join("");
+        const toolCallChunks = ret[ret.length - 1].message.additional_kwargs?.tool_calls?.map((toolCall, i) => ({
+            name: toolCall.function.name,
+            args: toolCall.function.arguments,
+            id: toolCall.id,
+            index: i,
+        }));
         ret = [
             new outputs_1.ChatGenerationChunk({
                 message: new messages_1.AIMessageChunk({
                     content: combinedContent,
                     additional_kwargs: ret[ret.length - 1].message.additional_kwargs,
+                    tool_call_chunks: toolCallChunks,
                 }),
                 text: combinedText,
                 generationInfo: ret[ret.length - 1].generationInfo,
@@ -427,10 +482,30 @@ exports.responseToBaseMessageFields = responseToBaseMessageFields;
 function partsToBaseMessageFields(parts) {
     const fields = {
         content: partsToMessageContent(parts),
+        tool_calls: [],
+        invalid_tool_calls: [],
     };
     const rawTools = partsToToolsRaw(parts);
     if (rawTools.length > 0) {
         const tools = toolsRawToTools(rawTools);
+        for (const tool of tools) {
+            try {
+                fields.tool_calls?.push({
+                    name: tool.function.name,
+                    args: JSON.parse(tool.function.arguments),
+                    id: tool.id,
+                });
+                // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            }
+            catch (e) {
+                fields.invalid_tool_calls?.push({
+                    name: tool.function.name,
+                    args: JSON.parse(tool.function.arguments),
+                    id: tool.id,
+                    error: e.message,
+                });
+            }
+        }
         fields.additional_kwargs = {
             tool_calls: tools,
         };
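
The new `media` message-content type maps inline base64 data to a Gemini `inlineData` part and a file reference to a `fileData` part. A sketch of both shapes as they would appear in a message's content array; the mime types, data, and GCS URI here are placeholders:

    // Two hypothetical "media" content entries and the Gemini parts they become.
    const inlineMedia = { type: "media", mimeType: "image/png", data: "<base64-encoded bytes>" };
    // -> { inlineData: { mimeType: "image/png", data: "<base64-encoded bytes>" } }

    const fileMedia = { type: "media", mimeType: "video/mp4", fileUri: "gs://my-bucket/clip.mp4" };
    // -> { fileData: { mimeType: "video/mp4", fileUri: "gs://my-bucket/clip.mp4" } }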

package/dist/utils/gemini.d.ts CHANGED
@@ -1,8 +1,8 @@
-import { BaseMessage, BaseMessageChunk, BaseMessageFields, MessageContent } from "@langchain/core/messages";
+import { AIMessageFields, BaseMessage, BaseMessageChunk, BaseMessageFields, MessageContent } from "@langchain/core/messages";
 import { ChatGeneration, ChatGenerationChunk, ChatResult, Generation } from "@langchain/core/outputs";
 import type { GoogleLLMResponse, GoogleAIModelParams, GeminiPart, GeminiContent, GenerateContentResponseData, GoogleAISafetyHandler } from "../types.js";
 export declare function messageContentToParts(content: MessageContent): GeminiPart[];
-export declare function baseMessageToContent(message: BaseMessage): GeminiContent[];
+export declare function baseMessageToContent(message: BaseMessage, prevMessage?: BaseMessage): GeminiContent[];
 export declare function partsToMessageContent(parts: GeminiPart[]): MessageContent;
 interface FunctionCall {
     name: string;
@@ -34,11 +34,11 @@ export declare function safeResponseToGeneration(response: GoogleLLMResponse, sa
 export declare function responseToChatGeneration(response: GoogleLLMResponse): ChatGenerationChunk;
 export declare function safeResponseToChatGeneration(response: GoogleLLMResponse, safetyHandler: GoogleAISafetyHandler): ChatGenerationChunk;
 export declare function chunkToString(chunk: BaseMessageChunk): string;
-export declare function
+export declare function partToMessageChunk(part: GeminiPart): BaseMessageChunk;
 export declare function partToChatGeneration(part: GeminiPart): ChatGeneration;
 export declare function responseToChatGenerations(response: GoogleLLMResponse): ChatGeneration[];
 export declare function responseToBaseMessageFields(response: GoogleLLMResponse): BaseMessageFields;
-export declare function partsToBaseMessageFields(parts: GeminiPart[]):
+export declare function partsToBaseMessageFields(parts: GeminiPart[]): AIMessageFields;
 export declare function responseToBaseMessage(response: GoogleLLMResponse): BaseMessage;
 export declare function safeResponseToBaseMessage(response: GoogleLLMResponse, safetyHandler: GoogleAISafetyHandler): BaseMessage;
 export declare function responseToChatResult(response: GoogleLLMResponse): ChatResult;

package/dist/utils/gemini.js CHANGED
@@ -1,6 +1,16 @@
-import {
+import { v4 as uuidv4 } from "uuid";
+import { AIMessage, AIMessageChunk, isAIMessage, } from "@langchain/core/messages";
 import { ChatGenerationChunk, } from "@langchain/core/outputs";
 import { GoogleAISafetyError } from "./safety.js";
+const extractMimeType = (str) => {
+    if (str.startsWith("data:")) {
+        return {
+            mimeType: str.split(":")[1].split(";")[0],
+            data: str.split(",")[1],
+        };
+    }
+    return null;
+};
 function messageContentText(content) {
     if (content?.text && content?.text.length > 0) {
         return {
@@ -18,12 +28,10 @@ function messageContentImageUrl(content) {
     if (!url) {
         throw new Error("Missing Image URL");
     }
-
+    const mineTypeAndData = extractMimeType(url);
+    if (mineTypeAndData) {
         return {
-            inlineData:
-                mimeType: url.split(":")[1].split(";")[0],
-                data: url.split(",")[1],
-            },
+            inlineData: mineTypeAndData,
         };
     }
     else {
@@ -36,6 +44,27 @@ function messageContentImageUrl(content) {
         };
     }
 }
+function messageContentMedia(
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+content) {
+    if ("mimeType" in content && "data" in content) {
+        return {
+            inlineData: {
+                mimeType: content.mimeType,
+                data: content.data,
+            },
+        };
+    }
+    else if ("mimeType" in content && "fileUri" in content) {
+        return {
+            fileData: {
+                mimeType: content.mimeType,
+                fileUri: content.fileUri,
+            },
+        };
+    }
+    throw new Error("Invalid media content");
+}
 export function messageContentToParts(content) {
     // Convert a string to a text type MessageContent if needed
     const messageContent = typeof content === "string"
@@ -61,6 +90,8 @@ export function messageContentToParts(content) {
                 return messageContentImageUrl(content);
             }
             break;
+        case "media":
+            return messageContentMedia(content);
         default:
             throw new Error(`Unsupported type received while converting message to message parts`);
     }
@@ -103,7 +134,18 @@ function messageKwargsToParts(kwargs) {
 }
 function roleMessageToContent(role, message) {
     const contentParts = messageContentToParts(message.content);
-
+    let toolParts;
+    if (isAIMessage(message) && !!message.tool_calls?.length) {
+        toolParts = message.tool_calls.map((toolCall) => ({
+            functionCall: {
+                name: toolCall.name,
+                args: toolCall.args,
+            },
+        }));
+    }
+    else {
+        toolParts = messageKwargsToParts(message.additional_kwargs);
+    }
     const parts = [...contentParts, ...toolParts];
     return [
         {
@@ -118,7 +160,7 @@ function systemMessageToContent(message) {
         ...roleMessageToContent("model", new AIMessage("Ok")),
     ];
 }
-function toolMessageToContent(message) {
+function toolMessageToContent(message, prevMessage) {
     const contentStr = typeof message.content === "string"
         ? message.content
         : message.content.reduce((acc, content) => {
@@ -129,6 +171,11 @@ function toolMessageToContent(message) {
                 return acc;
             }
         }, "");
+    // Hacky :(
+    const responseName = (isAIMessage(prevMessage) && !!prevMessage.tool_calls?.length
+        ? prevMessage.tool_calls[0].name
+        : prevMessage.name) ?? message.tool_call_id;
+    console.log(contentStr);
     try {
         const content = JSON.parse(contentStr);
         return [
@@ -137,8 +184,8 @@ function toolMessageToContent(message) {
                 parts: [
                     {
                         functionResponse: {
-                            name:
-                            response: content,
+                            name: responseName,
+                            response: { content },
                         },
                     },
                 ],
@@ -152,10 +199,8 @@ function toolMessageToContent(message) {
                 parts: [
                     {
                         functionResponse: {
-                            name:
-                            response: {
-                                response: contentStr,
-                            },
+                            name: responseName,
+                            response: { content: contentStr },
                         },
                     },
                 ],
@@ -163,7 +208,7 @@
         ];
     }
 }
-export function baseMessageToContent(message) {
+export function baseMessageToContent(message, prevMessage) {
     const type = message._getType();
     switch (type) {
         case "system":
@@ -173,7 +218,10 @@ export function baseMessageToContent(message) {
         case "ai":
             return roleMessageToContent("model", message);
         case "tool":
-
+            if (!prevMessage) {
+                throw new Error("Tool messages cannot be the first message passed to the model.");
+            }
+            return toolMessageToContent(message, prevMessage);
         default:
             console.log(`Unsupported message type: ${type}`);
             return [];
@@ -235,7 +283,7 @@ function toolRawToTool(raw) {
 }
 function functionCallPartToToolRaw(part) {
     return {
-        id:
+        id: uuidv4().replace(/-/g, ""),
         type: "function",
         function: {
             name: part.functionCall.name,
@@ -331,7 +379,7 @@ export function safeResponseToGeneration(response, safetyHandler) {
 export function responseToChatGeneration(response) {
     return new ChatGenerationChunk({
         text: responseToString(response),
-        message:
+        message: partToMessageChunk(responseToParts(response)[0]),
         generationInfo: response,
     });
 }
@@ -355,7 +403,7 @@ export function chunkToString(chunk) {
         throw new Error(`Unexpected chunk: ${chunk}`);
     }
 }
-export function
+export function partToMessageChunk(part) {
     const fields = partsToBaseMessageFields([part]);
     if (typeof fields.content === "string") {
         return new AIMessageChunk(fields);
@@ -372,7 +420,7 @@ export function partToMessage(part) {
     return new AIMessageChunk(fields);
 }
 export function partToChatGeneration(part) {
-    const message =
+    const message = partToMessageChunk(part);
     const text = partToText(part);
     return new ChatGenerationChunk({
         text,
@@ -385,11 +433,18 @@ export function responseToChatGenerations(response) {
     if (ret.every((item) => typeof item.message.content === "string")) {
         const combinedContent = ret.map((item) => item.message.content).join("");
         const combinedText = ret.map((item) => item.text).join("");
+        const toolCallChunks = ret[ret.length - 1].message.additional_kwargs?.tool_calls?.map((toolCall, i) => ({
+            name: toolCall.function.name,
+            args: toolCall.function.arguments,
+            id: toolCall.id,
+            index: i,
+        }));
         ret = [
             new ChatGenerationChunk({
                 message: new AIMessageChunk({
                     content: combinedContent,
                     additional_kwargs: ret[ret.length - 1].message.additional_kwargs,
+                    tool_call_chunks: toolCallChunks,
                }),
                 text: combinedText,
                 generationInfo: ret[ret.length - 1].generationInfo,
@@ -405,10 +460,30 @@ export function responseToBaseMessageFields(response) {
 export function partsToBaseMessageFields(parts) {
     const fields = {
         content: partsToMessageContent(parts),
+        tool_calls: [],
+        invalid_tool_calls: [],
     };
     const rawTools = partsToToolsRaw(parts);
     if (rawTools.length > 0) {
         const tools = toolsRawToTools(rawTools);
+        for (const tool of tools) {
+            try {
+                fields.tool_calls?.push({
+                    name: tool.function.name,
+                    args: JSON.parse(tool.function.arguments),
+                    id: tool.id,
+                });
+                // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            }
+            catch (e) {
+                fields.invalid_tool_calls?.push({
+                    name: tool.function.name,
+                    args: JSON.parse(tool.function.arguments),
+                    id: tool.id,
+                    error: e.message,
+                });
+            }
+        }
         fields.additional_kwargs = {
             tool_calls: tools,
         };
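
Together these changes give tool calls a full round trip: Gemini `functionCall` parts are surfaced as `tool_calls` on the AI message (with a generated, dash-stripped UUID as id), and a follow-up ToolMessage is converted back into a `functionResponse` part, using the preceding AI message to recover the function name. A sketch of the conversation shape this expects; the id, tool output, and question are illustrative:

    import { AIMessage, HumanMessage, ToolMessage } from "@langchain/core/messages";

    // 1. The model's reply arrives as an AIMessage whose tool_calls were parsed
    //    from Gemini functionCall parts.
    const aiMsg = new AIMessage({
      content: "",
      tool_calls: [{ name: "get_weather", args: { city: "Paris" }, id: "3f2a..." }],
    });

    // 2. The tool result goes back as a ToolMessage; baseMessageToContent uses the
    //    preceding AI message to fill functionResponse.name ("get_weather").
    const toolMsg = new ToolMessage({
      content: JSON.stringify({ forecast: "sunny" }),
      tool_call_id: "3f2a...",
    });

    const history = [new HumanMessage("What's the weather in Paris?"), aiMsg, toolMsg];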

package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/google-common",
-  "version": "0.0.4",
+  "version": "0.0.6",
   "description": "Core types and classes for Google services.",
   "type": "module",
   "engines": {
@@ -39,7 +39,8 @@
   "author": "LangChain",
   "license": "MIT",
   "dependencies": {
-    "@langchain/core": "~0.1.
+    "@langchain/core": "~0.1.56",
+    "uuid": "^9.0.0",
     "zod-to-json-schema": "^3.22.4"
   },
   "devDependencies": {