@langchain/google-common 0.0.17 → 0.0.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chat_models.cjs +28 -0
- package/dist/chat_models.d.ts +5 -4
- package/dist/chat_models.js +29 -1
- package/dist/types.d.ts +6 -0
- package/dist/utils/gemini.cjs +10 -0
- package/dist/utils/gemini.js +10 -0
- package/dist/utils/zod_to_gemini_parameters.cjs +34 -23
- package/dist/utils/zod_to_gemini_parameters.d.ts +3 -1
- package/dist/utils/zod_to_gemini_parameters.js +31 -22
- package/package.json +2 -2
package/dist/chat_models.cjs
CHANGED
@@ -5,6 +5,7 @@ const env_1 = require("@langchain/core/utils/env");
 const chat_models_1 = require("@langchain/core/language_models/chat_models");
 const outputs_1 = require("@langchain/core/outputs");
 const messages_1 = require("@langchain/core/messages");
+const base_1 = require("@langchain/core/language_models/base");
 const runnables_1 = require("@langchain/core/runnables");
 const openai_tools_1 = require("@langchain/core/output_parsers/openai_tools");
 const function_calling_1 = require("@langchain/core/utils/function_calling");
@@ -96,6 +97,14 @@ function convertToGeminiTools(structuredTools) {
                     parameters: jsonSchema,
                 };
             }
+            if ((0, base_1.isOpenAITool)(structuredTool)) {
+                return {
+                    name: structuredTool.function.name,
+                    description: structuredTool.function.description ??
+                        `A function available to call.`,
+                    parameters: (0, zod_to_gemini_parameters_js_1.jsonSchemaToGeminiParameters)(structuredTool.function.parameters),
+                };
+            }
             return structuredTool;
         }),
     },
@@ -184,6 +193,12 @@ class ChatGoogleBase extends chat_models_1.BaseChatModel {
            writable: true,
            value: void 0
        });
+        Object.defineProperty(this, "streamUsage", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: true
+        });
         Object.defineProperty(this, "connection", {
             enumerable: true,
             configurable: true,
@@ -199,6 +214,7 @@ class ChatGoogleBase extends chat_models_1.BaseChatModel {
         (0, common_js_1.copyAndValidateModelParamsInto)(fields, this);
         this.safetyHandler =
             fields?.safetyHandler ?? new gemini_js_1.DefaultGeminiSafetyHandler();
+        this.streamUsage = fields?.streamUsage ?? this.streamUsage;
         const client = this.buildClient(fields);
         this.buildConnection(fields ?? {}, client);
     }
@@ -260,11 +276,22 @@ class ChatGoogleBase extends chat_models_1.BaseChatModel {
         const response = await this.streamedConnection.request(_messages, parameters, options);
         // Get the streaming parser of the response
         const stream = response.data;
+        let usageMetadata;
         // Loop until the end of the stream
         // During the loop, yield each time we get a chunk from the streaming parser
         // that is either available or added to the queue
         while (!stream.streamDone) {
             const output = await stream.nextChunk();
+            if (output &&
+                output.usageMetadata &&
+                this.streamUsage !== false &&
+                options.streamUsage !== false) {
+                usageMetadata = {
+                    input_tokens: output.usageMetadata.promptTokenCount,
+                    output_tokens: output.usageMetadata.candidatesTokenCount,
+                    total_tokens: output.usageMetadata.totalTokenCount,
+                };
+            }
             const chunk = output !== null
                 ? (0, gemini_js_1.safeResponseToChatGeneration)({ data: output }, this.safetyHandler)
                 : new outputs_1.ChatGenerationChunk({
@@ -272,6 +299,7 @@ class ChatGoogleBase extends chat_models_1.BaseChatModel {
                     generationInfo: { finishReason: "stop" },
                     message: new messages_1.AIMessageChunk({
                         content: "",
+                        usage_metadata: usageMetadata,
                     }),
                 });
             yield chunk;
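
The new isOpenAITool branch in convertToGeminiTools means OpenAI-format tool definitions are now translated into Gemini function declarations instead of being passed through unchanged. A minimal sketch of that mapping (the get_weather tool below is hypothetical, not part of the package):

// Hypothetical OpenAI-format tool, as accepted by the new branch above.
const openAITool = {
  type: "function" as const,
  function: {
    name: "get_weather",
    description: "Look up the current weather for a city",
    parameters: {
      $schema: "http://json-schema.org/draft-07/schema#",
      type: "object",
      properties: { city: { type: "string" } },
      required: ["city"],
      additionalProperties: false,
    },
  },
};
// Per the diff above, convertToGeminiTools would produce roughly:
// {
//   name: "get_weather",
//   description: "Look up the current weather for a city",
//   parameters: { type: "object", properties: { city: { type: "string" } }, required: ["city"] }
// }
// ($schema and the boolean additionalProperties flag are stripped by jsonSchemaToGeminiParameters.)
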
package/dist/chat_models.d.ts
CHANGED
@@ -3,7 +3,7 @@ import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
 import { BaseChatModel, LangSmithParams, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
 import { ChatGenerationChunk, ChatResult } from "@langchain/core/outputs";
 import { AIMessageChunk } from "@langchain/core/messages";
-import { BaseLanguageModelInput, StructuredOutputMethodOptions } from "@langchain/core/language_models/base";
+import { BaseLanguageModelInput, StructuredOutputMethodOptions, ToolDefinition } from "@langchain/core/language_models/base";
 import type { z } from "zod";
 import { Runnable } from "@langchain/core/runnables";
 import { AsyncCaller } from "@langchain/core/utils/async_caller";
@@ -23,7 +23,7 @@ declare class ChatConnection<AuthOptions> extends AbstractGoogleLLMConnection<Ba
 /**
  * Input to chat model class.
  */
-export interface ChatGoogleBaseInput<AuthOptions> extends BaseChatModelParams, GoogleConnectionParams<AuthOptions>, GoogleAIModelParams, GoogleAISafetyParams {
+export interface ChatGoogleBaseInput<AuthOptions> extends BaseChatModelParams, GoogleConnectionParams<AuthOptions>, GoogleAIModelParams, GoogleAISafetyParams, Pick<GoogleAIBaseLanguageModelCallOptions, "streamUsage"> {
 }
 /**
  * Integration with a chat model.
@@ -44,17 +44,18 @@ export declare abstract class ChatGoogleBase<AuthOptions> extends BaseChatModel<
     safetySettings: GoogleAISafetySetting[];
     convertSystemMessageToHumanContent: boolean | undefined;
     safetyHandler: GoogleAISafetyHandler;
+    streamUsage: boolean;
     protected connection: ChatConnection<AuthOptions>;
     protected streamedConnection: ChatConnection<AuthOptions>;
     constructor(fields?: ChatGoogleBaseInput<AuthOptions>);
-
+    getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
     abstract buildAbstractedClient(fields?: GoogleAIBaseLLMInput<AuthOptions>): GoogleAbstractedClient;
     buildApiKeyClient(apiKey: string): GoogleAbstractedClient;
     buildApiKey(fields?: GoogleAIBaseLLMInput<AuthOptions>): string | undefined;
     buildClient(fields?: GoogleAIBaseLLMInput<AuthOptions>): GoogleAbstractedClient;
     buildConnection(fields: GoogleBaseLLMInput<AuthOptions>, client: GoogleAbstractedClient): void;
     get platform(): GooglePlatformType;
-    bindTools(tools: (StructuredToolInterface | Record<string, unknown>)[], kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, GoogleAIBaseLanguageModelCallOptions>;
+    bindTools(tools: (StructuredToolInterface | Record<string, unknown> | ToolDefinition)[], kwargs?: Partial<GoogleAIBaseLanguageModelCallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, GoogleAIBaseLanguageModelCallOptions>;
     _llmType(): string;
     /**
      * Get the parameters used to invoke the model
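
Since bindTools now also accepts ToolDefinition, an OpenAI-style tool can be bound directly. A usage sketch, assuming a concrete ChatGoogleBase subclass such as ChatVertexAI from @langchain/google-vertexai (the model name and tool are illustrative):

import { ChatVertexAI } from "@langchain/google-vertexai"; // one concrete ChatGoogleBase subclass

const model = new ChatVertexAI({ model: "gemini-1.5-pro" });
const modelWithTools = model.bindTools([
  {
    type: "function",
    function: {
      name: "get_weather", // hypothetical tool, for illustration only
      description: "Look up the current weather for a city",
      parameters: {
        type: "object",
        properties: { city: { type: "string" } },
        required: ["city"],
      },
    },
  },
]);
const result = await modelWithTools.invoke("What is the weather in Paris?");
console.log(result.tool_calls);
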
package/dist/chat_models.js
CHANGED
@@ -2,6 +2,7 @@ import { getEnvironmentVariable } from "@langchain/core/utils/env";
 import { BaseChatModel, } from "@langchain/core/language_models/chat_models";
 import { ChatGenerationChunk } from "@langchain/core/outputs";
 import { AIMessageChunk } from "@langchain/core/messages";
+import { isOpenAITool, } from "@langchain/core/language_models/base";
 import { RunnablePassthrough, RunnableSequence, } from "@langchain/core/runnables";
 import { JsonOutputKeyToolsParser } from "@langchain/core/output_parsers/openai_tools";
 import { isStructuredTool } from "@langchain/core/utils/function_calling";
@@ -10,7 +11,7 @@ import { AbstractGoogleLLMConnection } from "./connection.js";
 import { baseMessageToContent, safeResponseToChatGeneration, safeResponseToChatResult, DefaultGeminiSafetyHandler, } from "./utils/gemini.js";
 import { ApiKeyGoogleAuth } from "./auth.js";
 import { ensureParams } from "./utils/failed_handler.js";
-import { zodToGeminiParameters } from "./utils/zod_to_gemini_parameters.js";
+import { jsonSchemaToGeminiParameters, zodToGeminiParameters, } from "./utils/zod_to_gemini_parameters.js";
 class ChatConnection extends AbstractGoogleLLMConnection {
     constructor(fields, caller, client, streaming) {
         super(fields, caller, client, streaming);
@@ -93,6 +94,14 @@ function convertToGeminiTools(structuredTools) {
                     parameters: jsonSchema,
                 };
             }
+            if (isOpenAITool(structuredTool)) {
+                return {
+                    name: structuredTool.function.name,
+                    description: structuredTool.function.description ??
+                        `A function available to call.`,
+                    parameters: jsonSchemaToGeminiParameters(structuredTool.function.parameters),
+                };
+            }
             return structuredTool;
         }),
     },
@@ -181,6 +190,12 @@ export class ChatGoogleBase extends BaseChatModel {
            writable: true,
            value: void 0
        });
+        Object.defineProperty(this, "streamUsage", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: true
+        });
         Object.defineProperty(this, "connection", {
             enumerable: true,
             configurable: true,
@@ -196,6 +211,7 @@ export class ChatGoogleBase extends BaseChatModel {
         copyAndValidateModelParamsInto(fields, this);
         this.safetyHandler =
             fields?.safetyHandler ?? new DefaultGeminiSafetyHandler();
+        this.streamUsage = fields?.streamUsage ?? this.streamUsage;
         const client = this.buildClient(fields);
         this.buildConnection(fields ?? {}, client);
     }
@@ -257,11 +273,22 @@ export class ChatGoogleBase extends BaseChatModel {
         const response = await this.streamedConnection.request(_messages, parameters, options);
         // Get the streaming parser of the response
         const stream = response.data;
+        let usageMetadata;
         // Loop until the end of the stream
         // During the loop, yield each time we get a chunk from the streaming parser
         // that is either available or added to the queue
         while (!stream.streamDone) {
             const output = await stream.nextChunk();
+            if (output &&
+                output.usageMetadata &&
+                this.streamUsage !== false &&
+                options.streamUsage !== false) {
+                usageMetadata = {
+                    input_tokens: output.usageMetadata.promptTokenCount,
+                    output_tokens: output.usageMetadata.candidatesTokenCount,
+                    total_tokens: output.usageMetadata.totalTokenCount,
+                };
+            }
             const chunk = output !== null
                 ? safeResponseToChatGeneration({ data: output }, this.safetyHandler)
                 : new ChatGenerationChunk({
@@ -269,6 +296,7 @@ export class ChatGoogleBase extends BaseChatModel {
                     generationInfo: { finishReason: "stop" },
                     message: new AIMessageChunk({
                         content: "",
+                        usage_metadata: usageMetadata,
                     }),
                 });
             yield chunk;
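
With streamUsage enabled (the default), the streaming loop above records Gemini's usageMetadata and attaches it to the final chunk as usage_metadata. A sketch of reading it, again assuming a concrete subclass such as ChatVertexAI:

import { ChatVertexAI } from "@langchain/google-vertexai"; // illustrative concrete subclass

const model = new ChatVertexAI({ model: "gemini-1.5-pro", streamUsage: true }); // true is the default
let usage;
for await (const chunk of await model.stream("Tell me a short joke")) {
  // Per the diff above, usage_metadata arrives on the final (empty, finishReason "stop") chunk.
  if (chunk.usage_metadata) {
    usage = chunk.usage_metadata; // { input_tokens, output_tokens, total_tokens }
  }
}
console.log(usage);

Passing streamUsage: false, either in the constructor or in the per-call options, suppresses the usage data.
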
package/dist/types.d.ts
CHANGED
@@ -95,6 +95,12 @@ export interface GoogleAIModelRequestParams extends GoogleAIModelParams {
 export interface GoogleAIBaseLLMInput<AuthOptions> extends BaseLLMParams, GoogleConnectionParams<AuthOptions>, GoogleAIModelParams, GoogleAISafetyParams {
 }
 export interface GoogleAIBaseLanguageModelCallOptions extends BaseLanguageModelCallOptions, GoogleAIModelRequestParams, GoogleAISafetyParams {
+    /**
+     * Whether or not to include usage data, like token counts
+     * in the streamed response chunks.
+     * @default true
+     */
+    streamUsage?: boolean;
 }
 /**
  * Input to LLM class.
package/dist/utils/gemini.cjs
CHANGED
@@ -482,12 +482,22 @@ function responseToChatGenerations(response) {
             id: toolCall.id,
             index: i,
         }));
+        let usageMetadata;
+        if ("usageMetadata" in response.data) {
+            usageMetadata = {
+                input_tokens: response.data.usageMetadata.promptTokenCount,
+                output_tokens: response.data.usageMetadata
+                    .candidatesTokenCount,
+                total_tokens: response.data.usageMetadata.totalTokenCount,
+            };
+        }
         ret = [
             new outputs_1.ChatGenerationChunk({
                 message: new messages_1.AIMessageChunk({
                     content: combinedContent,
                     additional_kwargs: ret[ret.length - 1]?.message.additional_kwargs,
                     tool_call_chunks: toolCallChunks,
+                    usage_metadata: usageMetadata,
                 }),
                 text: combinedText,
                 generationInfo: ret[ret.length - 1].generationInfo,
package/dist/utils/gemini.js
CHANGED
@@ -461,12 +461,22 @@ export function responseToChatGenerations(response) {
             id: toolCall.id,
             index: i,
         }));
+        let usageMetadata;
+        if ("usageMetadata" in response.data) {
+            usageMetadata = {
+                input_tokens: response.data.usageMetadata.promptTokenCount,
+                output_tokens: response.data.usageMetadata
+                    .candidatesTokenCount,
+                total_tokens: response.data.usageMetadata.totalTokenCount,
+            };
+        }
         ret = [
             new ChatGenerationChunk({
                 message: new AIMessageChunk({
                     content: combinedContent,
                     additional_kwargs: ret[ret.length - 1]?.message.additional_kwargs,
                     tool_call_chunks: toolCallChunks,
+                    usage_metadata: usageMetadata,
                 }),
                 text: combinedText,
                 generationInfo: ret[ret.length - 1].generationInfo,
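
responseToChatGenerations now reads usageMetadata from the raw Gemini response and attaches usage_metadata when it rebuilds the generation in the tool-call path shown above. Whether a given result carries it depends on the response, so a consumer should treat the field as optional; a hedged sketch (subclass and values illustrative):

import { ChatVertexAI } from "@langchain/google-vertexai"; // illustrative concrete subclass

const model = new ChatVertexAI({ model: "gemini-1.5-pro" });
const message = await model.invoke("Hello!");
console.log(message.usage_metadata);
// May print e.g. { input_tokens: 2, output_tokens: 9, total_tokens: 11 } (values illustrative),
// or undefined when the response carried no usageMetadata.
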
package/dist/utils/zod_to_gemini_parameters.cjs
CHANGED
@@ -1,40 +1,51 @@
 "use strict";
 /* eslint-disable @typescript-eslint/no-unused-vars */
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.zodToGeminiParameters = void 0;
+exports.jsonSchemaToGeminiParameters = exports.zodToGeminiParameters = exports.removeAdditionalProperties = void 0;
 const zod_to_json_schema_1 = require("zod-to-json-schema");
-function removeAdditionalProperties(
-
-
-
-
-
-
-
-
-
-
-
-
-}
-
-
-
+function removeAdditionalProperties(
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+obj) {
+    if (typeof obj === "object" && obj !== null) {
+        const newObj = { ...obj };
+        if ("additionalProperties" in newObj &&
+            typeof newObj.additionalProperties === "boolean") {
+            delete newObj.additionalProperties;
+        }
+        for (const key in newObj) {
+            if (key in newObj) {
+                if (Array.isArray(newObj[key])) {
+                    newObj[key] = newObj[key].map(removeAdditionalProperties);
+                }
+                else if (typeof newObj[key] === "object" && newObj[key] !== null) {
+                    newObj[key] = removeAdditionalProperties(newObj[key]);
+                }
+            }
+        }
+        return newObj;
     }
-
-    // eslint-disable-next-line no-param-reassign
-    properties[key] = removeAdditionalProperties(properties[key]);
-    removeProperties(properties, keys, index + 1);
+    return obj;
 }
+exports.removeAdditionalProperties = removeAdditionalProperties;
 function zodToGeminiParameters(
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 zodObj) {
     // Gemini doesn't accept either the $schema or additionalProperties
     // attributes, so we need to explicitly remove them.
     // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    // const jsonSchema = zodToJsonSchema(zodObj) as any;
     const jsonSchema = removeAdditionalProperties((0, zod_to_json_schema_1.zodToJsonSchema)(zodObj));
     const { $schema, ...rest } = jsonSchema;
     return rest;
 }
 exports.zodToGeminiParameters = zodToGeminiParameters;
+function jsonSchemaToGeminiParameters(
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+schema) {
+    // Gemini doesn't accept either the $schema or additionalProperties
+    // attributes, so we need to explicitly remove them.
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    const jsonSchema = removeAdditionalProperties(schema);
+    const { $schema, ...rest } = jsonSchema;
+    return rest;
+}
+exports.jsonSchemaToGeminiParameters = jsonSchemaToGeminiParameters;
package/dist/utils/zod_to_gemini_parameters.d.ts
CHANGED
@@ -1,3 +1,5 @@
 import type { z } from "zod";
-import { GeminiFunctionSchema } from "../types.js";
+import { GeminiFunctionSchema, GeminiJsonSchema } from "../types.js";
+export declare function removeAdditionalProperties(obj: Record<string, any>): GeminiJsonSchema;
 export declare function zodToGeminiParameters(zodObj: z.ZodType<any>): GeminiFunctionSchema;
+export declare function jsonSchemaToGeminiParameters(schema: Record<string, any>): GeminiFunctionSchema;
package/dist/utils/zod_to_gemini_parameters.js
CHANGED
@@ -1,27 +1,27 @@
 /* eslint-disable @typescript-eslint/no-unused-vars */
 import { zodToJsonSchema } from "zod-to-json-schema";
-function removeAdditionalProperties(
-
-
-
-
-
-
-
-
-
-
-
-
-}
-
-
-
+export function removeAdditionalProperties(
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+obj) {
+    if (typeof obj === "object" && obj !== null) {
+        const newObj = { ...obj };
+        if ("additionalProperties" in newObj &&
+            typeof newObj.additionalProperties === "boolean") {
+            delete newObj.additionalProperties;
+        }
+        for (const key in newObj) {
+            if (key in newObj) {
+                if (Array.isArray(newObj[key])) {
+                    newObj[key] = newObj[key].map(removeAdditionalProperties);
+                }
+                else if (typeof newObj[key] === "object" && newObj[key] !== null) {
+                    newObj[key] = removeAdditionalProperties(newObj[key]);
+                }
+            }
+        }
+        return newObj;
     }
-
-    // eslint-disable-next-line no-param-reassign
-    properties[key] = removeAdditionalProperties(properties[key]);
-    removeProperties(properties, keys, index + 1);
+    return obj;
 }
 export function zodToGeminiParameters(
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -29,8 +29,17 @@ zodObj) {
     // Gemini doesn't accept either the $schema or additionalProperties
     // attributes, so we need to explicitly remove them.
     // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    // const jsonSchema = zodToJsonSchema(zodObj) as any;
     const jsonSchema = removeAdditionalProperties(zodToJsonSchema(zodObj));
     const { $schema, ...rest } = jsonSchema;
     return rest;
 }
+export function jsonSchemaToGeminiParameters(
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+schema) {
+    // Gemini doesn't accept either the $schema or additionalProperties
+    // attributes, so we need to explicitly remove them.
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    const jsonSchema = removeAdditionalProperties(schema);
+    const { $schema, ...rest } = jsonSchema;
+    return rest;
+}
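
removeAdditionalProperties and the new jsonSchemaToGeminiParameters are now exported, so a plain JSON Schema (for example, the parameters of an OpenAI tool definition) can be cleaned up for Gemini directly. A sketch; the deep import path is an assumption and should be adjusted to however your build resolves dist/utils/zod_to_gemini_parameters.js:

// Import path is an assumption for illustration only.
import { jsonSchemaToGeminiParameters } from "@langchain/google-common/dist/utils/zod_to_gemini_parameters.js";

const geminiParams = jsonSchemaToGeminiParameters({
  $schema: "http://json-schema.org/draft-07/schema#",
  type: "object",
  properties: { city: { type: "string" } },
  required: ["city"],
  additionalProperties: false,
});
console.log(geminiParams);
// => { type: "object", properties: { city: { type: "string" } }, required: ["city"] }
// $schema is dropped and boolean additionalProperties flags are removed recursively.
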
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/google-common",
-  "version": "0.0.17",
+  "version": "0.0.19",
   "description": "Core types and classes for Google services.",
   "type": "module",
   "engines": {
@@ -40,7 +40,7 @@
   "author": "LangChain",
   "license": "MIT",
   "dependencies": {
-    "@langchain/core": "
+    "@langchain/core": ">=0.2.9 <0.3.0",
     "uuid": "^9.0.0",
     "zod-to-json-schema": "^3.22.4"
   },