@aigne/gemini 0.12.1 → 0.12.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,39 @@
 # Changelog
 
+## [0.12.3](https://github.com/AIGNE-io/aigne-framework/compare/gemini-v0.12.2...gemini-v0.12.3) (2025-09-08)
+
+
+### Bug Fixes
+
+* support optional field sturectured output for gemini ([#468](https://github.com/AIGNE-io/aigne-framework/issues/468)) ([70c6279](https://github.com/AIGNE-io/aigne-framework/commit/70c62795039a2862e3333f26707329489bf938de))
+
+
+### Dependencies
+
+* The following workspace dependencies were updated
+  * dependencies
+    * @aigne/openai bumped to 0.14.3
+  * devDependencies
+    * @aigne/core bumped to 1.58.3
+    * @aigne/test-utils bumped to 0.5.47
+
+## [0.12.2](https://github.com/AIGNE-io/aigne-framework/compare/gemini-v0.12.1...gemini-v0.12.2) (2025-09-05)
+
+
+### Bug Fixes
+
+* **model:** transform local file to base64 before request llm ([#462](https://github.com/AIGNE-io/aigne-framework/issues/462)) ([58ef5d7](https://github.com/AIGNE-io/aigne-framework/commit/58ef5d77046c49f3c4eed15b7f0cc283cbbcd74a))
+
+
+### Dependencies
+
+* The following workspace dependencies were updated
+  * dependencies
+    * @aigne/openai bumped to 0.14.2
+  * devDependencies
+    * @aigne/core bumped to 1.58.2
+    * @aigne/test-utils bumped to 0.5.46
+
 ## [0.12.1](https://github.com/AIGNE-io/aigne-framework/compare/gemini-v0.12.0...gemini-v0.12.1) (2025-09-05)
 
 
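For context on the #468 entry ("support optional field structured output"): the hunks below add an `optionalFieldMode = "optional"` flag to `GeminiChatModel`. The sketch here only illustrates what an "optional" treatment of a field means for a structured-output JSON schema; the schema and variable names are hypothetical and not part of this package.

```ts
// Illustrative only: a zod schema with an optional field, and the JSON Schema
// shape an "optional" field mode plausibly produces (property present, but not
// listed in `required`). Names are hypothetical, not from @aigne/gemini.
import { z } from "zod";

export const profileSchema = z.object({
  name: z.string(),
  nickname: z.string().optional(), // may be omitted entirely from the model output
});

export const profileJsonSchema = {
  type: "object",
  properties: {
    name: { type: "string" },
    nickname: { type: "string" },
  },
  // "optional" mode: nickname is simply left out of `required`, rather than
  // being rewritten as a required-but-nullable property.
  required: ["name"],
  additionalProperties: false,
} as const;
```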
@@ -22,6 +22,7 @@ export declare class GeminiChatModel extends OpenAIChatModel {
     protected supportsToolsUseWithJsonSchema: boolean;
     protected supportsParallelToolCalls: boolean;
     protected supportsToolStreaming: boolean;
+    protected optionalFieldMode: "optional";
     protected _googleClient?: GoogleGenAI;
     get googleClient(): GoogleGenAI;
     process(input: ChatModelInput, options: AgentInvokeOptions): PromiseOrValue<AgentProcessResult<ChatModelOutput>>;
@@ -4,7 +4,6 @@ exports.GeminiChatModel = void 0;
 const core_1 = require("@aigne/core");
 const type_utils_js_1 = require("@aigne/core/utils/type-utils.js");
 const openai_1 = require("@aigne/openai");
-const index_js_1 = require("@aigne/platform-helpers/nodejs/index.js");
 const genai_1 = require("@google/genai");
 const uuid_1 = require("uuid");
 const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai";
@@ -35,6 +34,7 @@ class GeminiChatModel extends openai_1.OpenAIChatModel {
     supportsToolsUseWithJsonSchema = false;
     supportsParallelToolCalls = false;
     supportsToolStreaming = false;
+    optionalFieldMode = "optional";
     _googleClient;
     get googleClient() {
         if (this._googleClient)
@@ -218,12 +218,7 @@ class GeminiChatModel extends openai_1.OpenAIChatModel {
             case "file":
                 return { inlineData: { data: item.data, mimeType: item.mimeType } };
             case "local":
-                return {
-                    inlineData: {
-                        data: await index_js_1.nodejs.fs.readFile(item.path, "base64"),
-                        mimeType: item.mimeType,
-                    },
-                };
+                throw new Error(`Unsupported local file: ${item.path}, it should be converted to base64 at ChatModel`);
         }
     }));
 }
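The last hunk above removes the in-model base64 encoding of local files: the `local` branch now throws and expects the file to arrive already base64-encoded, since the #462 fix moves that conversion upstream to the ChatModel layer. Below is a minimal caller-side sketch of that conversion, assuming the `{ type, data, mimeType, path }` item shape implied by the switch cases; the helper name is hypothetical.

```ts
// Hypothetical helper: turn a "local" file reference into the base64 "file"
// item that the Gemini model still accepts. Uses only Node's fs API.
import { readFile } from "node:fs/promises";

export async function localFileToFileItem(path: string, mimeType: string) {
  // readFile with a "base64" encoding returns the file contents as a base64 string.
  const data = await readFile(path, "base64");
  return { type: "file" as const, data, mimeType };
}

// Assumed usage: await localFileToFileItem("./chart.png", "image/png")
```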
@@ -22,6 +22,7 @@ export declare class GeminiChatModel extends OpenAIChatModel {
     protected supportsToolsUseWithJsonSchema: boolean;
     protected supportsParallelToolCalls: boolean;
     protected supportsToolStreaming: boolean;
+    protected optionalFieldMode: "optional";
     protected _googleClient?: GoogleGenAI;
     get googleClient(): GoogleGenAI;
     process(input: ChatModelInput, options: AgentInvokeOptions): PromiseOrValue<AgentProcessResult<ChatModelOutput>>;
@@ -22,6 +22,7 @@ export declare class GeminiChatModel extends OpenAIChatModel {
     protected supportsToolsUseWithJsonSchema: boolean;
     protected supportsParallelToolCalls: boolean;
     protected supportsToolStreaming: boolean;
+    protected optionalFieldMode: "optional";
     protected _googleClient?: GoogleGenAI;
     get googleClient(): GoogleGenAI;
     process(input: ChatModelInput, options: AgentInvokeOptions): PromiseOrValue<AgentProcessResult<ChatModelOutput>>;
@@ -1,7 +1,6 @@
 import { safeParseJSON, } from "@aigne/core";
 import { isNonNullable } from "@aigne/core/utils/type-utils.js";
 import { OpenAIChatModel } from "@aigne/openai";
-import { nodejs } from "@aigne/platform-helpers/nodejs/index.js";
 import { FunctionCallingConfigMode, GoogleGenAI, } from "@google/genai";
 import { v7 } from "uuid";
 const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai";
@@ -32,6 +31,7 @@ export class GeminiChatModel extends OpenAIChatModel {
     supportsToolsUseWithJsonSchema = false;
     supportsParallelToolCalls = false;
     supportsToolStreaming = false;
+    optionalFieldMode = "optional";
     _googleClient;
     get googleClient() {
         if (this._googleClient)
@@ -215,12 +215,7 @@ export class GeminiChatModel extends OpenAIChatModel {
             case "file":
                 return { inlineData: { data: item.data, mimeType: item.mimeType } };
             case "local":
-                return {
-                    inlineData: {
-                        data: await nodejs.fs.readFile(item.path, "base64"),
-                        mimeType: item.mimeType,
-                    },
-                };
+                throw new Error(`Unsupported local file: ${item.path}, it should be converted to base64 at ChatModel`);
         }
     }));
 }
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@aigne/gemini",
-  "version": "0.12.1",
+  "version": "0.12.3",
   "description": "AIGNE Gemini SDK for integrating with Google's Gemini AI models",
   "publishConfig": {
     "access": "public"
@@ -38,8 +38,8 @@
     "@google/genai": "^1.15.0",
     "uuid": "^11.1.0",
     "zod": "^3.25.67",
-    "@aigne/
-    "@aigne/
+    "@aigne/platform-helpers": "^0.6.2",
+    "@aigne/openai": "^0.14.3"
   },
   "devDependencies": {
     "@types/bun": "^1.2.18",
@@ -47,8 +47,8 @@
     "npm-run-all": "^4.1.5",
     "rimraf": "^6.0.1",
     "typescript": "^5.8.3",
-    "@aigne/test-utils": "^0.5.
-    "@aigne/core": "^1.58.
+    "@aigne/test-utils": "^0.5.47",
+    "@aigne/core": "^1.58.3"
   },
   "scripts": {
     "lint": "tsc --noEmit",