@aigne/gemini 0.6.0 → 0.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,34 @@
 # Changelog
 
+## [0.6.2](https://github.com/AIGNE-io/aigne-framework/compare/gemini-v0.6.1...gemini-v0.6.2) (2025-07-09)
+
+
+### Dependencies
+
+* The following workspace dependencies were updated
+  * dependencies
+    * @aigne/openai bumped to 0.8.2
+  * devDependencies
+    * @aigne/core bumped to 1.32.2
+    * @aigne/test-utils bumped to 0.5.4
+
+## [0.6.1](https://github.com/AIGNE-io/aigne-framework/compare/gemini-v0.6.0...gemini-v0.6.1) (2025-07-09)
+
+
+### Bug Fixes
+
+* **model:** ensure last message is not system role for gemini ([#231](https://github.com/AIGNE-io/aigne-framework/issues/231)) ([1b72e1e](https://github.com/AIGNE-io/aigne-framework/commit/1b72e1e6be98060aa32e68585142b2eea401d109))
+
+
+### Dependencies
+
+* The following workspace dependencies were updated
+  * dependencies
+    * @aigne/openai bumped to 0.8.1
+  * devDependencies
+    * @aigne/core bumped to 1.32.1
+    * @aigne/test-utils bumped to 0.5.3
+
 ## [0.6.0](https://github.com/AIGNE-io/aigne-framework/compare/gemini-v0.5.1...gemini-v0.6.0) (2025-07-08)
 
 
@@ -1,3 +1,4 @@
+import type { ChatModelInput } from "@aigne/core";
 import { OpenAIChatModel, type OpenAIChatModelOptions } from "@aigne/openai";
 /**
  * Implementation of the ChatModel interface for Google's Gemini API
@@ -16,8 +17,8 @@ import { OpenAIChatModel, type OpenAIChatModelOptions } from "@aigne/openai";
 export declare class GeminiChatModel extends OpenAIChatModel {
     constructor(options?: OpenAIChatModelOptions);
     protected apiKeyEnvName: string;
-    protected supportsEndWithSystemMessage: boolean;
     protected supportsToolsUseWithJsonSchema: boolean;
     protected supportsParallelToolCalls: boolean;
     protected supportsToolStreaming: boolean;
+    getRunMessages(input: ChatModelInput): ReturnType<OpenAIChatModel["getRunMessages"]>;
 }
@@ -27,9 +27,16 @@ class GeminiChatModel extends openai_1.OpenAIChatModel {
         });
     }
     apiKeyEnvName = "GEMINI_API_KEY";
-    supportsEndWithSystemMessage = false;
     supportsToolsUseWithJsonSchema = false;
     supportsParallelToolCalls = false;
     supportsToolStreaming = false;
+    async getRunMessages(input) {
+        const messages = await super.getRunMessages(input);
+        const lastMessage = messages.at(-1);
+        if (lastMessage?.role === "system") {
+            lastMessage.role = "user"; // Ensure the last message is from the user
+        }
+        return messages;
+    }
 }
 exports.GeminiChatModel = GeminiChatModel;
@@ -1,3 +1,4 @@
+import type { ChatModelInput } from "@aigne/core";
 import { OpenAIChatModel, type OpenAIChatModelOptions } from "@aigne/openai";
 /**
  * Implementation of the ChatModel interface for Google's Gemini API
@@ -16,8 +17,8 @@ import { OpenAIChatModel, type OpenAIChatModelOptions } from "@aigne/openai";
 export declare class GeminiChatModel extends OpenAIChatModel {
     constructor(options?: OpenAIChatModelOptions);
     protected apiKeyEnvName: string;
-    protected supportsEndWithSystemMessage: boolean;
     protected supportsToolsUseWithJsonSchema: boolean;
     protected supportsParallelToolCalls: boolean;
     protected supportsToolStreaming: boolean;
+    getRunMessages(input: ChatModelInput): ReturnType<OpenAIChatModel["getRunMessages"]>;
 }
@@ -1,3 +1,4 @@
+import type { ChatModelInput } from "@aigne/core";
 import { OpenAIChatModel, type OpenAIChatModelOptions } from "@aigne/openai";
 /**
  * Implementation of the ChatModel interface for Google's Gemini API
@@ -16,8 +17,8 @@ import { OpenAIChatModel, type OpenAIChatModelOptions } from "@aigne/openai";
 export declare class GeminiChatModel extends OpenAIChatModel {
     constructor(options?: OpenAIChatModelOptions);
     protected apiKeyEnvName: string;
-    protected supportsEndWithSystemMessage: boolean;
     protected supportsToolsUseWithJsonSchema: boolean;
     protected supportsParallelToolCalls: boolean;
     protected supportsToolStreaming: boolean;
+    getRunMessages(input: ChatModelInput): ReturnType<OpenAIChatModel["getRunMessages"]>;
 }
@@ -24,8 +24,15 @@ export class GeminiChatModel extends OpenAIChatModel {
         });
     }
     apiKeyEnvName = "GEMINI_API_KEY";
-    supportsEndWithSystemMessage = false;
     supportsToolsUseWithJsonSchema = false;
     supportsParallelToolCalls = false;
     supportsToolStreaming = false;
+    async getRunMessages(input) {
+        const messages = await super.getRunMessages(input);
+        const lastMessage = messages.at(-1);
+        if (lastMessage?.role === "system") {
+            lastMessage.role = "user"; // Ensure the last message is from the user
+        }
+        return messages;
+    }
 }
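The change common to all of the compiled outputs above is the new getRunMessages override from the 0.6.1 bug fix: since Gemini does not accept a conversation that ends with a system-role message (the previously hard-coded supportsEndWithSystemMessage = false flag expressed the same constraint), a trailing system message is re-labelled as a user message before the request is sent. The TypeScript sketch below restates that logic as a standalone function for illustration only; the ChatMessage type and ensureLastMessageIsNotSystem helper are hypothetical names for this example and are not exported by @aigne/gemini.

// Illustrative sketch only (not part of the package): a standalone restatement
// of the behavior added by the getRunMessages override.
type Role = "system" | "user" | "assistant" | "tool";

interface ChatMessage {
  role: Role;
  content: string;
}

// Gemini rejects a conversation whose last message has the system role,
// so a trailing system message is re-labelled as a user message.
function ensureLastMessageIsNotSystem(messages: ChatMessage[]): ChatMessage[] {
  const lastMessage = messages.at(-1);
  if (lastMessage?.role === "system") {
    lastMessage.role = "user";
  }
  return messages;
}

// Example: the trailing system instruction is sent as a user message instead.
const patched = ensureLastMessageIsNotSystem([
  { role: "user", content: "Summarize the release notes." },
  { role: "system", content: "Answer in one sentence." },
]);
console.log(patched.at(-1)?.role); // "user"

Like the compiled code in the diff, the sketch mutates the last message in place rather than copying the array, which matches how the override patches the result of super.getRunMessages(input).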
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@aigne/gemini",
-  "version": "0.6.0",
+  "version": "0.6.2",
   "description": "AIGNE Gemini SDK for integrating with Google's Gemini AI models",
   "publishConfig": {
     "access": "public"
@@ -32,7 +32,7 @@
     }
   },
   "dependencies": {
-    "@aigne/openai": "^0.8.
+    "@aigne/openai": "^0.8.2"
   },
   "devDependencies": {
     "@types/bun": "^1.2.17",
@@ -40,8 +40,8 @@
     "npm-run-all": "^4.1.5",
     "rimraf": "^6.0.1",
     "typescript": "^5.8.3",
-    "@aigne/core": "^1.32.
-    "@aigne/test-utils": "^0.5.
+    "@aigne/core": "^1.32.2",
+    "@aigne/test-utils": "^0.5.4"
   },
   "scripts": {
     "lint": "tsc --noEmit",