@aigne/gemini 0.14.16-beta.1 → 0.14.16-beta.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,32 @@
  # Changelog
 
+ ## [0.14.16-beta.3](https://github.com/AIGNE-io/aigne-framework/compare/gemini-v0.14.16-beta.2...gemini-v0.14.16-beta.3) (2025-12-19)
+
+
+ ### Features
+
+ * add prompt caching for OpenAI/Gemini/Anthropic and cache token display ([#838](https://github.com/AIGNE-io/aigne-framework/issues/838)) ([46c628f](https://github.com/AIGNE-io/aigne-framework/commit/46c628f180572ea1b955d1a9888aad6145204842))
+
+
+ ### Dependencies
+
+ * The following workspace dependencies were updated
+   * dependencies
+     * @aigne/core bumped to 1.72.0-beta.3
+   * devDependencies
+     * @aigne/test-utils bumped to 0.5.69-beta.3
+
+ ## [0.14.16-beta.2](https://github.com/AIGNE-io/aigne-framework/compare/gemini-v0.14.16-beta.1...gemini-v0.14.16-beta.2) (2025-12-19)
+
+
+ ### Dependencies
+
+ * The following workspace dependencies were updated
+   * dependencies
+     * @aigne/core bumped to 1.72.0-beta.2
+   * devDependencies
+     * @aigne/test-utils bumped to 0.5.69-beta.2
+
  ## [0.14.16-beta.1](https://github.com/AIGNE-io/aigne-framework/compare/gemini-v0.14.16-beta...gemini-v0.14.16-beta.1) (2025-12-17)
 
 
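The feature entry above lands in this package as two changes visible in the hunks that follow: a new `cacheConfig` model option and a `cacheReadInputTokens` field on the reported usage. A minimal caller-side sketch of how that might look, assuming the framework's usual constructor options and `invoke` call; everything here other than the `cacheConfig` option name and `usage.cacheReadInputTokens` is an assumption, not something this diff documents.

```ts
import { GeminiChatModel } from "@aigne/gemini";

// Sketch only: the constructor options and `invoke` input shape are assumed
// from the framework's usual conventions, not taken from this diff.
const model = new GeminiChatModel({
  apiKey: process.env.GEMINI_API_KEY,
  model: "gemini-2.5-flash",
  modelOptions: {
    // A CacheConfig from @aigne/core would go here to opt in; its shape is
    // not part of this diff, so it is left out of the sketch.
  },
});

const output = await model.invoke({
  messages: [{ role: "user", content: "Summarize the cached system prompt." }],
});

// New in this release: tokens served from Gemini's cached content are
// reported separately (see the usage-parsing hunk further down).
console.log(output.usage?.cacheReadInputTokens);
```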
@@ -63,6 +63,9 @@ export declare class GeminiChatModel extends ChatModel {
  reasoningEffort?: number | "minimal" | "low" | "medium" | "high" | {
      $get: string;
  } | undefined;
+ cacheConfig?: import("@aigne/core").CacheConfig | {
+     $get: string;
+ } | undefined;
  }> | undefined;
  process(input: ChatModelInput, options: AgentInvokeOptions): PromiseOrValue<AgentProcessResult<ChatModelOutput>>;
  protected thinkingBudgetModelMap: ({
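The declaration above adds `cacheConfig` with the same value-or-`{ $get: string }` union already used by `reasoningEffort`. A purely illustrative restatement of that slice of the options type; the helper names are hypothetical, and `CacheConfig`'s own fields are defined in `@aigne/core`, outside this diff.

```ts
import type { CacheConfig } from "@aigne/core";

// Either an inline value or a `{ $get: string }` indirection, mirroring the
// pattern the declaration already uses for `reasoningEffort`.
type MaybeRef<T> = T | { $get: string };

// Hypothetical restatement of the relevant slice of the model options;
// only the property names and unions come from the declaration above.
type GeminiModelOptionsSketch = {
  reasoningEffort?: MaybeRef<number | "minimal" | "low" | "medium" | "high">;
  cacheConfig?: MaybeRef<CacheConfig>; // added in 0.14.16-beta.3
};
```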
@@ -230,6 +230,10 @@ class GeminiChatModel extends core_1.ChatModel {
  usage.outputTokens =
      (chunk.usageMetadata.candidatesTokenCount || 0) +
      (chunk.usageMetadata.thoughtsTokenCount || 0);
+ // Parse cache statistics if available
+ if (chunk.usageMetadata.cachedContentTokenCount) {
+     usage.cacheReadInputTokens = chunk.usageMetadata.cachedContentTokenCount;
+ }
  }
  }
  if (input.responseFormat?.type === "json_schema") {
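The compiled hunk above copies Gemini's `usageMetadata` counters onto the framework's usage object, now including cached-content tokens. A self-contained restatement of that mapping, with the field names taken from the diff and the wrapper types and function name invented for illustration.

```ts
// Hypothetical helper restating the mapping added above; only the field
// names come from the diff, the surrounding types are illustrative.
interface GeminiUsageMetadata {
  candidatesTokenCount?: number;
  thoughtsTokenCount?: number;
  cachedContentTokenCount?: number;
}

interface UsageSketch {
  outputTokens: number;
  cacheReadInputTokens?: number;
}

function toUsage(usageMetadata: GeminiUsageMetadata): UsageSketch {
  const usage: UsageSketch = {
    // Output = response candidates plus any "thinking" tokens.
    outputTokens:
      (usageMetadata.candidatesTokenCount || 0) +
      (usageMetadata.thoughtsTokenCount || 0),
  };
  // Parse cache statistics if available (new in this release).
  if (usageMetadata.cachedContentTokenCount) {
    usage.cacheReadInputTokens = usageMetadata.cachedContentTokenCount;
  }
  return usage;
}
```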
@@ -63,6 +63,9 @@ export declare class GeminiChatModel extends ChatModel {
  reasoningEffort?: number | "minimal" | "low" | "medium" | "high" | {
      $get: string;
  } | undefined;
+ cacheConfig?: import("@aigne/core").CacheConfig | {
+     $get: string;
+ } | undefined;
  }> | undefined;
  process(input: ChatModelInput, options: AgentInvokeOptions): PromiseOrValue<AgentProcessResult<ChatModelOutput>>;
  protected thinkingBudgetModelMap: ({
@@ -63,6 +63,9 @@ export declare class GeminiChatModel extends ChatModel {
  reasoningEffort?: number | "minimal" | "low" | "medium" | "high" | {
      $get: string;
  } | undefined;
+ cacheConfig?: import("@aigne/core").CacheConfig | {
+     $get: string;
+ } | undefined;
  }> | undefined;
  process(input: ChatModelInput, options: AgentInvokeOptions): PromiseOrValue<AgentProcessResult<ChatModelOutput>>;
  protected thinkingBudgetModelMap: ({
@@ -227,6 +227,10 @@ export class GeminiChatModel extends ChatModel {
  usage.outputTokens =
      (chunk.usageMetadata.candidatesTokenCount || 0) +
      (chunk.usageMetadata.thoughtsTokenCount || 0);
+ // Parse cache statistics if available
+ if (chunk.usageMetadata.cachedContentTokenCount) {
+     usage.cacheReadInputTokens = chunk.usageMetadata.cachedContentTokenCount;
+ }
  }
  }
  if (input.responseFormat?.type === "json_schema") {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@aigne/gemini",
-   "version": "0.14.16-beta.1",
+   "version": "0.14.16-beta.3",
    "description": "AIGNE Gemini SDK for integrating with Google's Gemini AI models",
    "publishConfig": {
      "access": "public"
@@ -40,7 +40,7 @@
    "yaml": "^2.8.1",
    "zod": "^3.25.67",
    "zod-to-json-schema": "^3.24.6",
-   "@aigne/core": "^1.72.0-beta.1",
+   "@aigne/core": "^1.72.0-beta.3",
    "@aigne/platform-helpers": "^0.6.7-beta"
  },
  "devDependencies": {
@@ -49,7 +49,7 @@
    "npm-run-all": "^4.1.5",
    "rimraf": "^6.0.1",
    "typescript": "^5.9.2",
-   "@aigne/test-utils": "^0.5.69-beta.1"
+   "@aigne/test-utils": "^0.5.69-beta.3"
  },
  "scripts": {
    "lint": "tsc --noEmit",