@ai-sdk/google 4.0.0-beta.7 → 4.0.0-beta.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.d.mts +8 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.js +10 -8
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +10 -8
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +4 -1
- package/dist/internal/index.d.ts +4 -1
- package/dist/internal/index.js +9 -7
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +9 -7
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
- package/src/google-generative-ai-language-model.ts +19 -11
- package/src/google-generative-ai-prompt.ts +9 -0
package/CHANGELOG.md
CHANGED
package/dist/index.d.mts
CHANGED
|
@@ -73,6 +73,7 @@ declare const responseSchema: _ai_sdk_provider_utils.LazySchema<{
|
|
|
73
73
|
})[] | null | undefined;
|
|
74
74
|
} | null | undefined;
|
|
75
75
|
finishReason?: string | null | undefined;
|
|
76
|
+
finishMessage?: string | null | undefined;
|
|
76
77
|
safetyRatings?: {
|
|
77
78
|
category?: string | null | undefined;
|
|
78
79
|
probability?: string | null | undefined;
|
|
@@ -158,14 +159,21 @@ declare const responseSchema: _ai_sdk_provider_utils.LazySchema<{
|
|
|
158
159
|
type GroundingMetadataSchema = NonNullable<InferSchema<typeof responseSchema>['candidates'][number]['groundingMetadata']>;
|
|
159
160
|
type UrlContextMetadataSchema = NonNullable<InferSchema<typeof responseSchema>['candidates'][number]['urlContextMetadata']>;
|
|
160
161
|
type SafetyRatingSchema = NonNullable<InferSchema<typeof responseSchema>['candidates'][number]['safetyRatings']>[number];
|
|
162
|
+
type PromptFeedbackSchema = NonNullable<InferSchema<typeof responseSchema>['promptFeedback']>;
|
|
163
|
+
type UsageMetadataSchema = NonNullable<InferSchema<typeof responseSchema>['usageMetadata']>;
|
|
161
164
|
|
|
162
165
|
type GoogleGenerativeAIGroundingMetadata = GroundingMetadataSchema;
|
|
163
166
|
type GoogleGenerativeAIUrlContextMetadata = UrlContextMetadataSchema;
|
|
164
167
|
type GoogleGenerativeAISafetyRating = SafetyRatingSchema;
|
|
168
|
+
type GoogleGenerativeAIPromptFeedback = PromptFeedbackSchema;
|
|
169
|
+
type GoogleGenerativeAIUsageMetadata = UsageMetadataSchema;
|
|
165
170
|
interface GoogleGenerativeAIProviderMetadata {
|
|
171
|
+
promptFeedback: GoogleGenerativeAIPromptFeedback | null;
|
|
166
172
|
groundingMetadata: GoogleGenerativeAIGroundingMetadata | null;
|
|
167
173
|
urlContextMetadata: GoogleGenerativeAIUrlContextMetadata | null;
|
|
168
174
|
safetyRatings: GoogleGenerativeAISafetyRating[] | null;
|
|
175
|
+
usageMetadata: GoogleGenerativeAIUsageMetadata | null;
|
|
176
|
+
finishMessage: string | null;
|
|
169
177
|
}
|
|
170
178
|
|
|
171
179
|
type GoogleGenerativeAIImageModelId = 'imagen-4.0-generate-001' | 'imagen-4.0-ultra-generate-001' | 'imagen-4.0-fast-generate-001' | 'gemini-2.5-flash-image' | 'gemini-3-pro-image-preview' | 'gemini-3.1-flash-image-preview' | (string & {});
|
package/dist/index.d.ts
CHANGED
|
@@ -73,6 +73,7 @@ declare const responseSchema: _ai_sdk_provider_utils.LazySchema<{
|
|
|
73
73
|
})[] | null | undefined;
|
|
74
74
|
} | null | undefined;
|
|
75
75
|
finishReason?: string | null | undefined;
|
|
76
|
+
finishMessage?: string | null | undefined;
|
|
76
77
|
safetyRatings?: {
|
|
77
78
|
category?: string | null | undefined;
|
|
78
79
|
probability?: string | null | undefined;
|
|
@@ -158,14 +159,21 @@ declare const responseSchema: _ai_sdk_provider_utils.LazySchema<{
|
|
|
158
159
|
type GroundingMetadataSchema = NonNullable<InferSchema<typeof responseSchema>['candidates'][number]['groundingMetadata']>;
|
|
159
160
|
type UrlContextMetadataSchema = NonNullable<InferSchema<typeof responseSchema>['candidates'][number]['urlContextMetadata']>;
|
|
160
161
|
type SafetyRatingSchema = NonNullable<InferSchema<typeof responseSchema>['candidates'][number]['safetyRatings']>[number];
|
|
162
|
+
type PromptFeedbackSchema = NonNullable<InferSchema<typeof responseSchema>['promptFeedback']>;
|
|
163
|
+
type UsageMetadataSchema = NonNullable<InferSchema<typeof responseSchema>['usageMetadata']>;
|
|
161
164
|
|
|
162
165
|
type GoogleGenerativeAIGroundingMetadata = GroundingMetadataSchema;
|
|
163
166
|
type GoogleGenerativeAIUrlContextMetadata = UrlContextMetadataSchema;
|
|
164
167
|
type GoogleGenerativeAISafetyRating = SafetyRatingSchema;
|
|
168
|
+
type GoogleGenerativeAIPromptFeedback = PromptFeedbackSchema;
|
|
169
|
+
type GoogleGenerativeAIUsageMetadata = UsageMetadataSchema;
|
|
165
170
|
interface GoogleGenerativeAIProviderMetadata {
|
|
171
|
+
promptFeedback: GoogleGenerativeAIPromptFeedback | null;
|
|
166
172
|
groundingMetadata: GoogleGenerativeAIGroundingMetadata | null;
|
|
167
173
|
urlContextMetadata: GoogleGenerativeAIUrlContextMetadata | null;
|
|
168
174
|
safetyRatings: GoogleGenerativeAISafetyRating[] | null;
|
|
175
|
+
usageMetadata: GoogleGenerativeAIUsageMetadata | null;
|
|
176
|
+
finishMessage: string | null;
|
|
169
177
|
}
|
|
170
178
|
|
|
171
179
|
type GoogleGenerativeAIImageModelId = 'imagen-4.0-generate-001' | 'imagen-4.0-ultra-generate-001' | 'imagen-4.0-fast-generate-001' | 'gemini-2.5-flash-image' | 'gemini-3-pro-image-preview' | 'gemini-3.1-flash-image-preview' | (string & {});
|
package/dist/index.js
CHANGED
|
@@ -30,7 +30,7 @@ module.exports = __toCommonJS(src_exports);
|
|
|
30
30
|
var import_provider_utils16 = require("@ai-sdk/provider-utils");
|
|
31
31
|
|
|
32
32
|
// src/version.ts
|
|
33
|
-
var VERSION = true ? "4.0.0-beta.7" : "0.0.0-test";
|
|
33
|
+
var VERSION = true ? "4.0.0-beta.8" : "0.0.0-test";
|
|
34
34
|
|
|
35
35
|
// src/google-generative-ai-embedding-model.ts
|
|
36
36
|
var import_provider = require("@ai-sdk/provider");
|
|
@@ -1048,7 +1048,7 @@ var GoogleGenerativeAILanguageModel = class {
|
|
|
1048
1048
|
};
|
|
1049
1049
|
}
|
|
1050
1050
|
async doGenerate(options) {
|
|
1051
|
-
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
|
|
1051
|
+
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
|
|
1052
1052
|
const { args, warnings, providerOptionsName } = await this.getArgs(options);
|
|
1053
1053
|
const mergedHeaders = (0, import_provider_utils6.combineHeaders)(
|
|
1054
1054
|
await (0, import_provider_utils6.resolve)(this.config.headers),
|
|
@@ -1170,7 +1170,8 @@ var GoogleGenerativeAILanguageModel = class {
|
|
|
1170
1170
|
groundingMetadata: (_h = candidate.groundingMetadata) != null ? _h : null,
|
|
1171
1171
|
urlContextMetadata: (_i = candidate.urlContextMetadata) != null ? _i : null,
|
|
1172
1172
|
safetyRatings: (_j = candidate.safetyRatings) != null ? _j : null,
|
|
1173
|
-
usageMetadata: usageMetadata != null ? usageMetadata : null
|
|
1173
|
+
usageMetadata: usageMetadata != null ? usageMetadata : null,
|
|
1174
|
+
finishMessage: (_k = candidate.finishMessage) != null ? _k : null
|
|
1174
1175
|
}
|
|
1175
1176
|
},
|
|
1176
1177
|
request: { body: args },
|
|
@@ -1220,7 +1221,7 @@ var GoogleGenerativeAILanguageModel = class {
|
|
|
1220
1221
|
controller.enqueue({ type: "stream-start", warnings });
|
|
1221
1222
|
},
|
|
1222
1223
|
transform(chunk, controller) {
|
|
1223
|
-
var _a, _b, _c, _d, _e, _f;
|
|
1224
|
+
var _a, _b, _c, _d, _e, _f, _g;
|
|
1224
1225
|
if (options.includeRawChunks) {
|
|
1225
1226
|
controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
|
|
1226
1227
|
}
|
|
@@ -1422,12 +1423,11 @@ var GoogleGenerativeAILanguageModel = class {
|
|
|
1422
1423
|
promptFeedback: (_e = value.promptFeedback) != null ? _e : null,
|
|
1423
1424
|
groundingMetadata: lastGroundingMetadata,
|
|
1424
1425
|
urlContextMetadata: lastUrlContextMetadata,
|
|
1425
|
-
safetyRatings: (_f = candidate.safetyRatings) != null ? _f : null
|
|
1426
|
+
safetyRatings: (_f = candidate.safetyRatings) != null ? _f : null,
|
|
1427
|
+
usageMetadata: usageMetadata != null ? usageMetadata : null,
|
|
1428
|
+
finishMessage: (_g = candidate.finishMessage) != null ? _g : null
|
|
1426
1429
|
}
|
|
1427
1430
|
};
|
|
1428
|
-
if (usageMetadata != null) {
|
|
1429
|
-
providerMetadata[providerOptionsName].usageMetadata = usageMetadata;
|
|
1430
|
-
}
|
|
1431
1431
|
}
|
|
1432
1432
|
},
|
|
1433
1433
|
flush(controller) {
|
|
@@ -1687,6 +1687,7 @@ var responseSchema = (0, import_provider_utils6.lazySchema)(
|
|
|
1687
1687
|
import_v45.z.object({
|
|
1688
1688
|
content: getContentSchema().nullish().or(import_v45.z.object({}).strict()),
|
|
1689
1689
|
finishReason: import_v45.z.string().nullish(),
|
|
1690
|
+
finishMessage: import_v45.z.string().nullish(),
|
|
1690
1691
|
safetyRatings: import_v45.z.array(getSafetyRatingSchema()).nullish(),
|
|
1691
1692
|
groundingMetadata: getGroundingMetadataSchema().nullish(),
|
|
1692
1693
|
urlContextMetadata: getUrlContextMetadataSchema().nullish()
|
|
@@ -1707,6 +1708,7 @@ var chunkSchema = (0, import_provider_utils6.lazySchema)(
|
|
|
1707
1708
|
import_v45.z.object({
|
|
1708
1709
|
content: getContentSchema().nullish(),
|
|
1709
1710
|
finishReason: import_v45.z.string().nullish(),
|
|
1711
|
+
finishMessage: import_v45.z.string().nullish(),
|
|
1710
1712
|
safetyRatings: import_v45.z.array(getSafetyRatingSchema()).nullish(),
|
|
1711
1713
|
groundingMetadata: getGroundingMetadataSchema().nullish(),
|
|
1712
1714
|
urlContextMetadata: getUrlContextMetadataSchema().nullish()
|