@ai-sdk/google 1.2.8 → 2.0.0-canary.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -9
- package/dist/index.d.mts +7 -7
- package/dist/index.d.ts +7 -7
- package/dist/index.js +171 -165
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +171 -163
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.d.mts +6 -6
- package/internal/dist/index.d.ts +6 -6
- package/internal/dist/index.js +1 -1
- package/internal/dist/index.js.map +1 -1
- package/internal/dist/index.mjs +1 -1
- package/internal/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.mjs
CHANGED
@@ -1,21 +1,113 @@
 // src/google-provider.ts
+import {
+  NoSuchModelError
+} from "@ai-sdk/provider";
 import {
   generateId,
   loadApiKey,
   withoutTrailingSlash
 } from "@ai-sdk/provider-utils";
 
-// src/google-generative-ai-language-model.ts
+// src/google-generative-ai-embedding-model.ts
+import {
+  TooManyEmbeddingValuesForCallError
+} from "@ai-sdk/provider";
 import {
   combineHeaders,
-  createEventSourceResponseHandler,
   createJsonResponseHandler,
-  parseProviderOptions,
   postJsonToApi,
   resolve
 } from "@ai-sdk/provider-utils";
 import { z as z2 } from "zod";
 
+// src/google-error.ts
+import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
+import { z } from "zod";
+var googleErrorDataSchema = z.object({
+  error: z.object({
+    code: z.number().nullable(),
+    message: z.string(),
+    status: z.string()
+  })
+});
+var googleFailedResponseHandler = createJsonErrorResponseHandler({
+  errorSchema: googleErrorDataSchema,
+  errorToMessage: (data) => data.error.message
+});
+
+// src/google-generative-ai-embedding-model.ts
+var GoogleGenerativeAIEmbeddingModel = class {
+  constructor(modelId, settings, config) {
+    this.specificationVersion = "v1";
+    this.modelId = modelId;
+    this.settings = settings;
+    this.config = config;
+  }
+  get provider() {
+    return this.config.provider;
+  }
+  get maxEmbeddingsPerCall() {
+    return 2048;
+  }
+  get supportsParallelCalls() {
+    return true;
+  }
+  async doEmbed({
+    values,
+    headers,
+    abortSignal
+  }) {
+    if (values.length > this.maxEmbeddingsPerCall) {
+      throw new TooManyEmbeddingValuesForCallError({
+        provider: this.provider,
+        modelId: this.modelId,
+        maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
+        values
+      });
+    }
+    const mergedHeaders = combineHeaders(
+      await resolve(this.config.headers),
+      headers
+    );
+    const { responseHeaders, value: response } = await postJsonToApi({
+      url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,
+      headers: mergedHeaders,
+      body: {
+        requests: values.map((value) => ({
+          model: `models/${this.modelId}`,
+          content: { role: "user", parts: [{ text: value }] },
+          outputDimensionality: this.settings.outputDimensionality
+        }))
+      },
+      failedResponseHandler: googleFailedResponseHandler,
+      successfulResponseHandler: createJsonResponseHandler(
+        googleGenerativeAITextEmbeddingResponseSchema
+      ),
+      abortSignal,
+      fetch: this.config.fetch
+    });
+    return {
+      embeddings: response.embeddings.map((item) => item.values),
+      usage: void 0,
+      rawResponse: { headers: responseHeaders }
+    };
+  }
+};
+var googleGenerativeAITextEmbeddingResponseSchema = z2.object({
+  embeddings: z2.array(z2.object({ values: z2.array(z2.number()) }))
+});
+
+// src/google-generative-ai-language-model.ts
+import {
+  combineHeaders as combineHeaders2,
+  createEventSourceResponseHandler,
+  createJsonResponseHandler as createJsonResponseHandler2,
+  parseProviderOptions,
+  postJsonToApi as postJsonToApi2,
+  resolve as resolve2
+} from "@ai-sdk/provider-utils";
+import { z as z3 } from "zod";
+
 // src/convert-json-schema-to-openapi-schema.ts
 function convertJSONSchemaToOpenAPISchema(jsonSchema) {
   if (isEmptyObjectSchema(jsonSchema)) {
@@ -249,21 +341,6 @@ function getModelPath(modelId) {
   return modelId.includes("/") ? modelId : `models/${modelId}`;
 }
 
-// src/google-error.ts
-import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
-import { z } from "zod";
-var googleErrorDataSchema = z.object({
-  error: z.object({
-    code: z.number().nullable(),
-    message: z.string(),
-    status: z.string()
-  })
-});
-var googleFailedResponseHandler = createJsonErrorResponseHandler({
-  errorSchema: googleErrorDataSchema,
-  errorToMessage: (data) => data.error.message
-});
-
 // src/google-prepare-tools.ts
 import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError2
@@ -376,7 +453,7 @@ function mapGoogleGenerativeAIFinishReason({
 // src/google-generative-ai-language-model.ts
 var GoogleGenerativeAILanguageModel = class {
   constructor(modelId, settings, config) {
-    this.specificationVersion = "v1";
+    this.specificationVersion = "v2";
     this.defaultObjectGenerationMode = "json";
     this.supportsImageUrls = false;
     this.modelId = modelId;
@@ -509,22 +586,22 @@ var GoogleGenerativeAILanguageModel = class {
     var _a, _b, _c, _d, _e;
     const { args, warnings } = await this.getArgs(options);
     const body = JSON.stringify(args);
-    const mergedHeaders = combineHeaders(
-      await resolve(this.config.headers),
+    const mergedHeaders = combineHeaders2(
+      await resolve2(this.config.headers),
       options.headers
     );
     const {
       responseHeaders,
       value: response,
       rawValue: rawResponse
-    } = await postJsonToApi({
+    } = await postJsonToApi2({
       url: `${this.config.baseURL}/${getModelPath(
         this.modelId
       )}:generateContent`,
       headers: mergedHeaders,
       body: args,
       failedResponseHandler: googleFailedResponseHandler,
-      successfulResponseHandler: createJsonResponseHandler(responseSchema),
+      successfulResponseHandler: createJsonResponseHandler2(responseSchema),
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
@@ -570,11 +647,11 @@ var GoogleGenerativeAILanguageModel = class {
   async doStream(options) {
     const { args, warnings } = await this.getArgs(options);
     const body = JSON.stringify(args);
-    const headers = combineHeaders(
-      await resolve(this.config.headers),
+    const headers = combineHeaders2(
+      await resolve2(this.config.headers),
       options.headers
     );
-    const { responseHeaders, value: response } = await postJsonToApi({
+    const { responseHeaders, value: response } = await postJsonToApi2({
       url: `${this.config.baseURL}/${getModelPath(
         this.modelId
       )}:streamGenerateContent?alt=sse`,
@@ -732,170 +809,98 @@ function extractSources({
     title: chunk.web.title
   }));
 }
-var contentSchema = z2.object({
-  role: z2.string(),
-  parts: z2.array(
-    z2.union([
-      z2.object({
-        text: z2.string()
+var contentSchema = z3.object({
+  role: z3.string(),
+  parts: z3.array(
+    z3.union([
+      z3.object({
+        text: z3.string()
       }),
-      z2.object({
-        functionCall: z2.object({
-          name: z2.string(),
-          args: z2.unknown()
+      z3.object({
+        functionCall: z3.object({
+          name: z3.string(),
+          args: z3.unknown()
         })
       }),
-      z2.object({
-        inlineData: z2.object({
-          mimeType: z2.string(),
-          data: z2.string()
+      z3.object({
+        inlineData: z3.object({
+          mimeType: z3.string(),
+          data: z3.string()
         })
       })
     ])
   ).nullish()
 });
-var groundingChunkSchema = z2.object({
-  web: z2.object({ uri: z2.string(), title: z2.string() }).nullish(),
-  retrievedContext: z2.object({ uri: z2.string(), title: z2.string() }).nullish()
+var groundingChunkSchema = z3.object({
+  web: z3.object({ uri: z3.string(), title: z3.string() }).nullish(),
+  retrievedContext: z3.object({ uri: z3.string(), title: z3.string() }).nullish()
 });
-var groundingMetadataSchema = z2.object({
-  webSearchQueries: z2.array(z2.string()).nullish(),
-  retrievalQueries: z2.array(z2.string()).nullish(),
-  searchEntryPoint: z2.object({ renderedContent: z2.string() }).nullish(),
-  groundingChunks: z2.array(groundingChunkSchema).nullish(),
-  groundingSupports: z2.array(
-    z2.object({
-      segment: z2.object({
-        startIndex: z2.number().nullish(),
-        endIndex: z2.number().nullish(),
-        text: z2.string().nullish()
+var groundingMetadataSchema = z3.object({
+  webSearchQueries: z3.array(z3.string()).nullish(),
+  retrievalQueries: z3.array(z3.string()).nullish(),
+  searchEntryPoint: z3.object({ renderedContent: z3.string() }).nullish(),
+  groundingChunks: z3.array(groundingChunkSchema).nullish(),
+  groundingSupports: z3.array(
+    z3.object({
+      segment: z3.object({
+        startIndex: z3.number().nullish(),
+        endIndex: z3.number().nullish(),
+        text: z3.string().nullish()
       }),
-      segment_text: z2.string().nullish(),
-      groundingChunkIndices: z2.array(z2.number()).nullish(),
-      supportChunkIndices: z2.array(z2.number()).nullish(),
-      confidenceScores: z2.array(z2.number()).nullish(),
-      confidenceScore: z2.array(z2.number()).nullish()
+      segment_text: z3.string().nullish(),
+      groundingChunkIndices: z3.array(z3.number()).nullish(),
+      supportChunkIndices: z3.array(z3.number()).nullish(),
+      confidenceScores: z3.array(z3.number()).nullish(),
+      confidenceScore: z3.array(z3.number()).nullish()
     })
   ).nullish(),
-  retrievalMetadata: z2.union([
-    z2.object({
-      webDynamicRetrievalScore: z2.number()
+  retrievalMetadata: z3.union([
+    z3.object({
+      webDynamicRetrievalScore: z3.number()
     }),
-    z2.object({})
+    z3.object({})
   ]).nullish()
 });
-var safetyRatingSchema = z2.object({
-  category: z2.string(),
-  probability: z2.string(),
-  probabilityScore: z2.number().nullish(),
-  severity: z2.string().nullish(),
-  severityScore: z2.number().nullish(),
-  blocked: z2.boolean().nullish()
+var safetyRatingSchema = z3.object({
+  category: z3.string(),
+  probability: z3.string(),
+  probabilityScore: z3.number().nullish(),
+  severity: z3.string().nullish(),
+  severityScore: z3.number().nullish(),
+  blocked: z3.boolean().nullish()
 });
-var responseSchema = z2.object({
-  candidates: z2.array(
-    z2.object({
-      content: contentSchema.nullish().or(z2.object({}).strict()),
-      finishReason: z2.string().nullish(),
-      safetyRatings: z2.array(safetyRatingSchema).nullish(),
+var responseSchema = z3.object({
+  candidates: z3.array(
+    z3.object({
+      content: contentSchema.nullish().or(z3.object({}).strict()),
+      finishReason: z3.string().nullish(),
+      safetyRatings: z3.array(safetyRatingSchema).nullish(),
       groundingMetadata: groundingMetadataSchema.nullish()
     })
   ),
-  usageMetadata: z2.object({
-    promptTokenCount: z2.number().nullish(),
-    candidatesTokenCount: z2.number().nullish(),
-    totalTokenCount: z2.number().nullish()
+  usageMetadata: z3.object({
+    promptTokenCount: z3.number().nullish(),
+    candidatesTokenCount: z3.number().nullish(),
+    totalTokenCount: z3.number().nullish()
   }).nullish()
 });
-var chunkSchema = z2.object({
-  candidates: z2.array(
-    z2.object({
+var chunkSchema = z3.object({
+  candidates: z3.array(
+    z3.object({
       content: contentSchema.nullish(),
-      finishReason: z2.string().nullish(),
-      safetyRatings: z2.array(safetyRatingSchema).nullish(),
+      finishReason: z3.string().nullish(),
+      safetyRatings: z3.array(safetyRatingSchema).nullish(),
       groundingMetadata: groundingMetadataSchema.nullish()
     })
   ).nullish(),
-  usageMetadata: z2.object({
-    promptTokenCount: z2.number().nullish(),
-    candidatesTokenCount: z2.number().nullish(),
-    totalTokenCount: z2.number().nullish()
+  usageMetadata: z3.object({
+    promptTokenCount: z3.number().nullish(),
+    candidatesTokenCount: z3.number().nullish(),
+    totalTokenCount: z3.number().nullish()
   }).nullish()
 });
-var googleGenerativeAIProviderOptionsSchema = z2.object({
-  responseModalities: z2.array(z2.enum(["TEXT", "IMAGE"])).nullish()
-});
-
-// src/google-generative-ai-embedding-model.ts
-import {
-  TooManyEmbeddingValuesForCallError
-} from "@ai-sdk/provider";
-import {
-  combineHeaders as combineHeaders2,
-  createJsonResponseHandler as createJsonResponseHandler2,
-  postJsonToApi as postJsonToApi2,
-  resolve as resolve2
-} from "@ai-sdk/provider-utils";
-import { z as z3 } from "zod";
-var GoogleGenerativeAIEmbeddingModel = class {
-  constructor(modelId, settings, config) {
-    this.specificationVersion = "v1";
-    this.modelId = modelId;
-    this.settings = settings;
-    this.config = config;
-  }
-  get provider() {
-    return this.config.provider;
-  }
-  get maxEmbeddingsPerCall() {
-    return 2048;
-  }
-  get supportsParallelCalls() {
-    return true;
-  }
-  async doEmbed({
-    values,
-    headers,
-    abortSignal
-  }) {
-    if (values.length > this.maxEmbeddingsPerCall) {
-      throw new TooManyEmbeddingValuesForCallError({
-        provider: this.provider,
-        modelId: this.modelId,
-        maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
-        values
-      });
-    }
-    const mergedHeaders = combineHeaders2(
-      await resolve2(this.config.headers),
-      headers
-    );
-    const { responseHeaders, value: response } = await postJsonToApi2({
-      url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,
-      headers: mergedHeaders,
-      body: {
-        requests: values.map((value) => ({
-          model: `models/${this.modelId}`,
-          content: { role: "user", parts: [{ text: value }] },
-          outputDimensionality: this.settings.outputDimensionality
-        }))
-      },
-      failedResponseHandler: googleFailedResponseHandler,
-      successfulResponseHandler: createJsonResponseHandler2(
-        googleGenerativeAITextEmbeddingResponseSchema
-      ),
-      abortSignal,
-      fetch: this.config.fetch
-    });
-    return {
-      embeddings: response.embeddings.map((item) => item.values),
-      usage: void 0,
-      rawResponse: { headers: responseHeaders }
-    };
-  }
-};
-var googleGenerativeAITextEmbeddingResponseSchema = z3.object({
-  embeddings: z3.array(z3.object({ values: z3.array(z3.number()) }))
+var googleGenerativeAIProviderOptionsSchema = z3.object({
+  responseModalities: z3.array(z3.enum(["TEXT", "IMAGE"])).nullish()
 });
 
 // src/google-supported-file-url.ts
@@ -946,6 +951,9 @@ function createGoogleGenerativeAI(options = {}) {
   provider.embedding = createEmbeddingModel;
   provider.textEmbedding = createEmbeddingModel;
   provider.textEmbeddingModel = createEmbeddingModel;
+  provider.imageModel = (modelId) => {
+    throw new NoSuchModelError({ modelId, modelType: "imageModel" });
+  };
   return provider;
 }
 var google = createGoogleGenerativeAI();
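
Beyond the module reshuffle and the specification bump from "v1" to "v2", the one new provider capability in this diff is the imageModel factory added in the final hunk, which always throws. A minimal caller-side sketch of that behavior, assuming the package's public createGoogleGenerativeAI export; the model id string is a made-up placeholder:

import { NoSuchModelError } from "@ai-sdk/provider";
import { createGoogleGenerativeAI } from "@ai-sdk/google";

// Same construction as the bundled `var google = createGoogleGenerativeAI();` above.
const google = createGoogleGenerativeAI();

try {
  // 2.0.0-canary.1 registers no image models, so this call always throws.
  google.imageModel("imagen-placeholder"); // hypothetical model id
} catch (error) {
  if (error instanceof NoSuchModelError) {
    console.log(`no Google image model: ${error.modelId}`);
  }
}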