@ai-sdk/google 1.0.4 → 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/dist/index.d.mts +5 -2
- package/dist/index.d.ts +5 -2
- package/dist/index.js +35 -13
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +39 -15
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.d.mts +61 -0
- package/internal/dist/index.d.ts +61 -0
- package/internal/dist/index.js +676 -0
- package/internal/dist/index.js.map +1 -0
- package/internal/dist/index.mjs +659 -0
- package/internal/dist/index.mjs.map +1 -0
- package/package.json +10 -3
package/CHANGELOG.md
CHANGED
package/dist/index.d.mts
CHANGED
```diff
@@ -22,7 +22,7 @@ interface GoogleGenerativeAISettings {
 Optional. A list of unique safety settings for blocking unsafe content.
      */
     safetySettings?: Array<{
-        category: 'HARM_CATEGORY_HATE_SPEECH' | 'HARM_CATEGORY_DANGEROUS_CONTENT' | 'HARM_CATEGORY_HARASSMENT' | 'HARM_CATEGORY_SEXUALLY_EXPLICIT';
+        category: 'HARM_CATEGORY_UNSPECIFIED' | 'HARM_CATEGORY_HATE_SPEECH' | 'HARM_CATEGORY_DANGEROUS_CONTENT' | 'HARM_CATEGORY_HARASSMENT' | 'HARM_CATEGORY_SEXUALLY_EXPLICIT' | 'HARM_CATEGORY_CIVIC_INTEGRITY';
         threshold: 'HARM_BLOCK_THRESHOLD_UNSPECIFIED' | 'BLOCK_LOW_AND_ABOVE' | 'BLOCK_MEDIUM_AND_ABOVE' | 'BLOCK_ONLY_HIGH' | 'BLOCK_NONE';
     }>;
 }
```
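The widened `category` union now also accepts `HARM_CATEGORY_UNSPECIFIED` and `HARM_CATEGORY_CIVIC_INTEGRITY`. A minimal usage sketch of the new settings; the model id is illustrative and not part of this diff:

```ts
import { google } from '@ai-sdk/google';

// Sketch: configuring the newly accepted safety categories (model id is illustrative).
const model = google('gemini-1.5-flash', {
  safetySettings: [
    { category: 'HARM_CATEGORY_CIVIC_INTEGRITY', threshold: 'BLOCK_MEDIUM_AND_ABOVE' },
    { category: 'HARM_CATEGORY_HATE_SPEECH', threshold: 'BLOCK_ONLY_HIGH' },
  ],
});
```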
```diff
@@ -68,12 +68,15 @@ interface GoogleGenerativeAIProviderSettings {
     /**
 Custom headers to include in the requests.
      */
-    headers?: Record<string, string>;
+    headers?: Record<string, string | undefined>;
     /**
 Custom fetch implementation. You can use it as a middleware to intercept requests,
 or to provide a custom fetch implementation for e.g. testing.
      */
     fetch?: FetchFunction;
+    /**
+Optional function to generate a unique ID for each request.
+     */
     generateId?: () => string;
 }
 /**
```
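Provider-level header values may now be `undefined` (handy for conditionally set headers), and `generateId` gains a doc comment as the per-request ID generator. A minimal sketch, assuming a runtime that provides the global `crypto.randomUUID`; the header name is illustrative:

```ts
import { createGoogleGenerativeAI } from '@ai-sdk/google';

const google = createGoogleGenerativeAI({
  headers: {
    // value is string | undefined, which the widened type now allows
    'x-custom-trace': process.env.TRACE_HEADER,
  },
  // optional: supply custom request IDs, e.g. deterministic ones for tests
  generateId: () => crypto.randomUUID(),
});
```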
package/dist/index.d.ts
CHANGED
```diff
@@ -22,7 +22,7 @@ interface GoogleGenerativeAISettings {
 Optional. A list of unique safety settings for blocking unsafe content.
      */
     safetySettings?: Array<{
-        category: 'HARM_CATEGORY_HATE_SPEECH' | 'HARM_CATEGORY_DANGEROUS_CONTENT' | 'HARM_CATEGORY_HARASSMENT' | 'HARM_CATEGORY_SEXUALLY_EXPLICIT';
+        category: 'HARM_CATEGORY_UNSPECIFIED' | 'HARM_CATEGORY_HATE_SPEECH' | 'HARM_CATEGORY_DANGEROUS_CONTENT' | 'HARM_CATEGORY_HARASSMENT' | 'HARM_CATEGORY_SEXUALLY_EXPLICIT' | 'HARM_CATEGORY_CIVIC_INTEGRITY';
         threshold: 'HARM_BLOCK_THRESHOLD_UNSPECIFIED' | 'BLOCK_LOW_AND_ABOVE' | 'BLOCK_MEDIUM_AND_ABOVE' | 'BLOCK_ONLY_HIGH' | 'BLOCK_NONE';
     }>;
 }
```
```diff
@@ -68,12 +68,15 @@ interface GoogleGenerativeAIProviderSettings {
     /**
 Custom headers to include in the requests.
      */
-    headers?: Record<string, string>;
+    headers?: Record<string, string | undefined>;
     /**
 Custom fetch implementation. You can use it as a middleware to intercept requests,
 or to provide a custom fetch implementation for e.g. testing.
      */
     fetch?: FetchFunction;
+    /**
+Optional function to generate a unique ID for each request.
+     */
     generateId?: () => string;
 }
 /**
```
package/dist/index.js
CHANGED
```diff
@@ -251,10 +251,17 @@ var googleFailedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)({
 
 // src/google-prepare-tools.ts
 var import_provider2 = require("@ai-sdk/provider");
-function prepareTools(mode) {
+function prepareTools(mode, useSearchGrounding) {
   var _a, _b;
   const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
   const toolWarnings = [];
+  if (useSearchGrounding) {
+    return {
+      tools: { googleSearchRetrieval: {} },
+      toolConfig: void 0,
+      toolWarnings
+    };
+  }
   if (tools == null) {
     return { tools: void 0, toolConfig: void 0, toolWarnings };
   }
```
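When search grounding is requested, `prepareTools` now short-circuits and sends Google's built-in `googleSearchRetrieval` tool instead of any function declarations. A minimal sketch of enabling it from the settings side (model id is illustrative):

```ts
import { google } from '@ai-sdk/google';

// With useSearchGrounding enabled, the request carries
// tools: { googleSearchRetrieval: {} } and no function declarations.
const groundedModel = google('gemini-1.5-flash', {
  useSearchGrounding: true,
});
```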
```diff
@@ -369,7 +376,7 @@ var GoogleGenerativeAILanguageModel = class {
     responseFormat,
     seed
   }) {
-    var _a;
+    var _a, _b;
     const type = mode.type;
     const warnings = [];
     if (seed != null) {
```
```diff
@@ -396,7 +403,10 @@ var GoogleGenerativeAILanguageModel = class {
     const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(prompt);
     switch (type) {
       case "regular": {
-        const { tools, toolConfig, toolWarnings } = prepareTools(mode);
+        const { tools, toolConfig, toolWarnings } = prepareTools(
+          mode,
+          (_a = this.settings.useSearchGrounding) != null ? _a : false
+        );
         return {
           args: {
             generationConfig,
```
```diff
@@ -437,7 +447,7 @@ var GoogleGenerativeAILanguageModel = class {
               functionDeclarations: [
                 {
                   name: mode.tool.name,
-                  description: (_a = mode.tool.description) != null ? _a : "",
+                  description: (_b = mode.tool.description) != null ? _b : "",
                   parameters: convertJSONSchemaToOpenAPISchema(
                     mode.tool.parameters
                   )
```
```diff
@@ -461,14 +471,18 @@ var GoogleGenerativeAILanguageModel = class {
     return url.toString().startsWith("https://generativelanguage.googleapis.com/v1beta/files/");
   }
   async doGenerate(options) {
-    var _a, _b;
+    var _a, _b, _c, _d, _e, _f;
     const { args, warnings } = await this.getArgs(options);
     const body = JSON.stringify(args);
+    const mergedHeaders = (0, import_provider_utils3.combineHeaders)(
+      await (0, import_provider_utils3.resolve)(this.config.headers),
+      options.headers
+    );
     const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
       url: `${this.config.baseURL}/${getModelPath(
         this.modelId
       )}:generateContent`,
-      headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
+      headers: mergedHeaders,
       body: args,
       failedResponseHandler: googleFailedResponseHandler,
       successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(responseSchema),
```
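`doGenerate` (and `doStream` below) now treat the provider-level headers as a `Resolvable` value: a plain object, a Promise, or a function returning either, which is resolved before being merged with the per-call headers. A hypothetical helper, not part of the package, showing the same `resolve` + `combineHeaders` pattern from `@ai-sdk/provider-utils`:

```ts
import {
  combineHeaders,
  resolve,
  type Resolvable,
} from '@ai-sdk/provider-utils';

// Illustrative helper: resolve the configured headers first, then merge the
// per-call headers, mirroring the compiled code above.
async function buildRequestHeaders(
  configHeaders: Resolvable<Record<string, string | undefined>>,
  callHeaders?: Record<string, string | undefined>,
) {
  return combineHeaders(await resolve(configHeaders), callHeaders);
}
```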
```diff
@@ -478,20 +492,20 @@ var GoogleGenerativeAILanguageModel = class {
     const { contents: rawPrompt, ...rawSettings } = args;
     const candidate = response.candidates[0];
     const toolCalls = getToolCallsFromParts({
-      parts: candidate.content.parts,
+      parts: (_b = (_a = candidate.content) == null ? void 0 : _a.parts) != null ? _b : [],
       generateId: this.config.generateId
     });
     const usageMetadata = response.usageMetadata;
     return {
-      text: getTextFromParts(candidate.content.parts),
+      text: getTextFromParts((_d = (_c = candidate.content) == null ? void 0 : _c.parts) != null ? _d : []),
       toolCalls,
       finishReason: mapGoogleGenerativeAIFinishReason({
         finishReason: candidate.finishReason,
         hasToolCalls: toolCalls != null && toolCalls.length > 0
       }),
       usage: {
-        promptTokens: (_a = usageMetadata == null ? void 0 : usageMetadata.promptTokenCount) != null ? _a : NaN,
-        completionTokens: (_b = usageMetadata == null ? void 0 : usageMetadata.candidatesTokenCount) != null ? _b : NaN
+        promptTokens: (_e = usageMetadata == null ? void 0 : usageMetadata.promptTokenCount) != null ? _e : NaN,
+        completionTokens: (_f = usageMetadata == null ? void 0 : usageMetadata.candidatesTokenCount) != null ? _f : NaN
       },
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
```
```diff
@@ -502,11 +516,15 @@ var GoogleGenerativeAILanguageModel = class {
   async doStream(options) {
     const { args, warnings } = await this.getArgs(options);
     const body = JSON.stringify(args);
+    const headers = (0, import_provider_utils3.combineHeaders)(
+      await (0, import_provider_utils3.resolve)(this.config.headers),
+      options.headers
+    );
     const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
       url: `${this.config.baseURL}/${getModelPath(
         this.modelId
       )}:streamGenerateContent?alt=sse`,
-      headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
+      headers,
       body: args,
       failedResponseHandler: googleFailedResponseHandler,
       successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(chunkSchema),
```
```diff
@@ -632,7 +650,7 @@ var contentSchema = import_zod2.z.object({
 var responseSchema = import_zod2.z.object({
   candidates: import_zod2.z.array(
     import_zod2.z.object({
-      content: contentSchema,
+      content: contentSchema.optional(),
       finishReason: import_zod2.z.string().optional()
     })
   ),
```
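Making `content` optional lets the response schema accept candidates that arrive with a finish reason but no content, which is why the `doGenerate` changes above fall back to empty `parts`. A minimal zod sketch of the relaxed candidate shape:

```ts
import { z } from 'zod';

// Sketch: a candidate with only a finishReason (e.g. a blocked response) now parses.
const candidateSchema = z.object({
  content: z
    .object({ role: z.string(), parts: z.array(z.unknown()) })
    .optional(),
  finishReason: z.string().optional(),
});

candidateSchema.parse({ finishReason: 'SAFETY' }); // accepted with the 1.0.5-style schema
```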
```diff
@@ -689,9 +707,13 @@ var GoogleGenerativeAIEmbeddingModel = class {
         values
       });
     }
+    const mergedHeaders = (0, import_provider_utils4.combineHeaders)(
+      await (0, import_provider_utils4.resolve)(this.config.headers),
+      headers
+    );
     const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
       url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,
-      headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), headers),
+      headers: mergedHeaders,
       body: {
         requests: values.map((value) => ({
           model: `models/${this.modelId}`,
```
package/dist/index.js.map
CHANGED
```diff
@@ -1 +1 @@
- (previous build's sourcemap: single-line minified JSON with embedded sources, not reproduced here)
+ (regenerated sourcemap for the 1.0.5 build: single-line minified JSON with embedded sources, not reproduced here)
```
NaN,\n },\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = JSON.stringify(args);\n const headers = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:streamGenerateContent?alt=sse`,\n headers,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { contents: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n\n const generateId = this.config.generateId;\n let hasToolCalls = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n const usageMetadata = value.usageMetadata;\n\n if (usageMetadata != null) {\n usage = {\n promptTokens: usageMetadata.promptTokenCount ?? NaN,\n completionTokens: usageMetadata.candidatesTokenCount ?? NaN,\n };\n }\n\n const candidate = value.candidates?.[0];\n\n // sometimes the API returns an empty candidates array\n if (candidate == null) {\n return;\n }\n\n if (candidate.finishReason != null) {\n finishReason = mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls,\n });\n }\n\n const content = candidate.content;\n\n if (content == null) {\n return;\n }\n\n const deltaText = getTextFromParts(content.parts);\n if (deltaText != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: deltaText,\n });\n }\n\n const toolCallDeltas = getToolCallsFromParts({\n parts: content.parts,\n generateId,\n });\n\n if (toolCallDeltas != null) {\n for (const toolCall of toolCallDeltas) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n argsTextDelta: toolCall.args,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n args: toolCall.args,\n });\n\n hasToolCalls = true;\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({ type: 'finish', finishReason, usage });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body },\n };\n }\n}\n\nfunction getToolCallsFromParts({\n parts,\n generateId,\n}: {\n parts: z.infer<typeof contentSchema>['parts'];\n generateId: () => string;\n}) {\n const functionCallParts = parts.filter(\n part => 'functionCall' in part,\n ) as Array<\n GoogleGenerativeAIContentPart & {\n functionCall: { name: string; args: unknown };\n }\n >;\n\n return functionCallParts.length === 0\n ? 
undefined\n : functionCallParts.map(part => ({\n toolCallType: 'function' as const,\n toolCallId: generateId(),\n toolName: part.functionCall.name,\n args: JSON.stringify(part.functionCall.args),\n }));\n}\n\nfunction getTextFromParts(parts: z.infer<typeof contentSchema>['parts']) {\n const textParts = parts.filter(part => 'text' in part) as Array<\n GoogleGenerativeAIContentPart & { text: string }\n >;\n\n return textParts.length === 0\n ? undefined\n : textParts.map(part => part.text).join('');\n}\n\nconst contentSchema = z.object({\n role: z.string(),\n parts: z.array(\n z.union([\n z.object({\n text: z.string(),\n }),\n z.object({\n functionCall: z.object({\n name: z.string(),\n args: z.unknown(),\n }),\n }),\n ]),\n ),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst responseSchema = z.object({\n candidates: z.array(\n z.object({\n content: contentSchema.optional(),\n finishReason: z.string().optional(),\n }),\n ),\n usageMetadata: z\n .object({\n promptTokenCount: z.number(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number(),\n })\n .optional(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst chunkSchema = z.object({\n candidates: z\n .array(\n z.object({\n content: contentSchema.optional(),\n finishReason: z.string().optional(),\n }),\n )\n .nullish(),\n usageMetadata: z\n .object({\n promptTokenCount: z.number(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number(),\n })\n .nullish(),\n});\n","import { JSONSchema7Definition } from '@ai-sdk/provider';\n\n/**\n * Converts JSON Schema 7 to OpenAPI Schema 3.0\n */\nexport function convertJSONSchemaToOpenAPISchema(\n jsonSchema: JSONSchema7Definition,\n): unknown {\n // parameters need to be undefined if they are empty objects:\n if (isEmptyObjectSchema(jsonSchema)) {\n return undefined;\n }\n\n if (typeof jsonSchema === 'boolean') {\n return { type: 'boolean', properties: {} };\n }\n\n const {\n type,\n description,\n required,\n properties,\n items,\n allOf,\n anyOf,\n oneOf,\n format,\n const: constValue,\n minLength,\n } = jsonSchema;\n\n const result: Record<string, unknown> = {};\n\n if (description) result.description = description;\n if (required) result.required = required;\n if (format) result.format = format;\n\n if (constValue !== undefined) {\n result.enum = [constValue];\n }\n\n // Handle type\n if (type) {\n if (Array.isArray(type)) {\n if (type.includes('null')) {\n result.type = type.filter(t => t !== 'null')[0];\n result.nullable = true;\n } else {\n result.type = type;\n }\n } else if (type === 'null') {\n result.type = 'null';\n } else {\n result.type = type;\n }\n }\n\n if (properties != null) {\n result.properties = Object.entries(properties).reduce(\n (acc, [key, value]) => {\n acc[key] = convertJSONSchemaToOpenAPISchema(value);\n return acc;\n },\n {} as Record<string, unknown>,\n );\n }\n\n if (items) {\n result.items = Array.isArray(items)\n ? 
items.map(convertJSONSchemaToOpenAPISchema)\n : convertJSONSchemaToOpenAPISchema(items);\n }\n\n if (allOf) {\n result.allOf = allOf.map(convertJSONSchemaToOpenAPISchema);\n }\n if (anyOf) {\n result.anyOf = anyOf.map(convertJSONSchemaToOpenAPISchema);\n }\n if (oneOf) {\n result.oneOf = oneOf.map(convertJSONSchemaToOpenAPISchema);\n }\n\n if (minLength !== undefined) result.minLength = minLength;\n\n return result;\n}\n\nfunction isEmptyObjectSchema(jsonSchema: JSONSchema7Definition): boolean {\n return (\n jsonSchema != null &&\n typeof jsonSchema === 'object' &&\n jsonSchema.type === 'object' &&\n (jsonSchema.properties == null ||\n Object.keys(jsonSchema.properties).length === 0)\n );\n}\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport {\n GoogleGenerativeAIContent,\n GoogleGenerativeAIContentPart,\n GoogleGenerativeAIPrompt,\n} from './google-generative-ai-prompt';\n\nexport function convertToGoogleGenerativeAIMessages(\n prompt: LanguageModelV1Prompt,\n): GoogleGenerativeAIPrompt {\n const systemInstructionParts: Array<{ text: string }> = [];\n const contents: Array<GoogleGenerativeAIContent> = [];\n let systemMessagesAllowed = true;\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n if (!systemMessagesAllowed) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'system messages are only supported at the beginning of the conversation',\n });\n }\n\n systemInstructionParts.push({ text: content });\n break;\n }\n\n case 'user': {\n systemMessagesAllowed = false;\n\n const parts: GoogleGenerativeAIContentPart[] = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n parts.push({ text: part.text });\n break;\n }\n\n case 'image': {\n parts.push(\n part.image instanceof URL\n ? {\n fileData: {\n mimeType: part.mimeType ?? 'image/jpeg',\n fileUri: part.image.toString(),\n },\n }\n : {\n inlineData: {\n mimeType: part.mimeType ?? 'image/jpeg',\n data: convertUint8ArrayToBase64(part.image),\n },\n },\n );\n\n break;\n }\n\n case 'file': {\n parts.push(\n part.data instanceof URL\n ? {\n fileData: {\n mimeType: part.mimeType,\n fileUri: part.data.toString(),\n },\n }\n : {\n inlineData: {\n mimeType: part.mimeType,\n data: part.data,\n },\n },\n );\n\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = part;\n throw new UnsupportedFunctionalityError({\n functionality: `prompt part: ${_exhaustiveCheck}`,\n });\n }\n }\n }\n\n contents.push({ role: 'user', parts });\n break;\n }\n\n case 'assistant': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'model',\n parts: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text.length === 0\n ? undefined\n : { text: part.text };\n }\n case 'tool-call': {\n return {\n functionCall: {\n name: part.toolName,\n args: part.args,\n },\n };\n }\n }\n })\n .filter(\n part => part !== undefined,\n ) as GoogleGenerativeAIContentPart[],\n });\n break;\n }\n\n case 'tool': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'user',\n parts: content.map(part => ({\n functionResponse: {\n name: part.toolName,\n response: {\n name: part.toolName,\n content: part.result,\n },\n },\n })),\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return {\n systemInstruction:\n systemInstructionParts.length > 0\n ? 
{ parts: systemInstructionParts }\n : undefined,\n contents,\n };\n}\n","export function getModelPath(modelId: string): string {\n return modelId.includes('/') ? modelId : `models/${modelId}`;\n}\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst googleErrorDataSchema = z.object({\n error: z.object({\n code: z.number().nullable(),\n message: z.string(),\n status: z.string(),\n }),\n});\n\nexport type GoogleErrorData = z.infer<typeof googleErrorDataSchema>;\n\nexport const googleFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: googleErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\n\nexport function prepareTools(\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n },\n useSearchGrounding: boolean,\n): {\n tools:\n | undefined\n | {\n functionDeclarations: Array<{\n name: string;\n description: string | undefined;\n parameters: unknown;\n }>;\n }\n | { googleSearchRetrieval: Record<string, never> };\n toolConfig:\n | undefined\n | {\n functionCallingConfig: {\n mode: 'AUTO' | 'NONE' | 'ANY';\n allowedFunctionNames?: string[];\n };\n };\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n const tools = mode.tools?.length ? mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (useSearchGrounding) {\n return {\n tools: { googleSearchRetrieval: {} },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n if (tools == null) {\n return { tools: undefined, toolConfig: undefined, toolWarnings };\n }\n\n const functionDeclarations = [];\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n functionDeclarations.push({\n name: tool.name,\n description: tool.description ?? '',\n parameters: convertJSONSchemaToOpenAPISchema(tool.parameters),\n });\n }\n }\n\n const toolChoice = mode.toolChoice;\n\n if (toolChoice == null) {\n return {\n tools: { functionDeclarations },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'AUTO' } },\n toolWarnings,\n };\n case 'none':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'NONE' } },\n toolWarnings,\n };\n case 'required':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'ANY' } },\n toolWarnings,\n };\n case 'tool':\n return {\n tools: { functionDeclarations },\n toolConfig: {\n functionCallingConfig: {\n mode: 'ANY',\n allowedFunctionNames: [toolChoice.toolName],\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapGoogleGenerativeAIFinishReason({\n finishReason,\n hasToolCalls,\n}: {\n finishReason: string | null | undefined;\n hasToolCalls: boolean;\n}): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'STOP':\n return hasToolCalls ? 
'tool-calls' : 'stop';\n case 'MAX_TOKENS':\n return 'length';\n case 'RECITATION':\n case 'SAFETY':\n return 'content-filter';\n case 'FINISH_REASON_UNSPECIFIED':\n case 'OTHER':\n return 'other';\n default:\n return 'unknown';\n }\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { googleFailedResponseHandler } from './google-error';\nimport {\n GoogleGenerativeAIEmbeddingModelId,\n GoogleGenerativeAIEmbeddingSettings,\n} from './google-generative-ai-embedding-settings';\n\ntype GoogleGenerativeAIEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class GoogleGenerativeAIEmbeddingModel\n implements EmbeddingModelV1<string>\n{\n readonly specificationVersion = 'v1';\n readonly modelId: GoogleGenerativeAIEmbeddingModelId;\n\n private readonly config: GoogleGenerativeAIEmbeddingConfig;\n private readonly settings: GoogleGenerativeAIEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return true;\n }\n\n constructor(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings: GoogleGenerativeAIEmbeddingSettings,\n config: GoogleGenerativeAIEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n headers,\n );\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,\n headers: mergedHeaders,\n body: {\n requests: values.map(value => ({\n model: `models/${this.modelId}`,\n content: { role: 'user', parts: [{ text: value }] },\n outputDimensionality: this.settings.outputDimensionality,\n })),\n },\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n googleGenerativeAITextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.embeddings.map(item => item.values),\n usage: undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst googleGenerativeAITextEmbeddingResponseSchema = z.object({\n embeddings: z.array(z.object({ values: z.array(z.number()) 
})),\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,yBAKO;;;ACCP,IAAAC,yBASO;AACP,IAAAC,cAAkB;;;ACXX,SAAS,iCACd,YACS;AAET,MAAI,oBAAoB,UAAU,GAAG;AACnC,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,eAAe,WAAW;AACnC,WAAO,EAAE,MAAM,WAAW,YAAY,CAAC,EAAE;AAAA,EAC3C;AAEA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,EACF,IAAI;AAEJ,QAAM,SAAkC,CAAC;AAEzC,MAAI;AAAa,WAAO,cAAc;AACtC,MAAI;AAAU,WAAO,WAAW;AAChC,MAAI;AAAQ,WAAO,SAAS;AAE5B,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO,CAAC,UAAU;AAAA,EAC3B;AAGA,MAAI,MAAM;AACR,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,UAAI,KAAK,SAAS,MAAM,GAAG;AACzB,eAAO,OAAO,KAAK,OAAO,OAAK,MAAM,MAAM,EAAE,CAAC;AAC9C,eAAO,WAAW;AAAA,MACpB,OAAO;AACL,eAAO,OAAO;AAAA,MAChB;AAAA,IACF,WAAW,SAAS,QAAQ;AAC1B,aAAO,OAAO;AAAA,IAChB,OAAO;AACL,aAAO,OAAO;AAAA,IAChB;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,aAAa,OAAO,QAAQ,UAAU,EAAE;AAAA,MAC7C,CAAC,KAAK,CAAC,KAAK,KAAK,MAAM;AACrB,YAAI,GAAG,IAAI,iCAAiC,KAAK;AACjD,eAAO;AAAA,MACT;AAAA,MACA,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,QAAQ,KAAK,IAC9B,MAAM,IAAI,gCAAgC,IAC1C,iCAAiC,KAAK;AAAA,EAC5C;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AACA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AACA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AAEA,MAAI,cAAc;AAAW,WAAO,YAAY;AAEhD,SAAO;AACT;AAEA,SAAS,oBAAoB,YAA4C;AACvE,SACE,cAAc,QACd,OAAO,eAAe,YACtB,WAAW,SAAS,aACnB,WAAW,cAAc,QACxB,OAAO,KAAK,WAAW,UAAU,EAAE,WAAW;AAEpD;;;AChGA,sBAGO;AACP,4BAA0C;AAOnC,SAAS,oCACd,QAC0B;AAb5B;AAcE,QAAM,yBAAkD,CAAC;AACzD,QAAM,WAA6C,CAAC;AACpD,MAAI,wBAAwB;AAE5B,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,YAAI,CAAC,uBAAuB;AAC1B,gBAAM,IAAI,8CAA8B;AAAA,YACtC,eACE;AAAA,UACJ,CAAC;AAAA,QACH;AAEA,+BAAuB,KAAK,EAAE,MAAM,QAAQ,CAAC;AAC7C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,cAAM,QAAyC,CAAC;AAEhD,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,oBAAM,KAAK,EAAE,MAAM,KAAK,KAAK,CAAC;AAC9B;AAAA,YACF;AAAA,YAEA,KAAK,SAAS;AACZ,oBAAM;AAAA,gBACJ,KAAK,iBAAiB,MAClB;AAAA,kBACE,UAAU;AAAA,oBACR,WAAU,UAAK,aAAL,YAAiB;AAAA,oBAC3B,SAAS,KAAK,MAAM,SAAS;AAAA,kBAC/B;AAAA,gBACF,IACA;AAAA,kBACE,YAAY;AAAA,oBACV,WAAU,UAAK,aAAL,YAAiB;AAAA,oBAC3B,UAAM,iDAA0B,KAAK,KAAK;AAAA,kBAC5C;AAAA,gBACF;AAAA,cACN;AAEA;AAAA,YACF;AAAA,YAEA,KAAK,QAAQ;AACX,oBAAM;AAAA,gBACJ,KAAK,gBAAgB,MACjB;AAAA,kBACE,UAAU;AAAA,oBACR,UAAU,KAAK;AAAA,oBACf,SAAS,KAAK,KAAK,SAAS;AAAA,kBAC9B;AAAA,gBACF,IACA;AAAA,kBACE,YAAY;AAAA,oBACV,UAAU,KAAK;AAAA,oBACf,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACN;AAEA;AAAA,YACF;AAAA,YAEA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,8CAA8B;AAAA,gBACtC,eAAe,gBAAgB,gBAAgB;AAAA,cACjD,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK,EAAE,MAAM,QAAQ,MAAM,CAAC;AACrC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QACJ,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK,KAAK,WAAW,IACxB,SACA,EAAE,MAAM,KAAK,KAAK;AAAA,cACxB;AAAA,cACA,KAAK,aAAa;AAChB,uBAAO;AAAA,kBACL,cAAc;AAAA,oBACZ,MAAM,KAAK;AAAA,oBACX,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC,EACA;AAAA,YACC,UAAQ,SAAS;AAAA,UACnB;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QAAQ,IAAI,WAAS;AAAA,YAC1B,kBAAkB;AAAA,cAChB,MAAM,KAAK;AAAA,cACX,UAAU;AAAA,gBACR,MAAM,KAAK;AAAA,gBACX,SAAS,KAAK;AAAA,cAChB;AAAA,YACF;AAAA,UACF,EAAE;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,mBACE,uBAAuB,SAAS,
IAC5B,EAAE,OAAO,uBAAuB,IAChC;AAAA,IACN;AAAA,EACF;AACF;;;AC9JO,SAAS,aAAa,SAAyB;AACpD,SAAO,QAAQ,SAAS,GAAG,IAAI,UAAU,UAAU,OAAO;AAC5D;;;ACFA,IAAAC,yBAA+C;AAC/C,iBAAkB;AAElB,IAAM,wBAAwB,aAAE,OAAO;AAAA,EACrC,OAAO,aAAE,OAAO;AAAA,IACd,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,SAAS,aAAE,OAAO;AAAA,IAClB,QAAQ,aAAE,OAAO;AAAA,EACnB,CAAC;AACH,CAAC;AAIM,IAAM,kCAA8B,uDAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;AChBD,IAAAC,mBAIO;AAGA,SAAS,aACd,MAGA,oBAqBA;AAhCF;AAiCE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MAAI,oBAAoB;AACtB,WAAO;AAAA,MACL,OAAO,EAAE,uBAAuB,CAAC,EAAE;AAAA,MACnC,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,uBAAuB,CAAC;AAC9B,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,2BAAqB,KAAK;AAAA,QACxB,MAAM,KAAK;AAAA,QACX,cAAa,UAAK,gBAAL,YAAoB;AAAA,QACjC,YAAY,iCAAiC,KAAK,UAAU;AAAA,MAC9D,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACtB,WAAO;AAAA,MACL,OAAO,EAAE,qBAAqB;AAAA,MAC9B,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,MAAM,EAAE;AAAA,QACrD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY;AAAA,UACV,uBAAuB;AAAA,YACrB,MAAM;AAAA,YACN,sBAAsB,CAAC,WAAW,QAAQ;AAAA,UAC5C;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AC5GO,SAAS,kCAAkC;AAAA,EAChD;AAAA,EACA;AACF,GAGgC;AAC9B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO,eAAe,eAAe;AAAA,IACvC,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;ANcO,IAAM,kCAAN,MAAiE;AAAA,EActE,YACE,SACA,UACA,QACA;AAjBF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AACvC,SAAS,oBAAoB;AAgB3B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAjBA,IAAI,4BAA4B;AA1ClC;AA2CI,YAAO,UAAK,SAAS,sBAAd,YAAmC;AAAA,EAC5C;AAAA,EAiBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA7EnD;AA8EI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,mBAAmB;AAAA;AAAA,MAEvB,iBAAiB;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAGA,mBACE,iDAAgB,UAAS,SAAS,qBAAqB;AAAA,MACzD,iBACE,iDAAgB,UAAS,UACzB,eAAe,UAAU;AAAA;AAAA,MAGzB,KAAK,4BACD,iCAAiC,eAAe,MAAM,IACtD;AAAA,IACR;AAEA,UAAM,EAAE,UAAU,kBAAkB,IAClC,oCAAoC,MAAM;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,cAAM,EAAE,OAAO,YAAY,aAAa,IAAI;AAAA,UAC1C;AAAA,WACA,UAAK,SAAS,uBAAd,YAAoC;AAAA,QACtC;AAEA,eAAO;AAAA,UACL,MAAM;AAAA,YACJ;AAAA,YACA;AAAA,YACA;AAAA,YACA,gBAAgB,KAAK,SAAS;AAAA,YAC9B;AAAA,YACA;AAAA,YACA,eAAe,KAAK,SAAS;AAAA,UAC/B;AAAA,UACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,kBAAkB;AAAA,cAChB,GAAG;AAAA,cACH,kBAAkB;AAAA,cAClB,gBACE,KAAK,UAAU;AAAA;AAAA,cAGf,KAAK,4BACD,iCAAiC,KAAK,MAAM,IAC5C;AAAA,YACR;AAAA,YACA;AAAA,YACA;AAAA,YACA,gBAAgB,KAAK,SAAS;AAAA,YAC9B,eAAe,KAAK,SAAS;AAAA,UAC/B;AAAA,UACA;AAAA,QACF;AAAA,
MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ;AAAA,YACA;AAAA,YACA,OAAO;AAAA,cACL,sBAAsB;AAAA,gBACpB;AAAA,kBACE,MAAM,KAAK,KAAK;AAAA,kBAChB,cAAa,UAAK,KAAK,gBAAV,YAAyB;AAAA,kBACtC,YAAY;AAAA,oBACV,KAAK,KAAK;AAAA,kBACZ;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,YACA,YAAY,EAAE,uBAAuB,EAAE,MAAM,MAAM,EAAE;AAAA,YACrD,gBAAgB,KAAK,SAAS;AAAA,YAC9B,eAAe,KAAK,SAAS;AAAA,UAC/B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,YAAY,KAAmB;AAC7B,WAAO,IACJ,SAAS,EACT,WAAW,yDAAyD;AAAA,EACzE;AAAA,EAEA,MAAM,WACJ,SAC6D;AAtMjE;AAuMI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM,oBAAgB;AAAA,MACpB,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD,SAAS;AAAA,MACT,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,kDAA0B,cAAc;AAAA,MACnE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,YAAY,SAAS,WAAW,CAAC;AAEvC,UAAM,YAAY,sBAAsB;AAAA,MACtC,QAAO,qBAAU,YAAV,mBAAmB,UAAnB,YAA4B,CAAC;AAAA,MACpC,YAAY,KAAK,OAAO;AAAA,IAC1B,CAAC;AAED,UAAM,gBAAgB,SAAS;AAE/B,WAAO;AAAA,MACL,MAAM,kBAAiB,qBAAU,YAAV,mBAAmB,UAAnB,YAA4B,CAAC,CAAC;AAAA,MACrD;AAAA,MACA,cAAc,kCAAkC;AAAA,QAC9C,cAAc,UAAU;AAAA,QACxB,cAAc,aAAa,QAAQ,UAAU,SAAS;AAAA,MACxD,CAAC;AAAA,MACD,OAAO;AAAA,QACL,eAAc,oDAAe,qBAAf,YAAmC;AAAA,QACjD,mBAAkB,oDAAe,yBAAf,YAAuC;AAAA,MAC3D;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO,KAAK,UAAU,IAAI;AAChC,UAAM,cAAU;AAAA,MACd,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD;AAAA,MACA,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,yDAAiC,WAAW;AAAA,MACvE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AAEA,UAAMC,cAAa,KAAK,OAAO;AAC/B,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AA/RvC;AAgSY,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,kBAAM,gBAAgB,MAAM;AAE5B,gBAAI,iBAAiB,MAAM;AACzB,sBAAQ;AAAA,gBACN,eAAc,mBAAc,qBAAd,YAAkC;AAAA,gBAChD,mBAAkB,mBAAc,yBAAd,YAAsC;AAAA,cAC1D;AAAA,YACF;AAEA,kBAAM,aAAY,WAAM,eAAN,mBAAmB;AAGrC,gBAAI,aAAa,MAAM;AACrB;AAAA,YACF;AAEA,gBAAI,UAAU,gBAAgB,MAAM;AAClC,6BAAe,kCAAkC;AAAA,gBAC/C,cAAc,UAAU;AAAA,gBACxB;AAAA,cACF,CAAC;AAAA,YACH;AAEA,kBAAM,UAAU,UAAU;AAE1B,gBAAI,WAAW,MAAM;AACnB;AAAA,YACF;AAEA,kBAAM,YAAY,iBAAiB,QAAQ,KAAK;AAChD,gBAAI,aAAa,MAAM;AACrB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW;AAAA,cACb,CAAC;AAAA,YACH;AAEA,kBAAM,iBAAiB,sBAAsB;AAAA,cAC3C,OAAO,QAAQ;AAAA,cACf,YAAAA;AAAA,YACF,CAAC;AAED,gBAAI,kBAAkB,MAAM;AAC1B,yBAAW,YAAY,gBAAgB;AACrC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS;AAAA,kBACnB,eAAe,SAAS;AAAA,gBAC1B,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS;AAAA,kBACnB,MAAM,SAAS;AAAA,gBACjB,CAAC;AAED,+BAAe;AAAA,cACjB;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,UAAU,cAAc,MAAM,CAAC;AAAA,UAC5D;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBA
AgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAEA,SAAS,sBAAsB;AAAA,EAC7B;AAAA,EACA,YAAAA;AACF,GAGG;AACD,QAAM,oBAAoB,MAAM;AAAA,IAC9B,UAAQ,kBAAkB;AAAA,EAC5B;AAMA,SAAO,kBAAkB,WAAW,IAChC,SACA,kBAAkB,IAAI,WAAS;AAAA,IAC7B,cAAc;AAAA,IACd,YAAYA,YAAW;AAAA,IACvB,UAAU,KAAK,aAAa;AAAA,IAC5B,MAAM,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,EAC7C,EAAE;AACR;AAEA,SAAS,iBAAiB,OAA+C;AACvE,QAAM,YAAY,MAAM,OAAO,UAAQ,UAAU,IAAI;AAIrD,SAAO,UAAU,WAAW,IACxB,SACA,UAAU,IAAI,UAAQ,KAAK,IAAI,EAAE,KAAK,EAAE;AAC9C;AAEA,IAAM,gBAAgB,cAAE,OAAO;AAAA,EAC7B,MAAM,cAAE,OAAO;AAAA,EACf,OAAO,cAAE;AAAA,IACP,cAAE,MAAM;AAAA,MACN,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACD,cAAE,OAAO;AAAA,QACP,cAAc,cAAE,OAAO;AAAA,UACrB,MAAM,cAAE,OAAO;AAAA,UACf,MAAM,cAAE,QAAQ;AAAA,QAClB,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC;AAID,IAAM,iBAAiB,cAAE,OAAO;AAAA,EAC9B,YAAY,cAAE;AAAA,IACZ,cAAE,OAAO;AAAA,MACP,SAAS,cAAc,SAAS;AAAA,MAChC,cAAc,cAAE,OAAO,EAAE,SAAS;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,eAAe,cACZ,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO;AAAA,IAC3B,sBAAsB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACzC,iBAAiB,cAAE,OAAO;AAAA,EAC5B,CAAC,EACA,SAAS;AACd,CAAC;AAID,IAAM,cAAc,cAAE,OAAO;AAAA,EAC3B,YAAY,cACT;AAAA,IACC,cAAE,OAAO;AAAA,MACP,SAAS,cAAc,SAAS;AAAA,MAChC,cAAc,cAAE,OAAO,EAAE,SAAS;AAAA,IACpC,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,eAAe,cACZ,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO;AAAA,IAC3B,sBAAsB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACzC,iBAAiB,cAAE,OAAO;AAAA,EAC5B,CAAC,EACA,QAAQ;AACb,CAAC;;;AO7cD,IAAAC,mBAGO;AACP,IAAAC,yBAMO;AACP,IAAAC,cAAkB;AAcX,IAAM,mCAAN,MAEP;AAAA,EAmBE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AACjC,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,wBAAiC;AACnC,WAAO;AAAA,EACT;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,oBAAgB;AAAA,MACpB,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC;AAAA,IACF;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AAAA,MAClD,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,UAAU,OAAO,IAAI,YAAU;AAAA,UAC7B,OAAO,UAAU,KAAK,OAAO;AAAA,UAC7B,SAAS,EAAE,MAAM,QAAQ,OAAO,CAAC,EAAE,MAAM,MAAM,CAAC,EAAE;AAAA,UAClD,sBAAsB,KAAK,SAAS;AAAA,QACtC,EAAE;AAAA,MACJ;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,WAAW,IAAI,UAAQ,KAAK,MAAM;AAAA,MACvD,OAAO;AAAA,MACP,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,gDAAgD,cAAE,OAAO;AAAA,EAC7D,YAAY,cAAE,MAAM,cAAE,OAAO,EAAE,QAAQ,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAC/D,CAAC;;;ARNM,SAAS,yBACd,UAA8C,CAAC,GACnB;AAvG9B;AAwGE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YACA;AAEF,QAAM,aAAa,OAAO;AAAA,IACxB,sBAAkB,mCAAW;AAAA,MAC3B,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,IACD,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAuC,CAAC,MACxC;AAxHJ,QAAAC;AAyHI,eAAI,gCAAgC,SAAS,UAAU;AAAA,MACrD,UAAU;AAAA,MACV;AAAA,MACA,SAAS;AAAA,MACT,aAAYA,MAAA,QAAQ,eAAR,OAAAA,MAAsB;AAAA,MAClC,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA;AAEH,QAAM,uBAAuB,CAC3B,SACA,WAAgD,CAAC,MAEjD,IAAI,iCAAiC,SAAS,UAAU;AAAA,IACtD,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,eAAe;AACxB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AA
KO,IAAM,SAAS,yBAAyB;","names":["import_provider_utils","import_provider_utils","import_zod","import_provider_utils","import_provider","generateId","import_provider","import_provider_utils","import_zod","_a"]}
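For orientation before the compiled diffs that follow: the source embedded in the map above shows the provider factory resolving the API key from `GOOGLE_GENERATIVE_AI_API_KEY`, merging any custom headers on top of `x-goog-api-key`, and defaulting the base URL to the v1beta endpoint. A minimal usage sketch under those assumptions (the header name and model IDs below are illustrative, not taken from this diff):

```ts
import { createGoogleGenerativeAI } from '@ai-sdk/google';

// apiKey is omitted here, so loadApiKey falls back to the
// GOOGLE_GENERATIVE_AI_API_KEY environment variable.
const google = createGoogleGenerativeAI({
  headers: { 'x-example-header': 'demo' }, // spread after x-goog-api-key, so it can override
});

// The returned provider is callable directly (calling it with `new` throws).
const chatModel = google('gemini-1.5-flash'); // model ID is illustrative
const embedder = google.textEmbeddingModel('text-embedding-004'); // illustrative
```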
package/dist/index.mjs
CHANGED
@@ -10,7 +10,8 @@ import {
   combineHeaders,
   createEventSourceResponseHandler,
   createJsonResponseHandler,
-  postJsonToApi
+  postJsonToApi,
+  resolve
 } from "@ai-sdk/provider-utils";
 import { z as z2 } from "zod";
 
@@ -237,10 +238,17 @@ var googleFailedResponseHandler = createJsonErrorResponseHandler({
 import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError2
 } from "@ai-sdk/provider";
-function prepareTools(mode) {
+function prepareTools(mode, useSearchGrounding) {
   var _a, _b;
   const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
   const toolWarnings = [];
+  if (useSearchGrounding) {
+    return {
+      tools: { googleSearchRetrieval: {} },
+      toolConfig: void 0,
+      toolWarnings
+    };
+  }
   if (tools == null) {
     return { tools: void 0, toolConfig: void 0, toolWarnings };
   }
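The hunk above is the compiled form of the new search-grounding branch: when `useSearchGrounding` is enabled, `prepareTools` returns Google's search-retrieval tool and skips function declarations entirely. A hedged sketch of the effect at the call site (the model ID is illustrative; the request shape follows the compiled code above):

```ts
// Assuming a provider created as in the earlier sketch:
const grounded = google('gemini-1.5-flash', { useSearchGrounding: true }); // illustrative ID

// For a regular generation call, prepareTools(mode, true) now yields:
const expectedTools = { googleSearchRetrieval: {} };
const expectedToolConfig = undefined; // no functionCallingConfig is sent
```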
@@ -355,7 +363,7 @@ var GoogleGenerativeAILanguageModel = class {
     responseFormat,
     seed
   }) {
-    var _a;
+    var _a, _b;
     const type = mode.type;
     const warnings = [];
     if (seed != null) {
@@ -382,7 +390,10 @@ var GoogleGenerativeAILanguageModel = class {
     const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(prompt);
     switch (type) {
       case "regular": {
-        const { tools, toolConfig, toolWarnings } = prepareTools(
+        const { tools, toolConfig, toolWarnings } = prepareTools(
+          mode,
+          (_a = this.settings.useSearchGrounding) != null ? _a : false
+        );
         return {
           args: {
             generationConfig,
@@ -423,7 +434,7 @@ var GoogleGenerativeAILanguageModel = class {
             functionDeclarations: [
               {
                 name: mode.tool.name,
-                description: (
+                description: (_b = mode.tool.description) != null ? _b : "",
                 parameters: convertJSONSchemaToOpenAPISchema(
                   mode.tool.parameters
                 )
@@ -447,14 +458,18 @@ var GoogleGenerativeAILanguageModel = class {
     return url.toString().startsWith("https://generativelanguage.googleapis.com/v1beta/files/");
   }
   async doGenerate(options) {
-    var _a, _b;
+    var _a, _b, _c, _d, _e, _f;
     const { args, warnings } = await this.getArgs(options);
     const body = JSON.stringify(args);
+    const mergedHeaders = combineHeaders(
+      await resolve(this.config.headers),
+      options.headers
+    );
     const { responseHeaders, value: response } = await postJsonToApi({
       url: `${this.config.baseURL}/${getModelPath(
         this.modelId
       )}:generateContent`,
-      headers:
+      headers: mergedHeaders,
       body: args,
       failedResponseHandler: googleFailedResponseHandler,
       successfulResponseHandler: createJsonResponseHandler(responseSchema),
@@ -464,20 +479,20 @@ var GoogleGenerativeAILanguageModel = class {
     const { contents: rawPrompt, ...rawSettings } = args;
     const candidate = response.candidates[0];
     const toolCalls = getToolCallsFromParts({
-      parts: candidate.content.parts,
+      parts: (_b = (_a = candidate.content) == null ? void 0 : _a.parts) != null ? _b : [],
       generateId: this.config.generateId
     });
     const usageMetadata = response.usageMetadata;
     return {
-      text: getTextFromParts(candidate.content.parts),
+      text: getTextFromParts((_d = (_c = candidate.content) == null ? void 0 : _c.parts) != null ? _d : []),
       toolCalls,
       finishReason: mapGoogleGenerativeAIFinishReason({
         finishReason: candidate.finishReason,
         hasToolCalls: toolCalls != null && toolCalls.length > 0
       }),
       usage: {
-        promptTokens: (
-        completionTokens: (
+        promptTokens: (_e = usageMetadata == null ? void 0 : usageMetadata.promptTokenCount) != null ? _e : NaN,
+        completionTokens: (_f = usageMetadata == null ? void 0 : usageMetadata.candidatesTokenCount) != null ? _f : NaN
       },
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
@@ -488,11 +503,15 @@ var GoogleGenerativeAILanguageModel = class {
   async doStream(options) {
     const { args, warnings } = await this.getArgs(options);
     const body = JSON.stringify(args);
+    const headers = combineHeaders(
+      await resolve(this.config.headers),
+      options.headers
+    );
     const { responseHeaders, value: response } = await postJsonToApi({
       url: `${this.config.baseURL}/${getModelPath(
         this.modelId
       )}:streamGenerateContent?alt=sse`,
-      headers
+      headers,
       body: args,
       failedResponseHandler: googleFailedResponseHandler,
       successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),
@@ -618,7 +637,7 @@ var contentSchema = z2.object({
 var responseSchema = z2.object({
   candidates: z2.array(
     z2.object({
-      content: contentSchema,
+      content: contentSchema.optional(),
       finishReason: z2.string().optional()
     })
   ),
@@ -649,7 +668,8 @@ import {
 import {
   combineHeaders as combineHeaders2,
   createJsonResponseHandler as createJsonResponseHandler2,
-  postJsonToApi as postJsonToApi2
+  postJsonToApi as postJsonToApi2,
+  resolve as resolve2
 } from "@ai-sdk/provider-utils";
 import { z as z3 } from "zod";
 var GoogleGenerativeAIEmbeddingModel = class {
@@ -681,9 +701,13 @@ var GoogleGenerativeAIEmbeddingModel = class {
         values
       });
     }
+    const mergedHeaders = combineHeaders2(
+      await resolve2(this.config.headers),
+      headers
+    );
     const { responseHeaders, value: response } = await postJsonToApi2({
       url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,
-      headers:
+      headers: mergedHeaders,
       body: {
         requests: values.map((value) => ({
           model: `models/${this.modelId}`,
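The final hunk routes the embedding request through the same resolvable-headers helper before posting to `:batchEmbedContents`. A rough usage sketch (model ID, input strings, and the dimensionality value are illustrative; `outputDimensionality` is the embedding setting referenced in the embedded source):

```ts
// Assuming the provider from the earlier sketch, inside an async context:
const embedModel = google.textEmbeddingModel('text-embedding-004', {
  outputDimensionality: 512, // illustrative value
});

// doEmbed merges resolved provider headers with per-call headers and sends
// one request per value to models/<modelId>:batchEmbedContents.
const { embeddings, rawResponse } = await embedModel.doEmbed({
  values: ['hello world', 'goodbye world'],
});
```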