@ai-sdk/google 1.0.3 → 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/dist/index.d.mts +5 -2
- package/dist/index.d.ts +5 -2
- package/dist/index.js +40 -17
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +44 -19
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.d.mts +61 -0
- package/internal/dist/index.d.ts +61 -0
- package/internal/dist/index.js +676 -0
- package/internal/dist/index.js.map +1 -0
- package/internal/dist/index.mjs +659 -0
- package/internal/dist/index.mjs.map +1 -0
- package/package.json +10 -3
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,19 @@
 # @ai-sdk/google
 
+## 1.0.5
+
+### Patch Changes
+
+- 0984f0b: feat (provider/google-vertex): Rewrite for Edge runtime support.
+- Updated dependencies [0984f0b]
+  - @ai-sdk/provider-utils@2.0.3
+
+## 1.0.4
+
+### Patch Changes
+
+- 6373c60: fix (provider/google): send json schema into provider
+
 ## 1.0.3
 
 ### Patch Changes
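The 1.0.4 entry ("send json schema into provider") and the structured-output changes further down surface to applications through the AI SDK's object-generation path. A minimal sketch, assuming the `ai` and `zod` packages and an illustrative Gemini model id (none of these identifiers come from this diff):

```ts
// Sketch only: generateObject and z come from the `ai` and `zod` packages,
// and the model id is illustrative; with structured outputs enabled, the
// provider forwards the JSON schema to Google as responseSchema (fix 6373c60).
import { google } from '@ai-sdk/google';
import { generateObject } from 'ai';
import { z } from 'zod';

const { object } = await generateObject({
  model: google('gemini-1.5-pro-latest', { structuredOutputs: true }),
  schema: z.object({ name: z.string(), age: z.number() }),
  prompt: 'Generate a fictional person.',
});
```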
package/dist/index.d.mts
CHANGED
@@ -22,7 +22,7 @@ interface GoogleGenerativeAISettings {
     Optional. A list of unique safety settings for blocking unsafe content.
      */
     safetySettings?: Array<{
-        category: 'HARM_CATEGORY_HATE_SPEECH' | 'HARM_CATEGORY_DANGEROUS_CONTENT' | 'HARM_CATEGORY_HARASSMENT' | 'HARM_CATEGORY_SEXUALLY_EXPLICIT';
+        category: 'HARM_CATEGORY_UNSPECIFIED' | 'HARM_CATEGORY_HATE_SPEECH' | 'HARM_CATEGORY_DANGEROUS_CONTENT' | 'HARM_CATEGORY_HARASSMENT' | 'HARM_CATEGORY_SEXUALLY_EXPLICIT' | 'HARM_CATEGORY_CIVIC_INTEGRITY';
         threshold: 'HARM_BLOCK_THRESHOLD_UNSPECIFIED' | 'BLOCK_LOW_AND_ABOVE' | 'BLOCK_MEDIUM_AND_ABOVE' | 'BLOCK_ONLY_HIGH' | 'BLOCK_NONE';
     }>;
 }
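The `category` union gains `HARM_CATEGORY_UNSPECIFIED` and `HARM_CATEGORY_CIVIC_INTEGRITY`. A minimal sketch of passing one of the new values through the settings; the model id is illustrative:

```ts
import { google } from '@ai-sdk/google';

// Sketch: the widened union now accepts the civic-integrity category
// (the model id is illustrative, not taken from this diff).
const model = google('gemini-1.5-flash', {
  safetySettings: [
    {
      category: 'HARM_CATEGORY_CIVIC_INTEGRITY',
      threshold: 'BLOCK_MEDIUM_AND_ABOVE',
    },
  ],
});
```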
@@ -68,12 +68,15 @@ interface GoogleGenerativeAIProviderSettings {
     /**
     Custom headers to include in the requests.
      */
-    headers?: Record<string, string>;
+    headers?: Record<string, string | undefined>;
     /**
     Custom fetch implementation. You can use it as a middleware to intercept requests,
     or to provide a custom fetch implementation for e.g. testing.
      */
     fetch?: FetchFunction;
+    /**
+    Optional function to generate a unique ID for each request.
+     */
     generateId?: () => string;
 }
 /**
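`headers` now accepts `undefined` values and `generateId` is documented on the provider settings. A minimal sketch of provider construction under these types; the custom header name, the `process.env` lookup, and `crypto.randomUUID()` are assumptions for illustration, not part of the package:

```ts
import { createGoogleGenerativeAI } from '@ai-sdk/google';

// Sketch: undefined header values now type-check (process.env lookups are
// string | undefined), and generateId can override the default id generator.
const google = createGoogleGenerativeAI({
  headers: {
    'x-custom-header': process.env.MY_CUSTOM_HEADER, // string | undefined
  },
  generateId: () => crypto.randomUUID(),
});
```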
package/dist/index.d.ts
CHANGED
@@ -22,7 +22,7 @@ interface GoogleGenerativeAISettings {
     Optional. A list of unique safety settings for blocking unsafe content.
      */
     safetySettings?: Array<{
-        category: 'HARM_CATEGORY_HATE_SPEECH' | 'HARM_CATEGORY_DANGEROUS_CONTENT' | 'HARM_CATEGORY_HARASSMENT' | 'HARM_CATEGORY_SEXUALLY_EXPLICIT';
+        category: 'HARM_CATEGORY_UNSPECIFIED' | 'HARM_CATEGORY_HATE_SPEECH' | 'HARM_CATEGORY_DANGEROUS_CONTENT' | 'HARM_CATEGORY_HARASSMENT' | 'HARM_CATEGORY_SEXUALLY_EXPLICIT' | 'HARM_CATEGORY_CIVIC_INTEGRITY';
         threshold: 'HARM_BLOCK_THRESHOLD_UNSPECIFIED' | 'BLOCK_LOW_AND_ABOVE' | 'BLOCK_MEDIUM_AND_ABOVE' | 'BLOCK_ONLY_HIGH' | 'BLOCK_NONE';
     }>;
 }
@@ -68,12 +68,15 @@ interface GoogleGenerativeAIProviderSettings {
     /**
     Custom headers to include in the requests.
      */
-    headers?: Record<string, string>;
+    headers?: Record<string, string | undefined>;
     /**
     Custom fetch implementation. You can use it as a middleware to intercept requests,
     or to provide a custom fetch implementation for e.g. testing.
      */
     fetch?: FetchFunction;
+    /**
+    Optional function to generate a unique ID for each request.
+     */
     generateId?: () => string;
 }
 /**
package/dist/index.js
CHANGED
@@ -251,10 +251,17 @@ var googleFailedResponseHandler = (0, import_provider_utils2.createJsonErrorResp
 
 // src/google-prepare-tools.ts
 var import_provider2 = require("@ai-sdk/provider");
-function prepareTools(mode) {
+function prepareTools(mode, useSearchGrounding) {
   var _a, _b;
   const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
   const toolWarnings = [];
+  if (useSearchGrounding) {
+    return {
+      tools: { googleSearchRetrieval: {} },
+      toolConfig: void 0,
+      toolWarnings
+    };
+  }
   if (tools == null) {
     return { tools: void 0, toolConfig: void 0, toolWarnings };
   }
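With `useSearchGrounding` set, `prepareTools` now short-circuits to Google's `googleSearchRetrieval` tool and skips function declarations. A minimal sketch of enabling it from application code, assuming the `ai` package's `generateText` and an illustrative model id:

```ts
import { google } from '@ai-sdk/google';
import { generateText } from 'ai';

// Sketch: with useSearchGrounding enabled, the request carries
// tools: { googleSearchRetrieval: {} } instead of function declarations.
const { text } = await generateText({
  model: google('gemini-1.5-flash', { useSearchGrounding: true }),
  prompt: 'Summarize the latest news about the AI SDK.',
});
```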
@@ -349,8 +356,9 @@ var GoogleGenerativeAILanguageModel = class {
     this.settings = settings;
     this.config = config;
   }
-  get supportsObjectGeneration() {
-    return this.settings.structuredOutputs !== false;
+  get supportsStructuredOutputs() {
+    var _a;
+    return (_a = this.settings.structuredOutputs) != null ? _a : true;
   }
   get provider() {
     return this.config.provider;
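The getter is renamed to `supportsStructuredOutputs`; per the new sourcemap its source form is `this.settings.structuredOutputs ?? true`, so structured outputs stay enabled unless explicitly set to `false`. A standalone sketch of that behavior, with a simplified settings type for illustration:

```ts
// Sketch of the renamed getter's behavior with a simplified settings type;
// the real class reads this.settings.structuredOutputs ?? true (see sourcemap).
interface Settings {
  structuredOutputs?: boolean;
}

class ModelSketch {
  constructor(private settings: Settings) {}

  get supportsStructuredOutputs(): boolean {
    return this.settings.structuredOutputs ?? true;
  }
}

console.log(new ModelSketch({}).supportsStructuredOutputs); // true
console.log(new ModelSketch({ structuredOutputs: false }).supportsStructuredOutputs); // false
```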
@@ -368,7 +376,7 @@ var GoogleGenerativeAILanguageModel = class {
     responseFormat,
     seed
   }) {
-    var _a;
+    var _a, _b;
     const type = mode.type;
     const warnings = [];
     if (seed != null) {
@@ -390,12 +398,15 @@ var GoogleGenerativeAILanguageModel = class {
       responseMimeType: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? "application/json" : void 0,
       responseSchema: (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && // Google GenAI does not support all OpenAPI Schema features,
       // so this is needed as an escape hatch:
-      this.supportsObjectGeneration ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0
+      this.supportsStructuredOutputs ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0
     };
     const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(prompt);
     switch (type) {
       case "regular": {
-        const { tools, toolConfig, toolWarnings } = prepareTools(mode);
+        const { tools, toolConfig, toolWarnings } = prepareTools(
+          mode,
+          (_a = this.settings.useSearchGrounding) != null ? _a : false
+        );
         return {
           args: {
             generationConfig,
@@ -417,7 +428,7 @@ var GoogleGenerativeAILanguageModel = class {
             responseMimeType: "application/json",
             responseSchema: mode.schema != null && // Google GenAI does not support all OpenAPI Schema features,
             // so this is needed as an escape hatch:
-            this.supportsObjectGeneration ? convertJSONSchemaToOpenAPISchema(mode.schema) : void 0
+            this.supportsStructuredOutputs ? convertJSONSchemaToOpenAPISchema(mode.schema) : void 0
           },
           contents,
           systemInstruction,
@@ -436,7 +447,7 @@ var GoogleGenerativeAILanguageModel = class {
             functionDeclarations: [
               {
                 name: mode.tool.name,
-                description: (_a = mode.tool.description) != null ? _a : "",
+                description: (_b = mode.tool.description) != null ? _b : "",
                 parameters: convertJSONSchemaToOpenAPISchema(
                   mode.tool.parameters
                 )
@@ -460,14 +471,18 @@ var GoogleGenerativeAILanguageModel = class {
     return url.toString().startsWith("https://generativelanguage.googleapis.com/v1beta/files/");
   }
   async doGenerate(options) {
-    var _a, _b;
+    var _a, _b, _c, _d, _e, _f;
     const { args, warnings } = await this.getArgs(options);
     const body = JSON.stringify(args);
+    const mergedHeaders = (0, import_provider_utils3.combineHeaders)(
+      await (0, import_provider_utils3.resolve)(this.config.headers),
+      options.headers
+    );
     const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
       url: `${this.config.baseURL}/${getModelPath(
         this.modelId
       )}:generateContent`,
-      headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
+      headers: mergedHeaders,
       body: args,
       failedResponseHandler: googleFailedResponseHandler,
       successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(responseSchema),
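`doGenerate` now awaits `resolve(this.config.headers)` before merging with per-request headers; per the new sourcemap, `config.headers` is typed as `Resolvable<Record<string, string | undefined>>`, so it may be a plain object, a function, or a promise. A minimal sketch of that resolution step, with illustrative header values:

```ts
import { combineHeaders, resolve, type Resolvable } from '@ai-sdk/provider-utils';

// Sketch: config.headers may now be a value, a function, or a promise;
// it is awaited and then merged with the per-request headers.
const configHeaders: Resolvable<Record<string, string | undefined>> = async () => ({
  'x-goog-api-key': process.env.GOOGLE_GENERATIVE_AI_API_KEY,
});

const perRequestHeaders = { 'x-request-source': 'example' }; // illustrative

const mergedHeaders = combineHeaders(await resolve(configHeaders), perRequestHeaders);
```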
@@ -477,20 +492,20 @@ var GoogleGenerativeAILanguageModel = class {
     const { contents: rawPrompt, ...rawSettings } = args;
     const candidate = response.candidates[0];
     const toolCalls = getToolCallsFromParts({
-      parts: candidate.content.parts,
+      parts: (_b = (_a = candidate.content) == null ? void 0 : _a.parts) != null ? _b : [],
       generateId: this.config.generateId
     });
     const usageMetadata = response.usageMetadata;
     return {
-      text: getTextFromParts(candidate.content.parts),
+      text: getTextFromParts((_d = (_c = candidate.content) == null ? void 0 : _c.parts) != null ? _d : []),
       toolCalls,
       finishReason: mapGoogleGenerativeAIFinishReason({
         finishReason: candidate.finishReason,
         hasToolCalls: toolCalls != null && toolCalls.length > 0
       }),
       usage: {
-        promptTokens: (_a = usageMetadata == null ? void 0 : usageMetadata.promptTokenCount) != null ? _a : NaN,
-        completionTokens: (_b = usageMetadata == null ? void 0 : usageMetadata.candidatesTokenCount) != null ? _b : NaN
+        promptTokens: (_e = usageMetadata == null ? void 0 : usageMetadata.promptTokenCount) != null ? _e : NaN,
+        completionTokens: (_f = usageMetadata == null ? void 0 : usageMetadata.candidatesTokenCount) != null ? _f : NaN
       },
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
|
@@ -501,11 +516,15 @@ var GoogleGenerativeAILanguageModel = class {
|
|
501
516
|
async doStream(options) {
|
502
517
|
const { args, warnings } = await this.getArgs(options);
|
503
518
|
const body = JSON.stringify(args);
|
519
|
+
const headers = (0, import_provider_utils3.combineHeaders)(
|
520
|
+
await (0, import_provider_utils3.resolve)(this.config.headers),
|
521
|
+
options.headers
|
522
|
+
);
|
504
523
|
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
|
505
524
|
url: `${this.config.baseURL}/${getModelPath(
|
506
525
|
this.modelId
|
507
526
|
)}:streamGenerateContent?alt=sse`,
|
508
|
-
headers
|
527
|
+
headers,
|
509
528
|
body: args,
|
510
529
|
failedResponseHandler: googleFailedResponseHandler,
|
511
530
|
successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(chunkSchema),
|
@@ -631,7 +650,7 @@ var contentSchema = import_zod2.z.object({
 var responseSchema = import_zod2.z.object({
   candidates: import_zod2.z.array(
     import_zod2.z.object({
-      content: contentSchema,
+      content: contentSchema.optional(),
       finishReason: import_zod2.z.string().optional()
     })
   ),
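`content` is now optional on each candidate in `responseSchema`, so a response whose candidate carries only a `finishReason` (e.g. a blocked generation) parses instead of failing validation. A small zod sketch mirroring the schema shapes from the sourcemap:

```ts
import { z } from 'zod';

// Sketch mirroring the schemas in the sourcemap: content is now optional,
// so a candidate that only reports a finishReason still parses.
const contentSchema = z.object({
  role: z.string(),
  parts: z.array(
    z.union([
      z.object({ text: z.string() }),
      z.object({ functionCall: z.object({ name: z.string(), args: z.unknown() }) }),
    ]),
  ),
});

const responseSchema = z.object({
  candidates: z.array(
    z.object({
      content: contentSchema.optional(),
      finishReason: z.string().optional(),
    }),
  ),
});

responseSchema.parse({ candidates: [{ finishReason: 'SAFETY' }] }); // no longer throws
```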
@@ -688,9 +707,13 @@ var GoogleGenerativeAIEmbeddingModel = class {
         values
       });
     }
+    const mergedHeaders = (0, import_provider_utils4.combineHeaders)(
+      await (0, import_provider_utils4.resolve)(this.config.headers),
+      headers
+    );
     const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
       url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,
-      headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), headers),
+      headers: mergedHeaders,
       body: {
         requests: values.map((value) => ({
           model: `models/${this.modelId}`,
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/index.ts","../src/google-provider.ts","../src/google-generative-ai-language-model.ts","../src/convert-json-schema-to-openapi-schema.ts","../src/convert-to-google-generative-ai-messages.ts","../src/get-model-path.ts","../src/google-error.ts","../src/google-prepare-tools.ts","../src/map-google-generative-ai-finish-reason.ts","../src/google-generative-ai-embedding-model.ts"],"sourcesContent":["export { createGoogleGenerativeAI, google } from './google-provider';\nexport type {\n GoogleGenerativeAIProvider,\n GoogleGenerativeAIProviderSettings,\n} from './google-provider';\n","import {\n FetchFunction,\n generateId,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { GoogleGenerativeAILanguageModel } from './google-generative-ai-language-model';\nimport {\n GoogleGenerativeAIModelId,\n GoogleGenerativeAISettings,\n} from './google-generative-ai-settings';\nimport { GoogleGenerativeAIEmbeddingModel } from './google-generative-ai-embedding-model';\nimport {\n GoogleGenerativeAIEmbeddingModelId,\n GoogleGenerativeAIEmbeddingSettings,\n} from './google-generative-ai-embedding-settings';\nimport {\n EmbeddingModelV1,\n LanguageModelV1,\n ProviderV1,\n} from '@ai-sdk/provider';\n\nexport interface GoogleGenerativeAIProvider extends ProviderV1 {\n (\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): LanguageModelV1;\n\n languageModel(\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): LanguageModelV1;\n\n chat(\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): LanguageModelV1;\n\n /**\n * @deprecated Use `chat()` instead.\n */\n generativeAI(\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): LanguageModelV1;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n embedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings?: GoogleGenerativeAIEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n textEmbedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings?: GoogleGenerativeAIEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n textEmbeddingModel(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings?: GoogleGenerativeAIEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n}\n\nexport interface GoogleGenerativeAIProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://generativelanguage.googleapis.com/v1beta`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `x-goog-api-key` header.\nIt defaults to the `GOOGLE_GENERATIVE_AI_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. 
testing.\n */\n fetch?: FetchFunction;\n\n generateId?: () => string;\n}\n\n/**\nCreate a Google Generative AI provider instance.\n */\nexport function createGoogleGenerativeAI(\n options: GoogleGenerativeAIProviderSettings = {},\n): GoogleGenerativeAIProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ??\n 'https://generativelanguage.googleapis.com/v1beta';\n\n const getHeaders = () => ({\n 'x-goog-api-key': loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'GOOGLE_GENERATIVE_AI_API_KEY',\n description: 'Google Generative AI',\n }),\n ...options.headers,\n });\n\n const createChatModel = (\n modelId: GoogleGenerativeAIModelId,\n settings: GoogleGenerativeAISettings = {},\n ) =>\n new GoogleGenerativeAILanguageModel(modelId, settings, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n generateId: options.generateId ?? generateId,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings: GoogleGenerativeAIEmbeddingSettings = {},\n ) =>\n new GoogleGenerativeAIEmbeddingModel(modelId, settings, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The Google Generative AI model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId, settings);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.generativeAI = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n return provider as GoogleGenerativeAIProvider;\n}\n\n/**\nDefault Google Generative AI provider instance.\n */\nexport const google = createGoogleGenerativeAI();\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\nimport { convertToGoogleGenerativeAIMessages } from './convert-to-google-generative-ai-messages';\nimport { getModelPath } from './get-model-path';\nimport { googleFailedResponseHandler } from './google-error';\nimport { GoogleGenerativeAIContentPart } from './google-generative-ai-prompt';\nimport {\n GoogleGenerativeAIModelId,\n GoogleGenerativeAISettings,\n} from './google-generative-ai-settings';\nimport { prepareTools } from './google-prepare-tools';\nimport { mapGoogleGenerativeAIFinishReason } from './map-google-generative-ai-finish-reason';\n\ntype GoogleGenerativeAIConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n generateId: () => string;\n fetch?: FetchFunction;\n};\n\nexport class GoogleGenerativeAILanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = 'json';\n readonly supportsImageUrls = false;\n\n get supportsObjectGeneration() {\n return this.settings.structuredOutputs !== false;\n }\n\n readonly modelId: GoogleGenerativeAIModelId;\n readonly settings: GoogleGenerativeAISettings;\n\n 
private readonly config: GoogleGenerativeAIConfig;\n\n constructor(\n modelId: GoogleGenerativeAIModelId,\n settings: GoogleGenerativeAISettings,\n config: GoogleGenerativeAIConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private async getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (seed != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'seed',\n });\n }\n\n const generationConfig = {\n // standardized settings:\n maxOutputTokens: maxTokens,\n temperature,\n topK,\n topP,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n\n // response format:\n responseMimeType:\n responseFormat?.type === 'json' ? 'application/json' : undefined,\n responseSchema:\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n // Google GenAI does not support all OpenAPI Schema features,\n // so this is needed as an escape hatch:\n this.supportsObjectGeneration\n ? convertJSONSchemaToOpenAPISchema(responseFormat.schema)\n : undefined,\n };\n\n const { contents, systemInstruction } =\n convertToGoogleGenerativeAIMessages(prompt);\n\n switch (type) {\n case 'regular': {\n const { tools, toolConfig, toolWarnings } = prepareTools(mode);\n\n return {\n args: {\n generationConfig,\n contents,\n systemInstruction,\n safetySettings: this.settings.safetySettings,\n tools,\n toolConfig,\n cachedContent: this.settings.cachedContent,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case 'object-json': {\n return {\n args: {\n generationConfig: {\n ...generationConfig,\n responseMimeType: 'application/json',\n responseSchema:\n mode.schema != null &&\n // Google GenAI does not support all OpenAPI Schema features,\n // so this is needed as an escape hatch:\n this.supportsObjectGeneration\n ? convertJSONSchemaToOpenAPISchema(mode.schema)\n : undefined,\n },\n contents,\n systemInstruction,\n safetySettings: this.settings.safetySettings,\n cachedContent: this.settings.cachedContent,\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n generationConfig,\n contents,\n tools: {\n functionDeclarations: [\n {\n name: mode.tool.name,\n description: mode.tool.description ?? 
'',\n parameters: convertJSONSchemaToOpenAPISchema(\n mode.tool.parameters,\n ),\n },\n ],\n },\n toolConfig: { functionCallingConfig: { mode: 'ANY' } },\n safetySettings: this.settings.safetySettings,\n cachedContent: this.settings.cachedContent,\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n supportsUrl(url: URL): boolean {\n return url\n .toString()\n .startsWith('https://generativelanguage.googleapis.com/v1beta/files/');\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = JSON.stringify(args);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:generateContent`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(responseSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { contents: rawPrompt, ...rawSettings } = args;\n const candidate = response.candidates[0];\n\n const toolCalls = getToolCallsFromParts({\n parts: candidate.content.parts,\n generateId: this.config.generateId,\n });\n\n const usageMetadata = response.usageMetadata;\n\n return {\n text: getTextFromParts(candidate.content.parts),\n toolCalls,\n finishReason: mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls: toolCalls != null && toolCalls.length > 0,\n }),\n usage: {\n promptTokens: usageMetadata?.promptTokenCount ?? NaN,\n completionTokens: usageMetadata?.candidatesTokenCount ?? NaN,\n },\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = JSON.stringify(args);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:streamGenerateContent?alt=sse`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { contents: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n\n const generateId = this.config.generateId;\n let hasToolCalls = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n const usageMetadata = value.usageMetadata;\n\n if (usageMetadata != null) {\n usage = {\n promptTokens: usageMetadata.promptTokenCount ?? NaN,\n completionTokens: usageMetadata.candidatesTokenCount ?? 
NaN,\n };\n }\n\n const candidate = value.candidates?.[0];\n\n // sometimes the API returns an empty candidates array\n if (candidate == null) {\n return;\n }\n\n if (candidate.finishReason != null) {\n finishReason = mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls,\n });\n }\n\n const content = candidate.content;\n\n if (content == null) {\n return;\n }\n\n const deltaText = getTextFromParts(content.parts);\n if (deltaText != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: deltaText,\n });\n }\n\n const toolCallDeltas = getToolCallsFromParts({\n parts: content.parts,\n generateId,\n });\n\n if (toolCallDeltas != null) {\n for (const toolCall of toolCallDeltas) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n argsTextDelta: toolCall.args,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n args: toolCall.args,\n });\n\n hasToolCalls = true;\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({ type: 'finish', finishReason, usage });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body },\n };\n }\n}\n\nfunction getToolCallsFromParts({\n parts,\n generateId,\n}: {\n parts: z.infer<typeof contentSchema>['parts'];\n generateId: () => string;\n}) {\n const functionCallParts = parts.filter(\n part => 'functionCall' in part,\n ) as Array<\n GoogleGenerativeAIContentPart & {\n functionCall: { name: string; args: unknown };\n }\n >;\n\n return functionCallParts.length === 0\n ? undefined\n : functionCallParts.map(part => ({\n toolCallType: 'function' as const,\n toolCallId: generateId(),\n toolName: part.functionCall.name,\n args: JSON.stringify(part.functionCall.args),\n }));\n}\n\nfunction getTextFromParts(parts: z.infer<typeof contentSchema>['parts']) {\n const textParts = parts.filter(part => 'text' in part) as Array<\n GoogleGenerativeAIContentPart & { text: string }\n >;\n\n return textParts.length === 0\n ? 
undefined\n : textParts.map(part => part.text).join('');\n}\n\nconst contentSchema = z.object({\n role: z.string(),\n parts: z.array(\n z.union([\n z.object({\n text: z.string(),\n }),\n z.object({\n functionCall: z.object({\n name: z.string(),\n args: z.unknown(),\n }),\n }),\n ]),\n ),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst responseSchema = z.object({\n candidates: z.array(\n z.object({\n content: contentSchema,\n finishReason: z.string().optional(),\n }),\n ),\n usageMetadata: z\n .object({\n promptTokenCount: z.number(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number(),\n })\n .optional(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst chunkSchema = z.object({\n candidates: z\n .array(\n z.object({\n content: contentSchema.optional(),\n finishReason: z.string().optional(),\n }),\n )\n .nullish(),\n usageMetadata: z\n .object({\n promptTokenCount: z.number(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number(),\n })\n .nullish(),\n});\n","import { JSONSchema7Definition } from '@ai-sdk/provider';\n\n/**\n * Converts JSON Schema 7 to OpenAPI Schema 3.0\n */\nexport function convertJSONSchemaToOpenAPISchema(\n jsonSchema: JSONSchema7Definition,\n): unknown {\n // parameters need to be undefined if they are empty objects:\n if (isEmptyObjectSchema(jsonSchema)) {\n return undefined;\n }\n\n if (typeof jsonSchema === 'boolean') {\n return { type: 'boolean', properties: {} };\n }\n\n const {\n type,\n description,\n required,\n properties,\n items,\n allOf,\n anyOf,\n oneOf,\n format,\n const: constValue,\n minLength,\n } = jsonSchema;\n\n const result: Record<string, unknown> = {};\n\n if (description) result.description = description;\n if (required) result.required = required;\n if (format) result.format = format;\n\n if (constValue !== undefined) {\n result.enum = [constValue];\n }\n\n // Handle type\n if (type) {\n if (Array.isArray(type)) {\n if (type.includes('null')) {\n result.type = type.filter(t => t !== 'null')[0];\n result.nullable = true;\n } else {\n result.type = type;\n }\n } else if (type === 'null') {\n result.type = 'null';\n } else {\n result.type = type;\n }\n }\n\n if (properties != null) {\n result.properties = Object.entries(properties).reduce(\n (acc, [key, value]) => {\n acc[key] = convertJSONSchemaToOpenAPISchema(value);\n return acc;\n },\n {} as Record<string, unknown>,\n );\n }\n\n if (items) {\n result.items = Array.isArray(items)\n ? 
items.map(convertJSONSchemaToOpenAPISchema)\n : convertJSONSchemaToOpenAPISchema(items);\n }\n\n if (allOf) {\n result.allOf = allOf.map(convertJSONSchemaToOpenAPISchema);\n }\n if (anyOf) {\n result.anyOf = anyOf.map(convertJSONSchemaToOpenAPISchema);\n }\n if (oneOf) {\n result.oneOf = oneOf.map(convertJSONSchemaToOpenAPISchema);\n }\n\n if (minLength !== undefined) result.minLength = minLength;\n\n return result;\n}\n\nfunction isEmptyObjectSchema(jsonSchema: JSONSchema7Definition): boolean {\n return (\n jsonSchema != null &&\n typeof jsonSchema === 'object' &&\n jsonSchema.type === 'object' &&\n (jsonSchema.properties == null ||\n Object.keys(jsonSchema.properties).length === 0)\n );\n}\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport {\n GoogleGenerativeAIContent,\n GoogleGenerativeAIContentPart,\n GoogleGenerativeAIPrompt,\n} from './google-generative-ai-prompt';\n\nexport function convertToGoogleGenerativeAIMessages(\n prompt: LanguageModelV1Prompt,\n): GoogleGenerativeAIPrompt {\n const systemInstructionParts: Array<{ text: string }> = [];\n const contents: Array<GoogleGenerativeAIContent> = [];\n let systemMessagesAllowed = true;\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n if (!systemMessagesAllowed) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'system messages are only supported at the beginning of the conversation',\n });\n }\n\n systemInstructionParts.push({ text: content });\n break;\n }\n\n case 'user': {\n systemMessagesAllowed = false;\n\n const parts: GoogleGenerativeAIContentPart[] = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n parts.push({ text: part.text });\n break;\n }\n\n case 'image': {\n parts.push(\n part.image instanceof URL\n ? {\n fileData: {\n mimeType: part.mimeType ?? 'image/jpeg',\n fileUri: part.image.toString(),\n },\n }\n : {\n inlineData: {\n mimeType: part.mimeType ?? 'image/jpeg',\n data: convertUint8ArrayToBase64(part.image),\n },\n },\n );\n\n break;\n }\n\n case 'file': {\n parts.push(\n part.data instanceof URL\n ? {\n fileData: {\n mimeType: part.mimeType,\n fileUri: part.data.toString(),\n },\n }\n : {\n inlineData: {\n mimeType: part.mimeType,\n data: part.data,\n },\n },\n );\n\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = part;\n throw new UnsupportedFunctionalityError({\n functionality: `prompt part: ${_exhaustiveCheck}`,\n });\n }\n }\n }\n\n contents.push({ role: 'user', parts });\n break;\n }\n\n case 'assistant': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'model',\n parts: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text.length === 0\n ? undefined\n : { text: part.text };\n }\n case 'tool-call': {\n return {\n functionCall: {\n name: part.toolName,\n args: part.args,\n },\n };\n }\n }\n })\n .filter(\n part => part !== undefined,\n ) as GoogleGenerativeAIContentPart[],\n });\n break;\n }\n\n case 'tool': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'user',\n parts: content.map(part => ({\n functionResponse: {\n name: part.toolName,\n response: {\n name: part.toolName,\n content: part.result,\n },\n },\n })),\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return {\n systemInstruction:\n systemInstructionParts.length > 0\n ? 
{ parts: systemInstructionParts }\n : undefined,\n contents,\n };\n}\n","export function getModelPath(modelId: string): string {\n return modelId.includes('/') ? modelId : `models/${modelId}`;\n}\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst googleErrorDataSchema = z.object({\n error: z.object({\n code: z.number().nullable(),\n message: z.string(),\n status: z.string(),\n }),\n});\n\nexport type GoogleErrorData = z.infer<typeof googleErrorDataSchema>;\n\nexport const googleFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: googleErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\n\nexport function prepareTools(\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n },\n): {\n tools:\n | undefined\n | {\n functionDeclarations: Array<{\n name: string;\n description: string | undefined;\n parameters: unknown;\n }>;\n };\n toolConfig:\n | undefined\n | {\n functionCallingConfig: {\n mode: 'AUTO' | 'NONE' | 'ANY';\n allowedFunctionNames?: string[];\n };\n };\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n const tools = mode.tools?.length ? mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolConfig: undefined, toolWarnings };\n }\n\n const functionDeclarations = [];\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n functionDeclarations.push({\n name: tool.name,\n description: tool.description ?? '',\n parameters: convertJSONSchemaToOpenAPISchema(tool.parameters),\n });\n }\n }\n\n const toolChoice = mode.toolChoice;\n\n if (toolChoice == null) {\n return {\n tools: { functionDeclarations },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'AUTO' } },\n toolWarnings,\n };\n case 'none':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'NONE' } },\n toolWarnings,\n };\n case 'required':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'ANY' } },\n toolWarnings,\n };\n case 'tool':\n return {\n tools: { functionDeclarations },\n toolConfig: {\n functionCallingConfig: {\n mode: 'ANY',\n allowedFunctionNames: [toolChoice.toolName],\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapGoogleGenerativeAIFinishReason({\n finishReason,\n hasToolCalls,\n}: {\n finishReason: string | null | undefined;\n hasToolCalls: boolean;\n}): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'STOP':\n return hasToolCalls ? 
'tool-calls' : 'stop';\n case 'MAX_TOKENS':\n return 'length';\n case 'RECITATION':\n case 'SAFETY':\n return 'content-filter';\n case 'FINISH_REASON_UNSPECIFIED':\n case 'OTHER':\n return 'other';\n default:\n return 'unknown';\n }\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { googleFailedResponseHandler } from './google-error';\nimport {\n GoogleGenerativeAIEmbeddingModelId,\n GoogleGenerativeAIEmbeddingSettings,\n} from './google-generative-ai-embedding-settings';\n\ntype GoogleGenerativeAIEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class GoogleGenerativeAIEmbeddingModel\n implements EmbeddingModelV1<string>\n{\n readonly specificationVersion = 'v1';\n readonly modelId: GoogleGenerativeAIEmbeddingModelId;\n\n private readonly config: GoogleGenerativeAIEmbeddingConfig;\n private readonly settings: GoogleGenerativeAIEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return true;\n }\n\n constructor(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings: GoogleGenerativeAIEmbeddingSettings,\n config: GoogleGenerativeAIEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n requests: values.map(value => ({\n model: `models/${this.modelId}`,\n content: { role: 'user', parts: [{ text: value }] },\n outputDimensionality: this.settings.outputDimensionality,\n })),\n },\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n googleGenerativeAITextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.embeddings.map(item => item.values),\n usage: undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst googleGenerativeAITextEmbeddingResponseSchema = z.object({\n embeddings: z.array(z.object({ values: z.array(z.number()) 
})),\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,yBAKO;;;ACCP,IAAAC,yBAOO;AACP,IAAAC,cAAkB;;;ACTX,SAAS,iCACd,YACS;AAET,MAAI,oBAAoB,UAAU,GAAG;AACnC,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,eAAe,WAAW;AACnC,WAAO,EAAE,MAAM,WAAW,YAAY,CAAC,EAAE;AAAA,EAC3C;AAEA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,EACF,IAAI;AAEJ,QAAM,SAAkC,CAAC;AAEzC,MAAI;AAAa,WAAO,cAAc;AACtC,MAAI;AAAU,WAAO,WAAW;AAChC,MAAI;AAAQ,WAAO,SAAS;AAE5B,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO,CAAC,UAAU;AAAA,EAC3B;AAGA,MAAI,MAAM;AACR,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,UAAI,KAAK,SAAS,MAAM,GAAG;AACzB,eAAO,OAAO,KAAK,OAAO,OAAK,MAAM,MAAM,EAAE,CAAC;AAC9C,eAAO,WAAW;AAAA,MACpB,OAAO;AACL,eAAO,OAAO;AAAA,MAChB;AAAA,IACF,WAAW,SAAS,QAAQ;AAC1B,aAAO,OAAO;AAAA,IAChB,OAAO;AACL,aAAO,OAAO;AAAA,IAChB;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,aAAa,OAAO,QAAQ,UAAU,EAAE;AAAA,MAC7C,CAAC,KAAK,CAAC,KAAK,KAAK,MAAM;AACrB,YAAI,GAAG,IAAI,iCAAiC,KAAK;AACjD,eAAO;AAAA,MACT;AAAA,MACA,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,QAAQ,KAAK,IAC9B,MAAM,IAAI,gCAAgC,IAC1C,iCAAiC,KAAK;AAAA,EAC5C;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AACA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AACA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AAEA,MAAI,cAAc;AAAW,WAAO,YAAY;AAEhD,SAAO;AACT;AAEA,SAAS,oBAAoB,YAA4C;AACvE,SACE,cAAc,QACd,OAAO,eAAe,YACtB,WAAW,SAAS,aACnB,WAAW,cAAc,QACxB,OAAO,KAAK,WAAW,UAAU,EAAE,WAAW;AAEpD;;;AChGA,sBAGO;AACP,4BAA0C;AAOnC,SAAS,oCACd,QAC0B;AAb5B;AAcE,QAAM,yBAAkD,CAAC;AACzD,QAAM,WAA6C,CAAC;AACpD,MAAI,wBAAwB;AAE5B,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,YAAI,CAAC,uBAAuB;AAC1B,gBAAM,IAAI,8CAA8B;AAAA,YACtC,eACE;AAAA,UACJ,CAAC;AAAA,QACH;AAEA,+BAAuB,KAAK,EAAE,MAAM,QAAQ,CAAC;AAC7C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,cAAM,QAAyC,CAAC;AAEhD,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,oBAAM,KAAK,EAAE,MAAM,KAAK,KAAK,CAAC;AAC9B;AAAA,YACF;AAAA,YAEA,KAAK,SAAS;AACZ,oBAAM;AAAA,gBACJ,KAAK,iBAAiB,MAClB;AAAA,kBACE,UAAU;AAAA,oBACR,WAAU,UAAK,aAAL,YAAiB;AAAA,oBAC3B,SAAS,KAAK,MAAM,SAAS;AAAA,kBAC/B;AAAA,gBACF,IACA;AAAA,kBACE,YAAY;AAAA,oBACV,WAAU,UAAK,aAAL,YAAiB;AAAA,oBAC3B,UAAM,iDAA0B,KAAK,KAAK;AAAA,kBAC5C;AAAA,gBACF;AAAA,cACN;AAEA;AAAA,YACF;AAAA,YAEA,KAAK,QAAQ;AACX,oBAAM;AAAA,gBACJ,KAAK,gBAAgB,MACjB;AAAA,kBACE,UAAU;AAAA,oBACR,UAAU,KAAK;AAAA,oBACf,SAAS,KAAK,KAAK,SAAS;AAAA,kBAC9B;AAAA,gBACF,IACA;AAAA,kBACE,YAAY;AAAA,oBACV,UAAU,KAAK;AAAA,oBACf,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACN;AAEA;AAAA,YACF;AAAA,YAEA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,8CAA8B;AAAA,gBACtC,eAAe,gBAAgB,gBAAgB;AAAA,cACjD,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK,EAAE,MAAM,QAAQ,MAAM,CAAC;AACrC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QACJ,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK,KAAK,WAAW,IACxB,SACA,EAAE,MAAM,KAAK,KAAK;AAAA,cACxB;AAAA,cACA,KAAK,aAAa;AAChB,uBAAO;AAAA,kBACL,cAAc;AAAA,oBACZ,MAAM,KAAK;AAAA,oBACX,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC,EACA;AAAA,YACC,UAAQ,SAAS;AAAA,UACnB;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QAAQ,IAAI,WAAS;AAAA,YAC1B,kBAAkB;AAAA,cAChB,MAAM,KAAK;AAAA,cACX,UAAU;AAAA,gBACR,MAAM,KAAK;AAAA,gBACX,SAAS,KAAK;AAAA,cAChB;AAAA,YACF;AAAA,UACF,EAAE;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,mBACE,uBAAuB,SAAS,
IAC5B,EAAE,OAAO,uBAAuB,IAChC;AAAA,IACN;AAAA,EACF;AACF;;;AC9JO,SAAS,aAAa,SAAyB;AACpD,SAAO,QAAQ,SAAS,GAAG,IAAI,UAAU,UAAU,OAAO;AAC5D;;;ACFA,IAAAC,yBAA+C;AAC/C,iBAAkB;AAElB,IAAM,wBAAwB,aAAE,OAAO;AAAA,EACrC,OAAO,aAAE,OAAO;AAAA,IACd,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,SAAS,aAAE,OAAO;AAAA,IAClB,QAAQ,aAAE,OAAO;AAAA,EACnB,CAAC;AACH,CAAC;AAIM,IAAM,kCAA8B,uDAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;AChBD,IAAAC,mBAIO;AAGA,SAAS,aACd,MAsBA;AA9BF;AA+BE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,uBAAuB,CAAC;AAC9B,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,2BAAqB,KAAK;AAAA,QACxB,MAAM,KAAK;AAAA,QACX,cAAa,UAAK,gBAAL,YAAoB;AAAA,QACjC,YAAY,iCAAiC,KAAK,UAAU;AAAA,MAC9D,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACtB,WAAO;AAAA,MACL,OAAO,EAAE,qBAAqB;AAAA,MAC9B,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,MAAM,EAAE;AAAA,QACrD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY;AAAA,UACV,uBAAuB;AAAA,YACrB,MAAM;AAAA,YACN,sBAAsB,CAAC,WAAW,QAAQ;AAAA,UAC5C;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AClGO,SAAS,kCAAkC;AAAA,EAChD;AAAA,EACA;AACF,GAGgC;AAC9B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO,eAAe,eAAe;AAAA,IACvC,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;ANYO,IAAM,kCAAN,MAAiE;AAAA,EActE,YACE,SACA,UACA,QACA;AAjBF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AACvC,SAAS,oBAAoB;AAgB3B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAjBA,IAAI,2BAA2B;AAC7B,WAAO,KAAK,SAAS,sBAAsB;AAAA,EAC7C;AAAA,EAiBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA3EnD;AA4EI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,mBAAmB;AAAA;AAAA,MAEvB,iBAAiB;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAGA,mBACE,iDAAgB,UAAS,SAAS,qBAAqB;AAAA,MACzD,iBACE,iDAAgB,UAAS,UACzB,eAAe,UAAU;AAAA;AAAA,MAGzB,KAAK,2BACD,iCAAiC,eAAe,MAAM,IACtD;AAAA,IACR;AAEA,UAAM,EAAE,UAAU,kBAAkB,IAClC,oCAAoC,MAAM;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,cAAM,EAAE,OAAO,YAAY,aAAa,IAAI,aAAa,IAAI;AAE7D,eAAO;AAAA,UACL,MAAM;AAAA,YACJ;AAAA,YACA;AAAA,YACA;AAAA,YACA,gBAAgB,KAAK,SAAS;AAAA,YAC9B;AAAA,YACA;AAAA,YACA,eAAe,KAAK,SAAS;AAAA,UAC/B;AAAA,UACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,kBAAkB;AAAA,cAChB,GAAG;AAAA,cACH,kBAAkB;AAAA,cAClB,gBACE,KAAK,UAAU;AAAA;AAAA,cAGf,KAAK,2BACD,iCAAiC,KAAK,MAAM,IAC5C;AAAA,YACR;AAAA,YACA;AAAA,YACA;AAAA,YACA,gBAAgB,KAAK,SAAS;AAAA,YAC9B,eAAe,KAAK,SAAS;AAAA,UAC/B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ;AAAA,YACA;AAAA,YACA,OAAO;AAAA,cACL,sBAAsB;AAAA,gBACpB;AAAA,kBACE,MAAM,KAAK,KAAK;AAAA,kBAChB,cAAa,UAAK,KAAK,gBAA
V,YAAyB;AAAA,kBACtC,YAAY;AAAA,oBACV,KAAK,KAAK;AAAA,kBACZ;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,YACA,YAAY,EAAE,uBAAuB,EAAE,MAAM,MAAM,EAAE;AAAA,YACrD,gBAAgB,KAAK,SAAS;AAAA,YAC9B,eAAe,KAAK,SAAS;AAAA,UAC/B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,YAAY,KAAmB;AAC7B,WAAO,IACJ,SAAS,EACT,WAAW,yDAAyD;AAAA,EACzE;AAAA,EAEA,MAAM,WACJ,SAC6D;AAjMjE;AAkMI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,kDAA0B,cAAc;AAAA,MACnE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,YAAY,SAAS,WAAW,CAAC;AAEvC,UAAM,YAAY,sBAAsB;AAAA,MACtC,OAAO,UAAU,QAAQ;AAAA,MACzB,YAAY,KAAK,OAAO;AAAA,IAC1B,CAAC;AAED,UAAM,gBAAgB,SAAS;AAE/B,WAAO;AAAA,MACL,MAAM,iBAAiB,UAAU,QAAQ,KAAK;AAAA,MAC9C;AAAA,MACA,cAAc,kCAAkC;AAAA,QAC9C,cAAc,UAAU;AAAA,QACxB,cAAc,aAAa,QAAQ,UAAU,SAAS;AAAA,MACxD,CAAC;AAAA,MACD,OAAO;AAAA,QACL,eAAc,oDAAe,qBAAf,YAAmC;AAAA,QACjD,mBAAkB,oDAAe,yBAAf,YAAuC;AAAA,MAC3D;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,yDAAiC,WAAW;AAAA,MACvE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AAEA,UAAMC,cAAa,KAAK,OAAO;AAC/B,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAlRvC;AAmRY,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,kBAAM,gBAAgB,MAAM;AAE5B,gBAAI,iBAAiB,MAAM;AACzB,sBAAQ;AAAA,gBACN,eAAc,mBAAc,qBAAd,YAAkC;AAAA,gBAChD,mBAAkB,mBAAc,yBAAd,YAAsC;AAAA,cAC1D;AAAA,YACF;AAEA,kBAAM,aAAY,WAAM,eAAN,mBAAmB;AAGrC,gBAAI,aAAa,MAAM;AACrB;AAAA,YACF;AAEA,gBAAI,UAAU,gBAAgB,MAAM;AAClC,6BAAe,kCAAkC;AAAA,gBAC/C,cAAc,UAAU;AAAA,gBACxB;AAAA,cACF,CAAC;AAAA,YACH;AAEA,kBAAM,UAAU,UAAU;AAE1B,gBAAI,WAAW,MAAM;AACnB;AAAA,YACF;AAEA,kBAAM,YAAY,iBAAiB,QAAQ,KAAK;AAChD,gBAAI,aAAa,MAAM;AACrB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW;AAAA,cACb,CAAC;AAAA,YACH;AAEA,kBAAM,iBAAiB,sBAAsB;AAAA,cAC3C,OAAO,QAAQ;AAAA,cACf,YAAAA;AAAA,YACF,CAAC;AAED,gBAAI,kBAAkB,MAAM;AAC1B,yBAAW,YAAY,gBAAgB;AACrC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS;AAAA,kBACnB,eAAe,SAAS;AAAA,gBAC1B,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS;AAAA,kBACnB,MAAM,SAAS;AAAA,gBACjB,CAAC;AAED,+BAAe;AAAA,cACjB;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,UAAU,cAAc,MAAM,CAAC;AAAA,UAC5D;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAEA,SAAS,sBAAsB;AAAA,EAC7B;AAAA,EACA,YAAAA;AACF,GAGG;AACD,QAAM,oBAAoB,MAAM;AAAA,IAC9B,UAAQ,kBAAkB;AAAA,EAC5B;AAMA,SAAO,kBAAkB,WAAW,IAChC,SACA,kBAAkB,IAAI,WAAS;AAAA,IAC7B,cAAc;AAAA,IACd,YAAYA,YAAW;AAAA,IACvB,UAAU,KAAK,aAAa;AAAA,IAC5B,
MAAM,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,EAC7C,EAAE;AACR;AAEA,SAAS,iBAAiB,OAA+C;AACvE,QAAM,YAAY,MAAM,OAAO,UAAQ,UAAU,IAAI;AAIrD,SAAO,UAAU,WAAW,IACxB,SACA,UAAU,IAAI,UAAQ,KAAK,IAAI,EAAE,KAAK,EAAE;AAC9C;AAEA,IAAM,gBAAgB,cAAE,OAAO;AAAA,EAC7B,MAAM,cAAE,OAAO;AAAA,EACf,OAAO,cAAE;AAAA,IACP,cAAE,MAAM;AAAA,MACN,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACD,cAAE,OAAO;AAAA,QACP,cAAc,cAAE,OAAO;AAAA,UACrB,MAAM,cAAE,OAAO;AAAA,UACf,MAAM,cAAE,QAAQ;AAAA,QAClB,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC;AAID,IAAM,iBAAiB,cAAE,OAAO;AAAA,EAC9B,YAAY,cAAE;AAAA,IACZ,cAAE,OAAO;AAAA,MACP,SAAS;AAAA,MACT,cAAc,cAAE,OAAO,EAAE,SAAS;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,eAAe,cACZ,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO;AAAA,IAC3B,sBAAsB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACzC,iBAAiB,cAAE,OAAO;AAAA,EAC5B,CAAC,EACA,SAAS;AACd,CAAC;AAID,IAAM,cAAc,cAAE,OAAO;AAAA,EAC3B,YAAY,cACT;AAAA,IACC,cAAE,OAAO;AAAA,MACP,SAAS,cAAc,SAAS;AAAA,MAChC,cAAc,cAAE,OAAO,EAAE,SAAS;AAAA,IACpC,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,eAAe,cACZ,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO;AAAA,IAC3B,sBAAsB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACzC,iBAAiB,cAAE,OAAO;AAAA,EAC5B,CAAC,EACA,QAAQ;AACb,CAAC;;;AOhcD,IAAAC,mBAGO;AACP,IAAAC,yBAKO;AACP,IAAAC,cAAkB;AAcX,IAAM,mCAAN,MAEP;AAAA,EAmBE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AACjC,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,wBAAiC;AACnC,WAAO;AAAA,EACT;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AAAA,MAClD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,UAAU,OAAO,IAAI,YAAU;AAAA,UAC7B,OAAO,UAAU,KAAK,OAAO;AAAA,UAC7B,SAAS,EAAE,MAAM,QAAQ,OAAO,CAAC,EAAE,MAAM,MAAM,CAAC,EAAE;AAAA,UAClD,sBAAsB,KAAK,SAAS;AAAA,QACtC,EAAE;AAAA,MACJ;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,WAAW,IAAI,UAAQ,KAAK,MAAM;AAAA,MACvD,OAAO;AAAA,MACP,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,gDAAgD,cAAE,OAAO;AAAA,EAC7D,YAAY,cAAE,MAAM,cAAE,OAAO,EAAE,QAAQ,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAC/D,CAAC;;;ARHM,SAAS,yBACd,UAA8C,CAAC,GACnB;AApG9B;AAqGE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YACA;AAEF,QAAM,aAAa,OAAO;AAAA,IACxB,sBAAkB,mCAAW;AAAA,MAC3B,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,IACD,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAuC,CAAC,MACxC;AArHJ,QAAAC;AAsHI,eAAI,gCAAgC,SAAS,UAAU;AAAA,MACrD,UAAU;AAAA,MACV;AAAA,MACA,SAAS;AAAA,MACT,aAAYA,MAAA,QAAQ,eAAR,OAAAA,MAAsB;AAAA,MAClC,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA;AAEH,QAAM,uBAAuB,CAC3B,SACA,WAAgD,CAAC,MAEjD,IAAI,iCAAiC,SAAS,UAAU;AAAA,IACtD,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,eAAe;AACxB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAKO,IAAM,SAAS,yBAAyB;","names":["import_provider_utils","import_provider_utils","import_zod","import_provider_utils","import_provider","generateId","import_provider","import_provider_utils","import_zod","_a"]}
+
{"version":3,"sources":["../src/index.ts","../src/google-provider.ts","../src/google-generative-ai-language-model.ts","../src/convert-json-schema-to-openapi-schema.ts","../src/convert-to-google-generative-ai-messages.ts","../src/get-model-path.ts","../src/google-error.ts","../src/google-prepare-tools.ts","../src/map-google-generative-ai-finish-reason.ts","../src/google-generative-ai-embedding-model.ts"],"sourcesContent":["export { createGoogleGenerativeAI, google } from './google-provider';\nexport type {\n GoogleGenerativeAIProvider,\n GoogleGenerativeAIProviderSettings,\n} from './google-provider';\n","import {\n FetchFunction,\n generateId,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { GoogleGenerativeAILanguageModel } from './google-generative-ai-language-model';\nimport {\n GoogleGenerativeAIModelId,\n GoogleGenerativeAISettings,\n} from './google-generative-ai-settings';\nimport { GoogleGenerativeAIEmbeddingModel } from './google-generative-ai-embedding-model';\nimport {\n GoogleGenerativeAIEmbeddingModelId,\n GoogleGenerativeAIEmbeddingSettings,\n} from './google-generative-ai-embedding-settings';\nimport {\n EmbeddingModelV1,\n LanguageModelV1,\n ProviderV1,\n} from '@ai-sdk/provider';\n\nexport interface GoogleGenerativeAIProvider extends ProviderV1 {\n (\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): LanguageModelV1;\n\n languageModel(\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): LanguageModelV1;\n\n chat(\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): LanguageModelV1;\n\n /**\n * @deprecated Use `chat()` instead.\n */\n generativeAI(\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): LanguageModelV1;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n embedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings?: GoogleGenerativeAIEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n textEmbedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings?: GoogleGenerativeAIEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n textEmbeddingModel(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings?: GoogleGenerativeAIEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n}\n\nexport interface GoogleGenerativeAIProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://generativelanguage.googleapis.com/v1beta`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `x-goog-api-key` header.\nIt defaults to the `GOOGLE_GENERATIVE_AI_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string | undefined>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. 
testing.\n */\n fetch?: FetchFunction;\n\n /**\nOptional function to generate a unique ID for each request.\n */\n generateId?: () => string;\n}\n\n/**\nCreate a Google Generative AI provider instance.\n */\nexport function createGoogleGenerativeAI(\n options: GoogleGenerativeAIProviderSettings = {},\n): GoogleGenerativeAIProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ??\n 'https://generativelanguage.googleapis.com/v1beta';\n\n const getHeaders = () => ({\n 'x-goog-api-key': loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'GOOGLE_GENERATIVE_AI_API_KEY',\n description: 'Google Generative AI',\n }),\n ...options.headers,\n });\n\n const createChatModel = (\n modelId: GoogleGenerativeAIModelId,\n settings: GoogleGenerativeAISettings = {},\n ) =>\n new GoogleGenerativeAILanguageModel(modelId, settings, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n generateId: options.generateId ?? generateId,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings: GoogleGenerativeAIEmbeddingSettings = {},\n ) =>\n new GoogleGenerativeAIEmbeddingModel(modelId, settings, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The Google Generative AI model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId, settings);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.generativeAI = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n return provider as GoogleGenerativeAIProvider;\n}\n\n/**\nDefault Google Generative AI provider instance.\n */\nexport const google = createGoogleGenerativeAI();\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n resolve,\n Resolvable,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\nimport { convertToGoogleGenerativeAIMessages } from './convert-to-google-generative-ai-messages';\nimport { getModelPath } from './get-model-path';\nimport { googleFailedResponseHandler } from './google-error';\nimport { GoogleGenerativeAIContentPart } from './google-generative-ai-prompt';\nimport {\n GoogleGenerativeAIModelId,\n InternalGoogleGenerativeAISettings,\n} from './google-generative-ai-settings';\nimport { prepareTools } from './google-prepare-tools';\nimport { mapGoogleGenerativeAIFinishReason } from './map-google-generative-ai-finish-reason';\n\ntype GoogleGenerativeAIConfig = {\n provider: string;\n baseURL: string;\n headers: Resolvable<Record<string, string | undefined>>;\n generateId: () => string;\n fetch?: FetchFunction;\n};\n\nexport class GoogleGenerativeAILanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = 'json';\n readonly supportsImageUrls = false;\n\n get supportsStructuredOutputs() {\n return this.settings.structuredOutputs ?? 
true;\n }\n\n readonly modelId: GoogleGenerativeAIModelId;\n readonly settings: InternalGoogleGenerativeAISettings;\n\n private readonly config: GoogleGenerativeAIConfig;\n\n constructor(\n modelId: GoogleGenerativeAIModelId,\n settings: InternalGoogleGenerativeAISettings,\n config: GoogleGenerativeAIConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private async getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (seed != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'seed',\n });\n }\n\n const generationConfig = {\n // standardized settings:\n maxOutputTokens: maxTokens,\n temperature,\n topK,\n topP,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n\n // response format:\n responseMimeType:\n responseFormat?.type === 'json' ? 'application/json' : undefined,\n responseSchema:\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n // Google GenAI does not support all OpenAPI Schema features,\n // so this is needed as an escape hatch:\n this.supportsStructuredOutputs\n ? convertJSONSchemaToOpenAPISchema(responseFormat.schema)\n : undefined,\n };\n\n const { contents, systemInstruction } =\n convertToGoogleGenerativeAIMessages(prompt);\n\n switch (type) {\n case 'regular': {\n const { tools, toolConfig, toolWarnings } = prepareTools(\n mode,\n this.settings.useSearchGrounding ?? false,\n );\n\n return {\n args: {\n generationConfig,\n contents,\n systemInstruction,\n safetySettings: this.settings.safetySettings,\n tools,\n toolConfig,\n cachedContent: this.settings.cachedContent,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case 'object-json': {\n return {\n args: {\n generationConfig: {\n ...generationConfig,\n responseMimeType: 'application/json',\n responseSchema:\n mode.schema != null &&\n // Google GenAI does not support all OpenAPI Schema features,\n // so this is needed as an escape hatch:\n this.supportsStructuredOutputs\n ? convertJSONSchemaToOpenAPISchema(mode.schema)\n : undefined,\n },\n contents,\n systemInstruction,\n safetySettings: this.settings.safetySettings,\n cachedContent: this.settings.cachedContent,\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n generationConfig,\n contents,\n tools: {\n functionDeclarations: [\n {\n name: mode.tool.name,\n description: mode.tool.description ?? 
'',\n parameters: convertJSONSchemaToOpenAPISchema(\n mode.tool.parameters,\n ),\n },\n ],\n },\n toolConfig: { functionCallingConfig: { mode: 'ANY' } },\n safetySettings: this.settings.safetySettings,\n cachedContent: this.settings.cachedContent,\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n supportsUrl(url: URL): boolean {\n return url\n .toString()\n .startsWith('https://generativelanguage.googleapis.com/v1beta/files/');\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = JSON.stringify(args);\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:generateContent`,\n headers: mergedHeaders,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(responseSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { contents: rawPrompt, ...rawSettings } = args;\n const candidate = response.candidates[0];\n\n const toolCalls = getToolCallsFromParts({\n parts: candidate.content?.parts ?? [],\n generateId: this.config.generateId,\n });\n\n const usageMetadata = response.usageMetadata;\n\n return {\n text: getTextFromParts(candidate.content?.parts ?? []),\n toolCalls,\n finishReason: mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls: toolCalls != null && toolCalls.length > 0,\n }),\n usage: {\n promptTokens: usageMetadata?.promptTokenCount ?? NaN,\n completionTokens: usageMetadata?.candidatesTokenCount ?? 
NaN,\n },\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = JSON.stringify(args);\n const headers = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:streamGenerateContent?alt=sse`,\n headers,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { contents: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n\n const generateId = this.config.generateId;\n let hasToolCalls = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n const usageMetadata = value.usageMetadata;\n\n if (usageMetadata != null) {\n usage = {\n promptTokens: usageMetadata.promptTokenCount ?? NaN,\n completionTokens: usageMetadata.candidatesTokenCount ?? NaN,\n };\n }\n\n const candidate = value.candidates?.[0];\n\n // sometimes the API returns an empty candidates array\n if (candidate == null) {\n return;\n }\n\n if (candidate.finishReason != null) {\n finishReason = mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls,\n });\n }\n\n const content = candidate.content;\n\n if (content == null) {\n return;\n }\n\n const deltaText = getTextFromParts(content.parts);\n if (deltaText != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: deltaText,\n });\n }\n\n const toolCallDeltas = getToolCallsFromParts({\n parts: content.parts,\n generateId,\n });\n\n if (toolCallDeltas != null) {\n for (const toolCall of toolCallDeltas) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n argsTextDelta: toolCall.args,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n args: toolCall.args,\n });\n\n hasToolCalls = true;\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({ type: 'finish', finishReason, usage });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body },\n };\n }\n}\n\nfunction getToolCallsFromParts({\n parts,\n generateId,\n}: {\n parts: z.infer<typeof contentSchema>['parts'];\n generateId: () => string;\n}) {\n const functionCallParts = parts.filter(\n part => 'functionCall' in part,\n ) as Array<\n GoogleGenerativeAIContentPart & {\n functionCall: { name: string; args: unknown };\n }\n >;\n\n return functionCallParts.length === 0\n ? 
undefined\n : functionCallParts.map(part => ({\n toolCallType: 'function' as const,\n toolCallId: generateId(),\n toolName: part.functionCall.name,\n args: JSON.stringify(part.functionCall.args),\n }));\n}\n\nfunction getTextFromParts(parts: z.infer<typeof contentSchema>['parts']) {\n const textParts = parts.filter(part => 'text' in part) as Array<\n GoogleGenerativeAIContentPart & { text: string }\n >;\n\n return textParts.length === 0\n ? undefined\n : textParts.map(part => part.text).join('');\n}\n\nconst contentSchema = z.object({\n role: z.string(),\n parts: z.array(\n z.union([\n z.object({\n text: z.string(),\n }),\n z.object({\n functionCall: z.object({\n name: z.string(),\n args: z.unknown(),\n }),\n }),\n ]),\n ),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst responseSchema = z.object({\n candidates: z.array(\n z.object({\n content: contentSchema.optional(),\n finishReason: z.string().optional(),\n }),\n ),\n usageMetadata: z\n .object({\n promptTokenCount: z.number(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number(),\n })\n .optional(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst chunkSchema = z.object({\n candidates: z\n .array(\n z.object({\n content: contentSchema.optional(),\n finishReason: z.string().optional(),\n }),\n )\n .nullish(),\n usageMetadata: z\n .object({\n promptTokenCount: z.number(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number(),\n })\n .nullish(),\n});\n","import { JSONSchema7Definition } from '@ai-sdk/provider';\n\n/**\n * Converts JSON Schema 7 to OpenAPI Schema 3.0\n */\nexport function convertJSONSchemaToOpenAPISchema(\n jsonSchema: JSONSchema7Definition,\n): unknown {\n // parameters need to be undefined if they are empty objects:\n if (isEmptyObjectSchema(jsonSchema)) {\n return undefined;\n }\n\n if (typeof jsonSchema === 'boolean') {\n return { type: 'boolean', properties: {} };\n }\n\n const {\n type,\n description,\n required,\n properties,\n items,\n allOf,\n anyOf,\n oneOf,\n format,\n const: constValue,\n minLength,\n } = jsonSchema;\n\n const result: Record<string, unknown> = {};\n\n if (description) result.description = description;\n if (required) result.required = required;\n if (format) result.format = format;\n\n if (constValue !== undefined) {\n result.enum = [constValue];\n }\n\n // Handle type\n if (type) {\n if (Array.isArray(type)) {\n if (type.includes('null')) {\n result.type = type.filter(t => t !== 'null')[0];\n result.nullable = true;\n } else {\n result.type = type;\n }\n } else if (type === 'null') {\n result.type = 'null';\n } else {\n result.type = type;\n }\n }\n\n if (properties != null) {\n result.properties = Object.entries(properties).reduce(\n (acc, [key, value]) => {\n acc[key] = convertJSONSchemaToOpenAPISchema(value);\n return acc;\n },\n {} as Record<string, unknown>,\n );\n }\n\n if (items) {\n result.items = Array.isArray(items)\n ? 
items.map(convertJSONSchemaToOpenAPISchema)\n : convertJSONSchemaToOpenAPISchema(items);\n }\n\n if (allOf) {\n result.allOf = allOf.map(convertJSONSchemaToOpenAPISchema);\n }\n if (anyOf) {\n result.anyOf = anyOf.map(convertJSONSchemaToOpenAPISchema);\n }\n if (oneOf) {\n result.oneOf = oneOf.map(convertJSONSchemaToOpenAPISchema);\n }\n\n if (minLength !== undefined) result.minLength = minLength;\n\n return result;\n}\n\nfunction isEmptyObjectSchema(jsonSchema: JSONSchema7Definition): boolean {\n return (\n jsonSchema != null &&\n typeof jsonSchema === 'object' &&\n jsonSchema.type === 'object' &&\n (jsonSchema.properties == null ||\n Object.keys(jsonSchema.properties).length === 0)\n );\n}\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport {\n GoogleGenerativeAIContent,\n GoogleGenerativeAIContentPart,\n GoogleGenerativeAIPrompt,\n} from './google-generative-ai-prompt';\n\nexport function convertToGoogleGenerativeAIMessages(\n prompt: LanguageModelV1Prompt,\n): GoogleGenerativeAIPrompt {\n const systemInstructionParts: Array<{ text: string }> = [];\n const contents: Array<GoogleGenerativeAIContent> = [];\n let systemMessagesAllowed = true;\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n if (!systemMessagesAllowed) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'system messages are only supported at the beginning of the conversation',\n });\n }\n\n systemInstructionParts.push({ text: content });\n break;\n }\n\n case 'user': {\n systemMessagesAllowed = false;\n\n const parts: GoogleGenerativeAIContentPart[] = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n parts.push({ text: part.text });\n break;\n }\n\n case 'image': {\n parts.push(\n part.image instanceof URL\n ? {\n fileData: {\n mimeType: part.mimeType ?? 'image/jpeg',\n fileUri: part.image.toString(),\n },\n }\n : {\n inlineData: {\n mimeType: part.mimeType ?? 'image/jpeg',\n data: convertUint8ArrayToBase64(part.image),\n },\n },\n );\n\n break;\n }\n\n case 'file': {\n parts.push(\n part.data instanceof URL\n ? {\n fileData: {\n mimeType: part.mimeType,\n fileUri: part.data.toString(),\n },\n }\n : {\n inlineData: {\n mimeType: part.mimeType,\n data: part.data,\n },\n },\n );\n\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = part;\n throw new UnsupportedFunctionalityError({\n functionality: `prompt part: ${_exhaustiveCheck}`,\n });\n }\n }\n }\n\n contents.push({ role: 'user', parts });\n break;\n }\n\n case 'assistant': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'model',\n parts: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text.length === 0\n ? undefined\n : { text: part.text };\n }\n case 'tool-call': {\n return {\n functionCall: {\n name: part.toolName,\n args: part.args,\n },\n };\n }\n }\n })\n .filter(\n part => part !== undefined,\n ) as GoogleGenerativeAIContentPart[],\n });\n break;\n }\n\n case 'tool': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'user',\n parts: content.map(part => ({\n functionResponse: {\n name: part.toolName,\n response: {\n name: part.toolName,\n content: part.result,\n },\n },\n })),\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return {\n systemInstruction:\n systemInstructionParts.length > 0\n ? 
{ parts: systemInstructionParts }\n : undefined,\n contents,\n };\n}\n","export function getModelPath(modelId: string): string {\n return modelId.includes('/') ? modelId : `models/${modelId}`;\n}\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst googleErrorDataSchema = z.object({\n error: z.object({\n code: z.number().nullable(),\n message: z.string(),\n status: z.string(),\n }),\n});\n\nexport type GoogleErrorData = z.infer<typeof googleErrorDataSchema>;\n\nexport const googleFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: googleErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\n\nexport function prepareTools(\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n },\n useSearchGrounding: boolean,\n): {\n tools:\n | undefined\n | {\n functionDeclarations: Array<{\n name: string;\n description: string | undefined;\n parameters: unknown;\n }>;\n }\n | { googleSearchRetrieval: Record<string, never> };\n toolConfig:\n | undefined\n | {\n functionCallingConfig: {\n mode: 'AUTO' | 'NONE' | 'ANY';\n allowedFunctionNames?: string[];\n };\n };\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n const tools = mode.tools?.length ? mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (useSearchGrounding) {\n return {\n tools: { googleSearchRetrieval: {} },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n if (tools == null) {\n return { tools: undefined, toolConfig: undefined, toolWarnings };\n }\n\n const functionDeclarations = [];\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n functionDeclarations.push({\n name: tool.name,\n description: tool.description ?? '',\n parameters: convertJSONSchemaToOpenAPISchema(tool.parameters),\n });\n }\n }\n\n const toolChoice = mode.toolChoice;\n\n if (toolChoice == null) {\n return {\n tools: { functionDeclarations },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'AUTO' } },\n toolWarnings,\n };\n case 'none':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'NONE' } },\n toolWarnings,\n };\n case 'required':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'ANY' } },\n toolWarnings,\n };\n case 'tool':\n return {\n tools: { functionDeclarations },\n toolConfig: {\n functionCallingConfig: {\n mode: 'ANY',\n allowedFunctionNames: [toolChoice.toolName],\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapGoogleGenerativeAIFinishReason({\n finishReason,\n hasToolCalls,\n}: {\n finishReason: string | null | undefined;\n hasToolCalls: boolean;\n}): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'STOP':\n return hasToolCalls ? 
'tool-calls' : 'stop';\n case 'MAX_TOKENS':\n return 'length';\n case 'RECITATION':\n case 'SAFETY':\n return 'content-filter';\n case 'FINISH_REASON_UNSPECIFIED':\n case 'OTHER':\n return 'other';\n default:\n return 'unknown';\n }\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { googleFailedResponseHandler } from './google-error';\nimport {\n GoogleGenerativeAIEmbeddingModelId,\n GoogleGenerativeAIEmbeddingSettings,\n} from './google-generative-ai-embedding-settings';\n\ntype GoogleGenerativeAIEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class GoogleGenerativeAIEmbeddingModel\n implements EmbeddingModelV1<string>\n{\n readonly specificationVersion = 'v1';\n readonly modelId: GoogleGenerativeAIEmbeddingModelId;\n\n private readonly config: GoogleGenerativeAIEmbeddingConfig;\n private readonly settings: GoogleGenerativeAIEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return true;\n }\n\n constructor(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings: GoogleGenerativeAIEmbeddingSettings,\n config: GoogleGenerativeAIEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n headers,\n );\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,\n headers: mergedHeaders,\n body: {\n requests: values.map(value => ({\n model: `models/${this.modelId}`,\n content: { role: 'user', parts: [{ text: value }] },\n outputDimensionality: this.settings.outputDimensionality,\n })),\n },\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n googleGenerativeAITextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.embeddings.map(item => item.values),\n usage: undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst googleGenerativeAITextEmbeddingResponseSchema = z.object({\n embeddings: z.array(z.object({ values: z.array(z.number()) 
})),\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,yBAKO;;;ACCP,IAAAC,yBASO;AACP,IAAAC,cAAkB;;;ACXX,SAAS,iCACd,YACS;AAET,MAAI,oBAAoB,UAAU,GAAG;AACnC,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,eAAe,WAAW;AACnC,WAAO,EAAE,MAAM,WAAW,YAAY,CAAC,EAAE;AAAA,EAC3C;AAEA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,EACF,IAAI;AAEJ,QAAM,SAAkC,CAAC;AAEzC,MAAI;AAAa,WAAO,cAAc;AACtC,MAAI;AAAU,WAAO,WAAW;AAChC,MAAI;AAAQ,WAAO,SAAS;AAE5B,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO,CAAC,UAAU;AAAA,EAC3B;AAGA,MAAI,MAAM;AACR,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,UAAI,KAAK,SAAS,MAAM,GAAG;AACzB,eAAO,OAAO,KAAK,OAAO,OAAK,MAAM,MAAM,EAAE,CAAC;AAC9C,eAAO,WAAW;AAAA,MACpB,OAAO;AACL,eAAO,OAAO;AAAA,MAChB;AAAA,IACF,WAAW,SAAS,QAAQ;AAC1B,aAAO,OAAO;AAAA,IAChB,OAAO;AACL,aAAO,OAAO;AAAA,IAChB;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,aAAa,OAAO,QAAQ,UAAU,EAAE;AAAA,MAC7C,CAAC,KAAK,CAAC,KAAK,KAAK,MAAM;AACrB,YAAI,GAAG,IAAI,iCAAiC,KAAK;AACjD,eAAO;AAAA,MACT;AAAA,MACA,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,QAAQ,KAAK,IAC9B,MAAM,IAAI,gCAAgC,IAC1C,iCAAiC,KAAK;AAAA,EAC5C;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AACA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AACA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AAEA,MAAI,cAAc;AAAW,WAAO,YAAY;AAEhD,SAAO;AACT;AAEA,SAAS,oBAAoB,YAA4C;AACvE,SACE,cAAc,QACd,OAAO,eAAe,YACtB,WAAW,SAAS,aACnB,WAAW,cAAc,QACxB,OAAO,KAAK,WAAW,UAAU,EAAE,WAAW;AAEpD;;;AChGA,sBAGO;AACP,4BAA0C;AAOnC,SAAS,oCACd,QAC0B;AAb5B;AAcE,QAAM,yBAAkD,CAAC;AACzD,QAAM,WAA6C,CAAC;AACpD,MAAI,wBAAwB;AAE5B,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,YAAI,CAAC,uBAAuB;AAC1B,gBAAM,IAAI,8CAA8B;AAAA,YACtC,eACE;AAAA,UACJ,CAAC;AAAA,QACH;AAEA,+BAAuB,KAAK,EAAE,MAAM,QAAQ,CAAC;AAC7C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,cAAM,QAAyC,CAAC;AAEhD,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,oBAAM,KAAK,EAAE,MAAM,KAAK,KAAK,CAAC;AAC9B;AAAA,YACF;AAAA,YAEA,KAAK,SAAS;AACZ,oBAAM;AAAA,gBACJ,KAAK,iBAAiB,MAClB;AAAA,kBACE,UAAU;AAAA,oBACR,WAAU,UAAK,aAAL,YAAiB;AAAA,oBAC3B,SAAS,KAAK,MAAM,SAAS;AAAA,kBAC/B;AAAA,gBACF,IACA;AAAA,kBACE,YAAY;AAAA,oBACV,WAAU,UAAK,aAAL,YAAiB;AAAA,oBAC3B,UAAM,iDAA0B,KAAK,KAAK;AAAA,kBAC5C;AAAA,gBACF;AAAA,cACN;AAEA;AAAA,YACF;AAAA,YAEA,KAAK,QAAQ;AACX,oBAAM;AAAA,gBACJ,KAAK,gBAAgB,MACjB;AAAA,kBACE,UAAU;AAAA,oBACR,UAAU,KAAK;AAAA,oBACf,SAAS,KAAK,KAAK,SAAS;AAAA,kBAC9B;AAAA,gBACF,IACA;AAAA,kBACE,YAAY;AAAA,oBACV,UAAU,KAAK;AAAA,oBACf,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACN;AAEA;AAAA,YACF;AAAA,YAEA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,8CAA8B;AAAA,gBACtC,eAAe,gBAAgB,gBAAgB;AAAA,cACjD,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK,EAAE,MAAM,QAAQ,MAAM,CAAC;AACrC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QACJ,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK,KAAK,WAAW,IACxB,SACA,EAAE,MAAM,KAAK,KAAK;AAAA,cACxB;AAAA,cACA,KAAK,aAAa;AAChB,uBAAO;AAAA,kBACL,cAAc;AAAA,oBACZ,MAAM,KAAK;AAAA,oBACX,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC,EACA;AAAA,YACC,UAAQ,SAAS;AAAA,UACnB;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QAAQ,IAAI,WAAS;AAAA,YAC1B,kBAAkB;AAAA,cAChB,MAAM,KAAK;AAAA,cACX,UAAU;AAAA,gBACR,MAAM,KAAK;AAAA,gBACX,SAAS,KAAK;AAAA,cAChB;AAAA,YACF;AAAA,UACF,EAAE;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,mBACE,uBAAuB,SAAS,
IAC5B,EAAE,OAAO,uBAAuB,IAChC;AAAA,IACN;AAAA,EACF;AACF;;;AC9JO,SAAS,aAAa,SAAyB;AACpD,SAAO,QAAQ,SAAS,GAAG,IAAI,UAAU,UAAU,OAAO;AAC5D;;;ACFA,IAAAC,yBAA+C;AAC/C,iBAAkB;AAElB,IAAM,wBAAwB,aAAE,OAAO;AAAA,EACrC,OAAO,aAAE,OAAO;AAAA,IACd,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,SAAS,aAAE,OAAO;AAAA,IAClB,QAAQ,aAAE,OAAO;AAAA,EACnB,CAAC;AACH,CAAC;AAIM,IAAM,kCAA8B,uDAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;AChBD,IAAAC,mBAIO;AAGA,SAAS,aACd,MAGA,oBAqBA;AAhCF;AAiCE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MAAI,oBAAoB;AACtB,WAAO;AAAA,MACL,OAAO,EAAE,uBAAuB,CAAC,EAAE;AAAA,MACnC,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,uBAAuB,CAAC;AAC9B,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,2BAAqB,KAAK;AAAA,QACxB,MAAM,KAAK;AAAA,QACX,cAAa,UAAK,gBAAL,YAAoB;AAAA,QACjC,YAAY,iCAAiC,KAAK,UAAU;AAAA,MAC9D,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACtB,WAAO;AAAA,MACL,OAAO,EAAE,qBAAqB;AAAA,MAC9B,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,MAAM,EAAE;AAAA,QACrD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY;AAAA,UACV,uBAAuB;AAAA,YACrB,MAAM;AAAA,YACN,sBAAsB,CAAC,WAAW,QAAQ;AAAA,UAC5C;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AC5GO,SAAS,kCAAkC;AAAA,EAChD;AAAA,EACA;AACF,GAGgC;AAC9B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO,eAAe,eAAe;AAAA,IACvC,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;ANcO,IAAM,kCAAN,MAAiE;AAAA,EActE,YACE,SACA,UACA,QACA;AAjBF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AACvC,SAAS,oBAAoB;AAgB3B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAjBA,IAAI,4BAA4B;AA1ClC;AA2CI,YAAO,UAAK,SAAS,sBAAd,YAAmC;AAAA,EAC5C;AAAA,EAiBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA7EnD;AA8EI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,mBAAmB;AAAA;AAAA,MAEvB,iBAAiB;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAGA,mBACE,iDAAgB,UAAS,SAAS,qBAAqB;AAAA,MACzD,iBACE,iDAAgB,UAAS,UACzB,eAAe,UAAU;AAAA;AAAA,MAGzB,KAAK,4BACD,iCAAiC,eAAe,MAAM,IACtD;AAAA,IACR;AAEA,UAAM,EAAE,UAAU,kBAAkB,IAClC,oCAAoC,MAAM;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,cAAM,EAAE,OAAO,YAAY,aAAa,IAAI;AAAA,UAC1C;AAAA,WACA,UAAK,SAAS,uBAAd,YAAoC;AAAA,QACtC;AAEA,eAAO;AAAA,UACL,MAAM;AAAA,YACJ;AAAA,YACA;AAAA,YACA;AAAA,YACA,gBAAgB,KAAK,SAAS;AAAA,YAC9B;AAAA,YACA;AAAA,YACA,eAAe,KAAK,SAAS;AAAA,UAC/B;AAAA,UACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,kBAAkB;AAAA,cAChB,GAAG;AAAA,cACH,kBAAkB;AAAA,cAClB,gBACE,KAAK,UAAU;AAAA;AAAA,cAGf,KAAK,4BACD,iCAAiC,KAAK,MAAM,IAC5C;AAAA,YACR;AAAA,YACA;AAAA,YACA;AAAA,YACA,gBAAgB,KAAK,SAAS;AAAA,YAC9B,eAAe,KAAK,SAAS;AAAA,UAC/B;AAAA,UACA;AAAA,QACF;AAAA,
MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ;AAAA,YACA;AAAA,YACA,OAAO;AAAA,cACL,sBAAsB;AAAA,gBACpB;AAAA,kBACE,MAAM,KAAK,KAAK;AAAA,kBAChB,cAAa,UAAK,KAAK,gBAAV,YAAyB;AAAA,kBACtC,YAAY;AAAA,oBACV,KAAK,KAAK;AAAA,kBACZ;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,YACA,YAAY,EAAE,uBAAuB,EAAE,MAAM,MAAM,EAAE;AAAA,YACrD,gBAAgB,KAAK,SAAS;AAAA,YAC9B,eAAe,KAAK,SAAS;AAAA,UAC/B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,YAAY,KAAmB;AAC7B,WAAO,IACJ,SAAS,EACT,WAAW,yDAAyD;AAAA,EACzE;AAAA,EAEA,MAAM,WACJ,SAC6D;AAtMjE;AAuMI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM,oBAAgB;AAAA,MACpB,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD,SAAS;AAAA,MACT,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,kDAA0B,cAAc;AAAA,MACnE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,YAAY,SAAS,WAAW,CAAC;AAEvC,UAAM,YAAY,sBAAsB;AAAA,MACtC,QAAO,qBAAU,YAAV,mBAAmB,UAAnB,YAA4B,CAAC;AAAA,MACpC,YAAY,KAAK,OAAO;AAAA,IAC1B,CAAC;AAED,UAAM,gBAAgB,SAAS;AAE/B,WAAO;AAAA,MACL,MAAM,kBAAiB,qBAAU,YAAV,mBAAmB,UAAnB,YAA4B,CAAC,CAAC;AAAA,MACrD;AAAA,MACA,cAAc,kCAAkC;AAAA,QAC9C,cAAc,UAAU;AAAA,QACxB,cAAc,aAAa,QAAQ,UAAU,SAAS;AAAA,MACxD,CAAC;AAAA,MACD,OAAO;AAAA,QACL,eAAc,oDAAe,qBAAf,YAAmC;AAAA,QACjD,mBAAkB,oDAAe,yBAAf,YAAuC;AAAA,MAC3D;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO,KAAK,UAAU,IAAI;AAChC,UAAM,cAAU;AAAA,MACd,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD;AAAA,MACA,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,yDAAiC,WAAW;AAAA,MACvE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AAEA,UAAMC,cAAa,KAAK,OAAO;AAC/B,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AA/RvC;AAgSY,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,kBAAM,gBAAgB,MAAM;AAE5B,gBAAI,iBAAiB,MAAM;AACzB,sBAAQ;AAAA,gBACN,eAAc,mBAAc,qBAAd,YAAkC;AAAA,gBAChD,mBAAkB,mBAAc,yBAAd,YAAsC;AAAA,cAC1D;AAAA,YACF;AAEA,kBAAM,aAAY,WAAM,eAAN,mBAAmB;AAGrC,gBAAI,aAAa,MAAM;AACrB;AAAA,YACF;AAEA,gBAAI,UAAU,gBAAgB,MAAM;AAClC,6BAAe,kCAAkC;AAAA,gBAC/C,cAAc,UAAU;AAAA,gBACxB;AAAA,cACF,CAAC;AAAA,YACH;AAEA,kBAAM,UAAU,UAAU;AAE1B,gBAAI,WAAW,MAAM;AACnB;AAAA,YACF;AAEA,kBAAM,YAAY,iBAAiB,QAAQ,KAAK;AAChD,gBAAI,aAAa,MAAM;AACrB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW;AAAA,cACb,CAAC;AAAA,YACH;AAEA,kBAAM,iBAAiB,sBAAsB;AAAA,cAC3C,OAAO,QAAQ;AAAA,cACf,YAAAA;AAAA,YACF,CAAC;AAED,gBAAI,kBAAkB,MAAM;AAC1B,yBAAW,YAAY,gBAAgB;AACrC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS;AAAA,kBACnB,eAAe,SAAS;AAAA,gBAC1B,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS;AAAA,kBACnB,MAAM,SAAS;AAAA,gBACjB,CAAC;AAED,+BAAe;AAAA,cACjB;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,UAAU,cAAc,MAAM,CAAC;AAAA,UAC5D;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBA
AgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAEA,SAAS,sBAAsB;AAAA,EAC7B;AAAA,EACA,YAAAA;AACF,GAGG;AACD,QAAM,oBAAoB,MAAM;AAAA,IAC9B,UAAQ,kBAAkB;AAAA,EAC5B;AAMA,SAAO,kBAAkB,WAAW,IAChC,SACA,kBAAkB,IAAI,WAAS;AAAA,IAC7B,cAAc;AAAA,IACd,YAAYA,YAAW;AAAA,IACvB,UAAU,KAAK,aAAa;AAAA,IAC5B,MAAM,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,EAC7C,EAAE;AACR;AAEA,SAAS,iBAAiB,OAA+C;AACvE,QAAM,YAAY,MAAM,OAAO,UAAQ,UAAU,IAAI;AAIrD,SAAO,UAAU,WAAW,IACxB,SACA,UAAU,IAAI,UAAQ,KAAK,IAAI,EAAE,KAAK,EAAE;AAC9C;AAEA,IAAM,gBAAgB,cAAE,OAAO;AAAA,EAC7B,MAAM,cAAE,OAAO;AAAA,EACf,OAAO,cAAE;AAAA,IACP,cAAE,MAAM;AAAA,MACN,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACD,cAAE,OAAO;AAAA,QACP,cAAc,cAAE,OAAO;AAAA,UACrB,MAAM,cAAE,OAAO;AAAA,UACf,MAAM,cAAE,QAAQ;AAAA,QAClB,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC;AAID,IAAM,iBAAiB,cAAE,OAAO;AAAA,EAC9B,YAAY,cAAE;AAAA,IACZ,cAAE,OAAO;AAAA,MACP,SAAS,cAAc,SAAS;AAAA,MAChC,cAAc,cAAE,OAAO,EAAE,SAAS;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,eAAe,cACZ,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO;AAAA,IAC3B,sBAAsB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACzC,iBAAiB,cAAE,OAAO;AAAA,EAC5B,CAAC,EACA,SAAS;AACd,CAAC;AAID,IAAM,cAAc,cAAE,OAAO;AAAA,EAC3B,YAAY,cACT;AAAA,IACC,cAAE,OAAO;AAAA,MACP,SAAS,cAAc,SAAS;AAAA,MAChC,cAAc,cAAE,OAAO,EAAE,SAAS;AAAA,IACpC,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,eAAe,cACZ,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO;AAAA,IAC3B,sBAAsB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACzC,iBAAiB,cAAE,OAAO;AAAA,EAC5B,CAAC,EACA,QAAQ;AACb,CAAC;;;AO7cD,IAAAC,mBAGO;AACP,IAAAC,yBAMO;AACP,IAAAC,cAAkB;AAcX,IAAM,mCAAN,MAEP;AAAA,EAmBE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AACjC,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,wBAAiC;AACnC,WAAO;AAAA,EACT;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,oBAAgB;AAAA,MACpB,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC;AAAA,IACF;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AAAA,MAClD,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,UAAU,OAAO,IAAI,YAAU;AAAA,UAC7B,OAAO,UAAU,KAAK,OAAO;AAAA,UAC7B,SAAS,EAAE,MAAM,QAAQ,OAAO,CAAC,EAAE,MAAM,MAAM,CAAC,EAAE;AAAA,UAClD,sBAAsB,KAAK,SAAS;AAAA,QACtC,EAAE;AAAA,MACJ;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,WAAW,IAAI,UAAQ,KAAK,MAAM;AAAA,MACvD,OAAO;AAAA,MACP,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,gDAAgD,cAAE,OAAO;AAAA,EAC7D,YAAY,cAAE,MAAM,cAAE,OAAO,EAAE,QAAQ,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAC/D,CAAC;;;ARNM,SAAS,yBACd,UAA8C,CAAC,GACnB;AAvG9B;AAwGE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YACA;AAEF,QAAM,aAAa,OAAO;AAAA,IACxB,sBAAkB,mCAAW;AAAA,MAC3B,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,IACD,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAuC,CAAC,MACxC;AAxHJ,QAAAC;AAyHI,eAAI,gCAAgC,SAAS,UAAU;AAAA,MACrD,UAAU;AAAA,MACV;AAAA,MACA,SAAS;AAAA,MACT,aAAYA,MAAA,QAAQ,eAAR,OAAAA,MAAsB;AAAA,MAClC,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA;AAEH,QAAM,uBAAuB,CAC3B,SACA,WAAgD,CAAC,MAEjD,IAAI,iCAAiC,SAAS,UAAU;AAAA,IACtD,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,eAAe;AACxB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AA
KO,IAAM,SAAS,yBAAyB;","names":["import_provider_utils","import_provider_utils","import_zod","import_provider_utils","import_provider","generateId","import_provider","import_provider_utils","import_zod","_a"]}
package/dist/index.mjs
CHANGED
@@ -10,7 +10,8 @@ import {
   combineHeaders,
   createEventSourceResponseHandler,
   createJsonResponseHandler,
-  postJsonToApi
+  postJsonToApi,
+  resolve
 } from "@ai-sdk/provider-utils";
 import { z as z2 } from "zod";
 
@@ -237,10 +238,17 @@ var googleFailedResponseHandler = createJsonErrorResponseHandler({
 import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError2
 } from "@ai-sdk/provider";
-function prepareTools(mode) {
+function prepareTools(mode, useSearchGrounding) {
   var _a, _b;
   const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
   const toolWarnings = [];
+  if (useSearchGrounding) {
+    return {
+      tools: { googleSearchRetrieval: {} },
+      toolConfig: void 0,
+      toolWarnings
+    };
+  }
   if (tools == null) {
     return { tools: void 0, toolConfig: void 0, toolWarnings };
   }
@@ -335,8 +343,9 @@ var GoogleGenerativeAILanguageModel = class {
     this.settings = settings;
     this.config = config;
   }
-  get
-
+  get supportsStructuredOutputs() {
+    var _a;
+    return (_a = this.settings.structuredOutputs) != null ? _a : true;
   }
   get provider() {
     return this.config.provider;
@@ -354,7 +363,7 @@ var GoogleGenerativeAILanguageModel = class {
     responseFormat,
     seed
   }) {
-    var _a;
+    var _a, _b;
     const type = mode.type;
     const warnings = [];
     if (seed != null) {
@@ -376,12 +385,15 @@ var GoogleGenerativeAILanguageModel = class {
       responseMimeType: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? "application/json" : void 0,
       responseSchema: (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && // Google GenAI does not support all OpenAPI Schema features,
       // so this is needed as an escape hatch:
-      this.
+      this.supportsStructuredOutputs ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0
     };
     const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(prompt);
     switch (type) {
       case "regular": {
-        const { tools, toolConfig, toolWarnings } = prepareTools(
+        const { tools, toolConfig, toolWarnings } = prepareTools(
+          mode,
+          (_a = this.settings.useSearchGrounding) != null ? _a : false
+        );
         return {
           args: {
             generationConfig,
@@ -403,7 +415,7 @@ var GoogleGenerativeAILanguageModel = class {
             responseMimeType: "application/json",
             responseSchema: mode.schema != null && // Google GenAI does not support all OpenAPI Schema features,
             // so this is needed as an escape hatch:
-            this.
+            this.supportsStructuredOutputs ? convertJSONSchemaToOpenAPISchema(mode.schema) : void 0
           },
           contents,
           systemInstruction,
@@ -422,7 +434,7 @@ var GoogleGenerativeAILanguageModel = class {
               functionDeclarations: [
                 {
                   name: mode.tool.name,
-                  description: (
+                  description: (_b = mode.tool.description) != null ? _b : "",
                   parameters: convertJSONSchemaToOpenAPISchema(
                     mode.tool.parameters
                   )
@@ -446,14 +458,18 @@ var GoogleGenerativeAILanguageModel = class {
     return url.toString().startsWith("https://generativelanguage.googleapis.com/v1beta/files/");
   }
   async doGenerate(options) {
-    var _a, _b;
+    var _a, _b, _c, _d, _e, _f;
    const { args, warnings } = await this.getArgs(options);
    const body = JSON.stringify(args);
+    const mergedHeaders = combineHeaders(
+      await resolve(this.config.headers),
+      options.headers
+    );
     const { responseHeaders, value: response } = await postJsonToApi({
       url: `${this.config.baseURL}/${getModelPath(
         this.modelId
       )}:generateContent`,
-      headers:
+      headers: mergedHeaders,
       body: args,
       failedResponseHandler: googleFailedResponseHandler,
       successfulResponseHandler: createJsonResponseHandler(responseSchema),
@@ -463,20 +479,20 @@ var GoogleGenerativeAILanguageModel = class {
     const { contents: rawPrompt, ...rawSettings } = args;
     const candidate = response.candidates[0];
     const toolCalls = getToolCallsFromParts({
-      parts: candidate.content.parts,
+      parts: (_b = (_a = candidate.content) == null ? void 0 : _a.parts) != null ? _b : [],
       generateId: this.config.generateId
     });
     const usageMetadata = response.usageMetadata;
     return {
-      text: getTextFromParts(candidate.content.parts),
+      text: getTextFromParts((_d = (_c = candidate.content) == null ? void 0 : _c.parts) != null ? _d : []),
       toolCalls,
       finishReason: mapGoogleGenerativeAIFinishReason({
         finishReason: candidate.finishReason,
         hasToolCalls: toolCalls != null && toolCalls.length > 0
       }),
       usage: {
-        promptTokens: (
-        completionTokens: (
+        promptTokens: (_e = usageMetadata == null ? void 0 : usageMetadata.promptTokenCount) != null ? _e : NaN,
+        completionTokens: (_f = usageMetadata == null ? void 0 : usageMetadata.candidatesTokenCount) != null ? _f : NaN
       },
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
@@ -487,11 +503,15 @@ var GoogleGenerativeAILanguageModel = class {
   async doStream(options) {
     const { args, warnings } = await this.getArgs(options);
     const body = JSON.stringify(args);
+    const headers = combineHeaders(
+      await resolve(this.config.headers),
+      options.headers
+    );
     const { responseHeaders, value: response } = await postJsonToApi({
       url: `${this.config.baseURL}/${getModelPath(
         this.modelId
       )}:streamGenerateContent?alt=sse`,
-      headers
+      headers,
       body: args,
       failedResponseHandler: googleFailedResponseHandler,
       successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),
@@ -617,7 +637,7 @@ var contentSchema = z2.object({
 var responseSchema = z2.object({
   candidates: z2.array(
     z2.object({
-      content: contentSchema,
+      content: contentSchema.optional(),
       finishReason: z2.string().optional()
     })
   ),
@@ -648,7 +668,8 @@ import {
 import {
   combineHeaders as combineHeaders2,
   createJsonResponseHandler as createJsonResponseHandler2,
-  postJsonToApi as postJsonToApi2
+  postJsonToApi as postJsonToApi2,
+  resolve as resolve2
 } from "@ai-sdk/provider-utils";
 import { z as z3 } from "zod";
 var GoogleGenerativeAIEmbeddingModel = class {
@@ -680,9 +701,13 @@ var GoogleGenerativeAIEmbeddingModel = class {
         values
       });
     }
+    const mergedHeaders = combineHeaders2(
+      await resolve2(this.config.headers),
+      headers
+    );
     const { responseHeaders, value: response } = await postJsonToApi2({
       url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,
-      headers:
+      headers: mergedHeaders,
       body: {
         requests: values.map((value) => ({
           model: `models/${this.modelId}`,