@ai-sdk/google 1.1.5 → 1.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.d.mts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +62 -12
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +62 -12
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.d.mts +3 -2
- package/internal/dist/index.d.ts +3 -2
- package/internal/dist/index.js +56 -12
- package/internal/dist/index.js.map +1 -1
- package/internal/dist/index.mjs +56 -12
- package/internal/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,17 @@
 # @ai-sdk/google
 
+## 1.1.7
+
+### Patch Changes
+
+- d399f25: feat (provider/google-vertex): support public file urls in messages
+
+## 1.1.6
+
+### Patch Changes
+
+- e012cd8: feat (provider/google): add reasoning support
+
 ## 1.1.5
 
 ### Patch Changes
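
In consumer terms, 1.1.6 means the Gemini thinking model can return a thought summary alongside its answer, and 1.1.7 means message file parts may reference files already uploaded to the Gemini Files API by URL instead of inlining the bytes. A minimal sketch of the reasoning feature, assuming an AI SDK version whose `generateText` result exposes the `reasoning` field (the prompt is a placeholder):

```ts
import { google } from '@ai-sdk/google';
import { generateText } from 'ai';

// 'gemini-2.0-flash-thinking-exp' is the model id added to the type union below.
const { text, reasoning } = await generateText({
  model: google('gemini-2.0-flash-thinking-exp'),
  prompt: 'How many prime numbers are there below 30?',
});

console.log(reasoning); // the model's thought summary, if it emitted one
console.log(text); // the final answer
```
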
package/dist/index.d.mts
CHANGED
@@ -2,7 +2,7 @@ import { FetchFunction } from '@ai-sdk/provider-utils';
 import { ProviderV1, LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';
 import { z } from 'zod';
 
-type GoogleGenerativeAIModelId = 'gemini-2.0-flash-exp' | 'gemini-1.5-flash' | 'gemini-1.5-flash-latest' | 'gemini-1.5-flash-001' | 'gemini-1.5-flash-002' | 'gemini-1.5-flash-exp-0827' | 'gemini-1.5-flash-8b' | 'gemini-1.5-flash-8b-latest' | 'gemini-1.5-flash-8b-exp-0924' | 'gemini-1.5-flash-8b-exp-0827' | 'gemini-1.5-pro-latest' | 'gemini-1.5-pro' | 'gemini-1.5-pro-001' | 'gemini-1.5-pro-002' | 'gemini-1.5-pro-exp-0827' | 'gemini-1.0-pro' | (string & {});
+type GoogleGenerativeAIModelId = 'gemini-2.0-flash-thinking-exp' | 'gemini-2.0-flash-exp' | 'gemini-1.5-flash' | 'gemini-1.5-flash-latest' | 'gemini-1.5-flash-001' | 'gemini-1.5-flash-002' | 'gemini-1.5-flash-exp-0827' | 'gemini-1.5-flash-8b' | 'gemini-1.5-flash-8b-latest' | 'gemini-1.5-flash-8b-exp-0924' | 'gemini-1.5-flash-8b-exp-0827' | 'gemini-1.5-pro-latest' | 'gemini-1.5-pro' | 'gemini-1.5-pro-001' | 'gemini-1.5-pro-002' | 'gemini-1.5-pro-exp-0827' | 'gemini-1.0-pro' | (string & {});
 interface GoogleGenerativeAISettings {
     /**
     Optional.
package/dist/index.d.ts
CHANGED
@@ -2,7 +2,7 @@ import { FetchFunction } from '@ai-sdk/provider-utils';
 import { ProviderV1, LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';
 import { z } from 'zod';
 
-type GoogleGenerativeAIModelId = 'gemini-2.0-flash-exp' | 'gemini-1.5-flash' | 'gemini-1.5-flash-latest' | 'gemini-1.5-flash-001' | 'gemini-1.5-flash-002' | 'gemini-1.5-flash-exp-0827' | 'gemini-1.5-flash-8b' | 'gemini-1.5-flash-8b-latest' | 'gemini-1.5-flash-8b-exp-0924' | 'gemini-1.5-flash-8b-exp-0827' | 'gemini-1.5-pro-latest' | 'gemini-1.5-pro' | 'gemini-1.5-pro-001' | 'gemini-1.5-pro-002' | 'gemini-1.5-pro-exp-0827' | 'gemini-1.0-pro' | (string & {});
+type GoogleGenerativeAIModelId = 'gemini-2.0-flash-thinking-exp' | 'gemini-2.0-flash-exp' | 'gemini-1.5-flash' | 'gemini-1.5-flash-latest' | 'gemini-1.5-flash-001' | 'gemini-1.5-flash-002' | 'gemini-1.5-flash-exp-0827' | 'gemini-1.5-flash-8b' | 'gemini-1.5-flash-8b-latest' | 'gemini-1.5-flash-8b-exp-0924' | 'gemini-1.5-flash-8b-exp-0827' | 'gemini-1.5-pro-latest' | 'gemini-1.5-pro' | 'gemini-1.5-pro-001' | 'gemini-1.5-pro-002' | 'gemini-1.5-pro-exp-0827' | 'gemini-1.0-pro' | (string & {});
 interface GoogleGenerativeAISettings {
     /**
     Optional.
package/dist/index.js
CHANGED
@@ -406,6 +406,10 @@ var GoogleGenerativeAILanguageModel = class {
       this.supportsStructuredOutputs ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0,
       ...this.settings.audioTimestamp && {
         audioTimestamp: this.settings.audioTimestamp
+      },
+      // reasoning models:
+      ...isReasoningModel(this.modelId) && {
+        thinking_config: { include_thoughts: true }
       }
     };
     const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(prompt);
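
The `...condition && { ... }` pattern used for `thinking_config` works because spreading a falsy value into an object literal is a no-op, so the key only shows up in the request body for reasoning models. A self-contained sketch of the pattern (the temperature value is illustrative):

```ts
// Spreading `false` contributes nothing; spreading the object contributes its keys.
function buildGenerationConfig(isReasoningModel: boolean) {
  return {
    temperature: 0.7,
    ...(isReasoningModel && { thinking_config: { include_thoughts: true } }),
  };
}

console.log(buildGenerationConfig(false)); // { temperature: 0.7 }
console.log(buildGenerationConfig(true)); // { temperature: 0.7, thinking_config: { include_thoughts: true } }
```
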
@@ -477,7 +481,7 @@ var GoogleGenerativeAILanguageModel = class {
     }
   }
   supportsUrl(url) {
-    return url.toString().startsWith("https://generativelanguage.googleapis.com/v1beta/files/");
+    return this.config.isSupportedUrl(url);
   }
   async doGenerate(options) {
     var _a, _b, _c, _d, _e, _f, _g, _h;
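
`supportsUrl` previously hard-coded the Gemini Files API prefix; it now delegates to a predicate injected through the model config, which lets the Vertex provider mentioned in the changelog reuse this class with its own URL rules. The updated config shape, as it appears in the typings bundled in this diff:

```ts
import { FetchFunction, Resolvable } from '@ai-sdk/provider-utils';

export type GoogleGenerativeAIConfig = {
  provider: string;
  baseURL: string;
  headers: Resolvable<Record<string, string | undefined>>;
  fetch?: FetchFunction;
  generateId: () => string;
  // new: each provider decides which file URLs may be forwarded untouched
  isSupportedUrl: (url: URL) => boolean;
};
```
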
@@ -487,10 +491,14 @@ var GoogleGenerativeAILanguageModel = class {
       await (0, import_provider_utils3.resolve)(this.config.headers),
       options.headers
     );
+    let url = `${this.config.baseURL}/${getModelPath(
+      this.modelId
+    )}:generateContent`;
+    if (isReasoningModel(this.modelId)) {
+      url = url.replace("v1beta", "v1alpha");
+    }
     const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
-      url: `${this.config.baseURL}/${getModelPath(
-        this.modelId
-      )}:generateContent`,
+      url,
       headers: mergedHeaders,
       body: args,
       failedResponseHandler: googleFailedResponseHandler,
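
The request URL is now built first and rewritten from `v1beta` to `v1alpha` for the thinking model, apparently because the thought-returning variant of the API lives on the alpha surface. A standalone sketch of the resulting endpoints (the base URL is the provider default, and `getModelPath` is the helper shown in the bundled source, which prefixes bare ids with `models/`):

```ts
const baseURL = 'https://generativelanguage.googleapis.com/v1beta';

function getModelPath(modelId: string): string {
  return modelId.includes('/') ? modelId : `models/${modelId}`;
}

function generateContentUrl(modelId: string): string {
  let url = `${baseURL}/${getModelPath(modelId)}:generateContent`;
  if (modelId === 'gemini-2.0-flash-thinking-exp') {
    url = url.replace('v1beta', 'v1alpha');
  }
  return url;
}

console.log(generateContentUrl('gemini-1.5-pro'));
// https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-pro:generateContent
console.log(generateContentUrl('gemini-2.0-flash-thinking-exp'));
// https://generativelanguage.googleapis.com/v1alpha/models/gemini-2.0-flash-thinking-exp:generateContent
```
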
@@ -506,7 +514,14 @@ var GoogleGenerativeAILanguageModel = class {
     });
     const usageMetadata = response.usageMetadata;
     return {
-      text: getTextFromParts(
+      text: getTextFromParts({
+        parts: (_c = candidate.content) == null ? void 0 : _c.parts,
+        isThought: false
+      }),
+      reasoning: getTextFromParts({
+        parts: (_d = candidate.content) == null ? void 0 : _d.parts,
+        isThought: true
+      }),
       toolCalls,
       finishReason: mapGoogleGenerativeAIFinishReason({
         finishReason: candidate.finishReason,
@@ -535,10 +550,14 @@ var GoogleGenerativeAILanguageModel = class {
       await (0, import_provider_utils3.resolve)(this.config.headers),
       options.headers
     );
+    let url = `${this.config.baseURL}/${getModelPath(
+      this.modelId
+    )}:streamGenerateContent?alt=sse`;
+    if (isReasoningModel(this.modelId)) {
+      url = url.replace("v1beta", "v1alpha");
+    }
     const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
-      url: `${this.config.baseURL}/${getModelPath(
-        this.modelId
-      )}:streamGenerateContent?alt=sse`,
+      url,
       headers,
       body: args,
       failedResponseHandler: googleFailedResponseHandler,
@@ -592,13 +611,26 @@ var GoogleGenerativeAILanguageModel = class {
             if (content == null) {
              return;
            }
-            const deltaText = getTextFromParts(content.parts);
+            const deltaText = getTextFromParts({
+              parts: content.parts,
+              isThought: false
+            });
             if (deltaText != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: deltaText
              });
            }
+            const reasoningText = getTextFromParts({
+              parts: content.parts,
+              isThought: true
+            });
+            if (reasoningText != null) {
+              controller.enqueue({
+                type: "reasoning",
+                textDelta: reasoningText
+              });
+            }
             const toolCallDeltas = getToolCallsFromParts({
              parts: content.parts,
              generateId: generateId2
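
On the streaming path, thought parts are now emitted as separate `reasoning` stream parts instead of being folded into `text-delta`s. A hedged sketch of consuming them, assuming an `ai` package version whose `streamText` full stream carries these part types (prompt and logging are illustrative):

```ts
import { google } from '@ai-sdk/google';
import { streamText } from 'ai';

const result = streamText({
  model: google('gemini-2.0-flash-thinking-exp'),
  prompt: 'Plan a three-day trip to Kyoto.',
});

for await (const part of result.fullStream) {
  if (part.type === 'reasoning') {
    process.stdout.write(`[thinking] ${part.textDelta}`);
  } else if (part.type === 'text-delta') {
    process.stdout.write(part.textDelta);
  }
}
```
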
@@ -654,8 +686,16 @@ function getToolCallsFromParts({
     args: JSON.stringify(part.functionCall.args)
   }));
 }
-function getTextFromParts(parts) {
-  const textParts = parts.filter((part) => "text" in part);
+function getTextFromParts({
+  parts,
+  isThought
+}) {
+  const textParts = (parts != null ? parts : []).filter(
+    (part) => {
+      var _a;
+      return "text" in part && ((_a = part.thought) != null ? _a : false) === isThought;
+    }
+  );
   return textParts.length === 0 ? void 0 : textParts.map((part) => part.text).join("");
 }
 var contentSchema = import_zod2.z.object({
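
`getTextFromParts` now takes `{ parts, isThought }` and filters text parts by their `thought` flag, so the same helper yields either the visible answer (`isThought: false`) or the thought summary (`isThought: true`). Its behavior restated on made-up sample data:

```ts
type Part = { text: string; thought?: boolean | null };

const parts: Part[] = [
  { text: 'Counting the letters one by one... ', thought: true },
  { text: 'There are three r\'s in "strawberry".' },
];

// Mirrors the compiled helper: a missing `thought` flag counts as false, and
// an empty selection becomes undefined.
const joinText = (isThought: boolean) =>
  parts
    .filter(part => (part.thought ?? false) === isThought)
    .map(part => part.text)
    .join('') || undefined;

console.log(joinText(false)); // visible answer text
console.log(joinText(true)); // thought summary
```
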
@@ -663,7 +703,8 @@ var contentSchema = import_zod2.z.object({
   parts: import_zod2.z.array(
     import_zod2.z.union([
       import_zod2.z.object({
-        text: import_zod2.z.string()
+        text: import_zod2.z.string(),
+        thought: import_zod2.z.boolean().nullish()
       }),
       import_zod2.z.object({
         functionCall: import_zod2.z.object({
@@ -751,6 +792,9 @@ var chunkSchema = import_zod2.z.object({
     totalTokenCount: import_zod2.z.number().nullish()
   }).nullish()
 });
+function isReasoningModel(modelId) {
+  return modelId === "gemini-2.0-flash-thinking-exp";
+}
 
 // src/google-generative-ai-embedding-model.ts
 var import_provider3 = require("@ai-sdk/provider");
@@ -817,6 +861,11 @@ var googleGenerativeAITextEmbeddingResponseSchema = import_zod3.z.object({
   embeddings: import_zod3.z.array(import_zod3.z.object({ values: import_zod3.z.array(import_zod3.z.number()) }))
 });
 
+// src/google-supported-file-url.ts
+function isSupportedFileUrl(url) {
+  return url.toString().startsWith("https://generativelanguage.googleapis.com/v1beta/files/");
+}
+
 // src/google-provider.ts
 function createGoogleGenerativeAI(options = {}) {
   var _a;
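
The new `isSupportedFileUrl` predicate (wired into the provider in the next hunk) tells the SDK core which file-part URLs can be forwarded to Gemini as `fileData` references instead of being downloaded and inlined. A hedged usage sketch, assuming the `ai` package's message format for file parts; the file URI is a made-up placeholder under the accepted `https://generativelanguage.googleapis.com/v1beta/files/` prefix:

```ts
import { google } from '@ai-sdk/google';
import { generateText } from 'ai';

const { text } = await generateText({
  model: google('gemini-1.5-pro'),
  messages: [
    {
      role: 'user',
      content: [
        { type: 'text', text: 'Summarize this document.' },
        {
          type: 'file',
          mimeType: 'application/pdf',
          // A URL under the supported prefix is passed through as fileData.
          data: new URL(
            'https://generativelanguage.googleapis.com/v1beta/files/example-file-id',
          ),
        },
      ],
    },
  ],
});

console.log(text);
```
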
@@ -836,6 +885,7 @@ function createGoogleGenerativeAI(options = {}) {
       baseURL,
       headers: getHeaders,
       generateId: (_a2 = options.generateId) != null ? _a2 : import_provider_utils5.generateId,
+      isSupportedUrl: isSupportedFileUrl,
       fetch: options.fetch
     });
   };
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/index.ts","../src/google-provider.ts","../src/google-generative-ai-language-model.ts","../src/convert-json-schema-to-openapi-schema.ts","../src/convert-to-google-generative-ai-messages.ts","../src/get-model-path.ts","../src/google-error.ts","../src/google-prepare-tools.ts","../src/map-google-generative-ai-finish-reason.ts","../src/google-generative-ai-embedding-model.ts"],"sourcesContent":["export { createGoogleGenerativeAI, google } from './google-provider';\nexport type { GoogleErrorData } from './google-error';\nexport type { GoogleGenerativeAIProviderMetadata } from './google-generative-ai-prompt';\nexport type {\n GoogleGenerativeAIProvider,\n GoogleGenerativeAIProviderSettings,\n} from './google-provider';\n","import {\n FetchFunction,\n generateId,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { GoogleGenerativeAILanguageModel } from './google-generative-ai-language-model';\nimport {\n GoogleGenerativeAIModelId,\n GoogleGenerativeAISettings,\n} from './google-generative-ai-settings';\nimport { GoogleGenerativeAIEmbeddingModel } from './google-generative-ai-embedding-model';\nimport {\n GoogleGenerativeAIEmbeddingModelId,\n GoogleGenerativeAIEmbeddingSettings,\n} from './google-generative-ai-embedding-settings';\nimport {\n EmbeddingModelV1,\n LanguageModelV1,\n ProviderV1,\n} from '@ai-sdk/provider';\n\nexport interface GoogleGenerativeAIProvider extends ProviderV1 {\n (\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): LanguageModelV1;\n\n languageModel(\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): LanguageModelV1;\n\n chat(\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): LanguageModelV1;\n\n /**\n * @deprecated Use `chat()` instead.\n */\n generativeAI(\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): LanguageModelV1;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n embedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings?: GoogleGenerativeAIEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n textEmbedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings?: GoogleGenerativeAIEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n textEmbeddingModel(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings?: GoogleGenerativeAIEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n}\n\nexport interface GoogleGenerativeAIProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://generativelanguage.googleapis.com/v1beta`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `x-goog-api-key` header.\nIt defaults to the `GOOGLE_GENERATIVE_AI_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string | undefined>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. 
testing.\n */\n fetch?: FetchFunction;\n\n /**\nOptional function to generate a unique ID for each request.\n */\n generateId?: () => string;\n}\n\n/**\nCreate a Google Generative AI provider instance.\n */\nexport function createGoogleGenerativeAI(\n options: GoogleGenerativeAIProviderSettings = {},\n): GoogleGenerativeAIProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ??\n 'https://generativelanguage.googleapis.com/v1beta';\n\n const getHeaders = () => ({\n 'x-goog-api-key': loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'GOOGLE_GENERATIVE_AI_API_KEY',\n description: 'Google Generative AI',\n }),\n ...options.headers,\n });\n\n const createChatModel = (\n modelId: GoogleGenerativeAIModelId,\n settings: GoogleGenerativeAISettings = {},\n ) =>\n new GoogleGenerativeAILanguageModel(modelId, settings, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n generateId: options.generateId ?? generateId,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings: GoogleGenerativeAIEmbeddingSettings = {},\n ) =>\n new GoogleGenerativeAIEmbeddingModel(modelId, settings, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The Google Generative AI model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId, settings);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.generativeAI = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n return provider as GoogleGenerativeAIProvider;\n}\n\n/**\nDefault Google Generative AI provider instance.\n */\nexport const google = createGoogleGenerativeAI();\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n LanguageModelV1ProviderMetadata,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n Resolvable,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\nimport { convertToGoogleGenerativeAIMessages } from './convert-to-google-generative-ai-messages';\nimport { getModelPath } from './get-model-path';\nimport { googleFailedResponseHandler } from './google-error';\nimport { GoogleGenerativeAIContentPart } from './google-generative-ai-prompt';\nimport {\n GoogleGenerativeAIModelId,\n InternalGoogleGenerativeAISettings,\n} from './google-generative-ai-settings';\nimport { prepareTools } from './google-prepare-tools';\nimport { mapGoogleGenerativeAIFinishReason } from './map-google-generative-ai-finish-reason';\n\ntype GoogleGenerativeAIConfig = {\n provider: string;\n baseURL: string;\n headers: Resolvable<Record<string, string | undefined>>;\n generateId: () => string;\n fetch?: FetchFunction;\n};\n\nexport class GoogleGenerativeAILanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = 'json';\n readonly supportsImageUrls = false;\n\n get supportsStructuredOutputs() {\n return 
this.settings.structuredOutputs ?? true;\n }\n\n readonly modelId: GoogleGenerativeAIModelId;\n readonly settings: InternalGoogleGenerativeAISettings;\n\n private readonly config: GoogleGenerativeAIConfig;\n\n constructor(\n modelId: GoogleGenerativeAIModelId,\n settings: InternalGoogleGenerativeAISettings,\n config: GoogleGenerativeAIConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private async getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (seed != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'seed',\n });\n }\n\n const generationConfig = {\n // standardized settings:\n maxOutputTokens: maxTokens,\n temperature,\n topK,\n topP,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n\n // response format:\n responseMimeType:\n responseFormat?.type === 'json' ? 'application/json' : undefined,\n responseSchema:\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n // Google GenAI does not support all OpenAPI Schema features,\n // so this is needed as an escape hatch:\n this.supportsStructuredOutputs\n ? convertJSONSchemaToOpenAPISchema(responseFormat.schema)\n : undefined,\n ...(this.settings.audioTimestamp && {\n audioTimestamp: this.settings.audioTimestamp,\n }),\n };\n\n const { contents, systemInstruction } =\n convertToGoogleGenerativeAIMessages(prompt);\n\n switch (type) {\n case 'regular': {\n const { tools, toolConfig, toolWarnings } = prepareTools(\n mode,\n this.settings.useSearchGrounding ?? false,\n this.modelId.includes('gemini-2'),\n );\n\n return {\n args: {\n generationConfig,\n contents,\n systemInstruction,\n safetySettings: this.settings.safetySettings,\n tools,\n toolConfig,\n cachedContent: this.settings.cachedContent,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case 'object-json': {\n return {\n args: {\n generationConfig: {\n ...generationConfig,\n responseMimeType: 'application/json',\n responseSchema:\n mode.schema != null &&\n // Google GenAI does not support all OpenAPI Schema features,\n // so this is needed as an escape hatch:\n this.supportsStructuredOutputs\n ? convertJSONSchemaToOpenAPISchema(mode.schema)\n : undefined,\n },\n contents,\n systemInstruction,\n safetySettings: this.settings.safetySettings,\n cachedContent: this.settings.cachedContent,\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n generationConfig,\n contents,\n tools: {\n functionDeclarations: [\n {\n name: mode.tool.name,\n description: mode.tool.description ?? 
'',\n parameters: convertJSONSchemaToOpenAPISchema(\n mode.tool.parameters,\n ),\n },\n ],\n },\n toolConfig: { functionCallingConfig: { mode: 'ANY' } },\n safetySettings: this.settings.safetySettings,\n cachedContent: this.settings.cachedContent,\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n supportsUrl(url: URL): boolean {\n return url\n .toString()\n .startsWith('https://generativelanguage.googleapis.com/v1beta/files/');\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = JSON.stringify(args);\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:generateContent`,\n headers: mergedHeaders,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(responseSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { contents: rawPrompt, ...rawSettings } = args;\n const candidate = response.candidates[0];\n\n const toolCalls = getToolCallsFromParts({\n parts: candidate.content?.parts ?? [],\n generateId: this.config.generateId,\n });\n\n const usageMetadata = response.usageMetadata;\n\n return {\n text: getTextFromParts(candidate.content?.parts ?? []),\n toolCalls,\n finishReason: mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls: toolCalls != null && toolCalls.length > 0,\n }),\n usage: {\n promptTokens: usageMetadata?.promptTokenCount ?? NaN,\n completionTokens: usageMetadata?.candidatesTokenCount ?? NaN,\n },\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n providerMetadata: {\n google: {\n groundingMetadata: candidate.groundingMetadata ?? null,\n safetyRatings: candidate.safetyRatings ?? 
null,\n },\n },\n request: { body },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = JSON.stringify(args);\n const headers = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:streamGenerateContent?alt=sse`,\n headers,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { contents: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let providerMetadata: LanguageModelV1ProviderMetadata | undefined =\n undefined;\n\n const generateId = this.config.generateId;\n let hasToolCalls = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n const usageMetadata = value.usageMetadata;\n\n if (usageMetadata != null) {\n usage = {\n promptTokens: usageMetadata.promptTokenCount ?? NaN,\n completionTokens: usageMetadata.candidatesTokenCount ?? NaN,\n };\n }\n\n const candidate = value.candidates?.[0];\n\n // sometimes the API returns an empty candidates array\n if (candidate == null) {\n return;\n }\n\n if (candidate.finishReason != null) {\n finishReason = mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls,\n });\n\n providerMetadata = {\n google: {\n groundingMetadata: candidate.groundingMetadata ?? null,\n safetyRatings: candidate.safetyRatings ?? 
null,\n },\n };\n }\n\n const content = candidate.content;\n\n if (content == null) {\n return;\n }\n\n const deltaText = getTextFromParts(content.parts);\n if (deltaText != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: deltaText,\n });\n }\n\n const toolCallDeltas = getToolCallsFromParts({\n parts: content.parts,\n generateId,\n });\n\n if (toolCallDeltas != null) {\n for (const toolCall of toolCallDeltas) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n argsTextDelta: toolCall.args,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n args: toolCall.args,\n });\n\n hasToolCalls = true;\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n providerMetadata,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body },\n };\n }\n}\n\nfunction getToolCallsFromParts({\n parts,\n generateId,\n}: {\n parts: z.infer<typeof contentSchema>['parts'];\n generateId: () => string;\n}) {\n const functionCallParts = parts.filter(\n part => 'functionCall' in part,\n ) as Array<\n GoogleGenerativeAIContentPart & {\n functionCall: { name: string; args: unknown };\n }\n >;\n\n return functionCallParts.length === 0\n ? undefined\n : functionCallParts.map(part => ({\n toolCallType: 'function' as const,\n toolCallId: generateId(),\n toolName: part.functionCall.name,\n args: JSON.stringify(part.functionCall.args),\n }));\n}\n\nfunction getTextFromParts(parts: z.infer<typeof contentSchema>['parts']) {\n const textParts = parts.filter(part => 'text' in part) as Array<\n GoogleGenerativeAIContentPart & { text: string }\n >;\n\n return textParts.length === 0\n ? 
undefined\n : textParts.map(part => part.text).join('');\n}\n\nconst contentSchema = z.object({\n role: z.string(),\n parts: z.array(\n z.union([\n z.object({\n text: z.string(),\n }),\n z.object({\n functionCall: z.object({\n name: z.string(),\n args: z.unknown(),\n }),\n }),\n ]),\n ),\n});\n\n// https://ai.google.dev/gemini-api/docs/grounding\n// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/ground-gemini#ground-to-search\nexport const groundingMetadataSchema = z.object({\n webSearchQueries: z.array(z.string()).nullish(),\n retrievalQueries: z.array(z.string()).nullish(),\n searchEntryPoint: z\n .object({\n renderedContent: z.string(),\n })\n .nullish(),\n groundingChunks: z\n .array(\n z.object({\n web: z\n .object({\n uri: z.string(),\n title: z.string(),\n })\n .nullish(),\n retrievedContext: z\n .object({\n uri: z.string(),\n title: z.string(),\n })\n .nullish(),\n }),\n )\n .nullish(),\n groundingSupports: z\n .array(\n z.object({\n segment: z.object({\n startIndex: z.number().nullish(),\n endIndex: z.number().nullish(),\n text: z.string().nullish(),\n }),\n segment_text: z.string().nullish(),\n groundingChunkIndices: z.array(z.number()).nullish(),\n supportChunkIndices: z.array(z.number()).nullish(),\n confidenceScores: z.array(z.number()).nullish(),\n confidenceScore: z.array(z.number()).nullish(),\n }),\n )\n .nullish(),\n retrievalMetadata: z\n .union([\n z.object({\n webDynamicRetrievalScore: z.number(),\n }),\n z.object({}),\n ])\n .nullish(),\n});\n\n// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/configure-safety-filters\nexport const safetyRatingSchema = z.object({\n category: z.string(),\n probability: z.string(),\n probabilityScore: z.number().nullish(),\n severity: z.string().nullish(),\n severityScore: z.number().nullish(),\n blocked: z.boolean().nullish(),\n});\n\nconst responseSchema = z.object({\n candidates: z.array(\n z.object({\n content: contentSchema.nullish(),\n finishReason: z.string().nullish(),\n safetyRatings: z.array(safetyRatingSchema).nullish(),\n groundingMetadata: groundingMetadataSchema.nullish(),\n }),\n ),\n usageMetadata: z\n .object({\n promptTokenCount: z.number().nullish(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number().nullish(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst chunkSchema = z.object({\n candidates: z\n .array(\n z.object({\n content: contentSchema.nullish(),\n finishReason: z.string().nullish(),\n safetyRatings: z.array(safetyRatingSchema).nullish(),\n groundingMetadata: groundingMetadataSchema.nullish(),\n }),\n )\n .nullish(),\n usageMetadata: z\n .object({\n promptTokenCount: z.number().nullish(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number().nullish(),\n })\n .nullish(),\n});\n","import { JSONSchema7Definition } from '@ai-sdk/provider';\n\n/**\n * Converts JSON Schema 7 to OpenAPI Schema 3.0\n */\nexport function convertJSONSchemaToOpenAPISchema(\n jsonSchema: JSONSchema7Definition,\n): unknown {\n // parameters need to be undefined if they are empty objects:\n if (isEmptyObjectSchema(jsonSchema)) {\n return undefined;\n }\n\n if (typeof jsonSchema === 'boolean') {\n return { type: 'boolean', properties: {} };\n }\n\n const {\n type,\n description,\n required,\n properties,\n items,\n allOf,\n anyOf,\n oneOf,\n format,\n const: constValue,\n minLength,\n enum: enumValues,\n } = 
jsonSchema;\n\n const result: Record<string, unknown> = {};\n\n if (description) result.description = description;\n if (required) result.required = required;\n if (format) result.format = format;\n\n if (constValue !== undefined) {\n result.enum = [constValue];\n }\n\n // Handle type\n if (type) {\n if (Array.isArray(type)) {\n if (type.includes('null')) {\n result.type = type.filter(t => t !== 'null')[0];\n result.nullable = true;\n } else {\n result.type = type;\n }\n } else if (type === 'null') {\n result.type = 'null';\n } else {\n result.type = type;\n }\n }\n\n // Handle enum\n if (enumValues !== undefined) {\n result.enum = enumValues;\n }\n\n if (properties != null) {\n result.properties = Object.entries(properties).reduce(\n (acc, [key, value]) => {\n acc[key] = convertJSONSchemaToOpenAPISchema(value);\n return acc;\n },\n {} as Record<string, unknown>,\n );\n }\n\n if (items) {\n result.items = Array.isArray(items)\n ? items.map(convertJSONSchemaToOpenAPISchema)\n : convertJSONSchemaToOpenAPISchema(items);\n }\n\n if (allOf) {\n result.allOf = allOf.map(convertJSONSchemaToOpenAPISchema);\n }\n if (anyOf) {\n result.anyOf = anyOf.map(convertJSONSchemaToOpenAPISchema);\n }\n if (oneOf) {\n result.oneOf = oneOf.map(convertJSONSchemaToOpenAPISchema);\n }\n\n if (minLength !== undefined) {\n result.minLength = minLength;\n }\n\n return result;\n}\n\nfunction isEmptyObjectSchema(jsonSchema: JSONSchema7Definition): boolean {\n return (\n jsonSchema != null &&\n typeof jsonSchema === 'object' &&\n jsonSchema.type === 'object' &&\n (jsonSchema.properties == null ||\n Object.keys(jsonSchema.properties).length === 0)\n );\n}\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport {\n GoogleGenerativeAIContent,\n GoogleGenerativeAIContentPart,\n GoogleGenerativeAIPrompt,\n} from './google-generative-ai-prompt';\n\nexport function convertToGoogleGenerativeAIMessages(\n prompt: LanguageModelV1Prompt,\n): GoogleGenerativeAIPrompt {\n const systemInstructionParts: Array<{ text: string }> = [];\n const contents: Array<GoogleGenerativeAIContent> = [];\n let systemMessagesAllowed = true;\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n if (!systemMessagesAllowed) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'system messages are only supported at the beginning of the conversation',\n });\n }\n\n systemInstructionParts.push({ text: content });\n break;\n }\n\n case 'user': {\n systemMessagesAllowed = false;\n\n const parts: GoogleGenerativeAIContentPart[] = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n parts.push({ text: part.text });\n break;\n }\n\n case 'image': {\n parts.push(\n part.image instanceof URL\n ? {\n fileData: {\n mimeType: part.mimeType ?? 'image/jpeg',\n fileUri: part.image.toString(),\n },\n }\n : {\n inlineData: {\n mimeType: part.mimeType ?? 'image/jpeg',\n data: convertUint8ArrayToBase64(part.image),\n },\n },\n );\n\n break;\n }\n\n case 'file': {\n parts.push(\n part.data instanceof URL\n ? 
{\n fileData: {\n mimeType: part.mimeType,\n fileUri: part.data.toString(),\n },\n }\n : {\n inlineData: {\n mimeType: part.mimeType,\n data: part.data,\n },\n },\n );\n\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = part;\n throw new UnsupportedFunctionalityError({\n functionality: `prompt part: ${_exhaustiveCheck}`,\n });\n }\n }\n }\n\n contents.push({ role: 'user', parts });\n break;\n }\n\n case 'assistant': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'model',\n parts: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text.length === 0\n ? undefined\n : { text: part.text };\n }\n case 'tool-call': {\n return {\n functionCall: {\n name: part.toolName,\n args: part.args,\n },\n };\n }\n }\n })\n .filter(\n part => part !== undefined,\n ) as GoogleGenerativeAIContentPart[],\n });\n break;\n }\n\n case 'tool': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'user',\n parts: content.map(part => ({\n functionResponse: {\n name: part.toolName,\n response: {\n name: part.toolName,\n content: part.result,\n },\n },\n })),\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return {\n systemInstruction:\n systemInstructionParts.length > 0\n ? { parts: systemInstructionParts }\n : undefined,\n contents,\n };\n}\n","export function getModelPath(modelId: string): string {\n return modelId.includes('/') ? modelId : `models/${modelId}`;\n}\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst googleErrorDataSchema = z.object({\n error: z.object({\n code: z.number().nullable(),\n message: z.string(),\n status: z.string(),\n }),\n});\n\nexport type GoogleErrorData = z.infer<typeof googleErrorDataSchema>;\n\nexport const googleFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: googleErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\n\nexport function prepareTools(\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n },\n useSearchGrounding: boolean,\n isGemini2: boolean,\n): {\n tools:\n | undefined\n | {\n functionDeclarations: Array<{\n name: string;\n description: string | undefined;\n parameters: unknown;\n }>;\n }\n | { googleSearchRetrieval: Record<string, never> }\n | { googleSearch: Record<string, never> };\n toolConfig:\n | undefined\n | {\n functionCallingConfig: {\n mode: 'AUTO' | 'NONE' | 'ANY';\n allowedFunctionNames?: string[];\n };\n };\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n const tools = mode.tools?.length ? mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (useSearchGrounding) {\n return {\n tools: isGemini2 ? { googleSearch: {} } : { googleSearchRetrieval: {} },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n if (tools == null) {\n return { tools: undefined, toolConfig: undefined, toolWarnings };\n }\n\n const functionDeclarations = [];\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n functionDeclarations.push({\n name: tool.name,\n description: tool.description ?? 
'',\n parameters: convertJSONSchemaToOpenAPISchema(tool.parameters),\n });\n }\n }\n\n const toolChoice = mode.toolChoice;\n\n if (toolChoice == null) {\n return {\n tools: { functionDeclarations },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'AUTO' } },\n toolWarnings,\n };\n case 'none':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'NONE' } },\n toolWarnings,\n };\n case 'required':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'ANY' } },\n toolWarnings,\n };\n case 'tool':\n return {\n tools: { functionDeclarations },\n toolConfig: {\n functionCallingConfig: {\n mode: 'ANY',\n allowedFunctionNames: [toolChoice.toolName],\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapGoogleGenerativeAIFinishReason({\n finishReason,\n hasToolCalls,\n}: {\n finishReason: string | null | undefined;\n hasToolCalls: boolean;\n}): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'STOP':\n return hasToolCalls ? 'tool-calls' : 'stop';\n case 'MAX_TOKENS':\n return 'length';\n case 'RECITATION':\n case 'SAFETY':\n return 'content-filter';\n case 'FINISH_REASON_UNSPECIFIED':\n case 'OTHER':\n return 'other';\n default:\n return 'unknown';\n }\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { googleFailedResponseHandler } from './google-error';\nimport {\n GoogleGenerativeAIEmbeddingModelId,\n GoogleGenerativeAIEmbeddingSettings,\n} from './google-generative-ai-embedding-settings';\n\ntype GoogleGenerativeAIEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class GoogleGenerativeAIEmbeddingModel\n implements EmbeddingModelV1<string>\n{\n readonly specificationVersion = 'v1';\n readonly modelId: GoogleGenerativeAIEmbeddingModelId;\n\n private readonly config: GoogleGenerativeAIEmbeddingConfig;\n private readonly settings: GoogleGenerativeAIEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return true;\n }\n\n constructor(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings: GoogleGenerativeAIEmbeddingSettings,\n config: GoogleGenerativeAIEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n 
headers,\n );\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,\n headers: mergedHeaders,\n body: {\n requests: values.map(value => ({\n model: `models/${this.modelId}`,\n content: { role: 'user', parts: [{ text: value }] },\n outputDimensionality: this.settings.outputDimensionality,\n })),\n },\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n googleGenerativeAITextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.embeddings.map(item => item.values),\n usage: undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst googleGenerativeAITextEmbeddingResponseSchema = z.object({\n embeddings: z.array(z.object({ values: z.array(z.number()) })),\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,yBAKO;;;ACEP,IAAAC,yBASO;AACP,IAAAC,cAAkB;;;ACZX,SAAS,iCACd,YACS;AAET,MAAI,oBAAoB,UAAU,GAAG;AACnC,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,eAAe,WAAW;AACnC,WAAO,EAAE,MAAM,WAAW,YAAY,CAAC,EAAE;AAAA,EAC3C;AAEA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,IACA,MAAM;AAAA,EACR,IAAI;AAEJ,QAAM,SAAkC,CAAC;AAEzC,MAAI;AAAa,WAAO,cAAc;AACtC,MAAI;AAAU,WAAO,WAAW;AAChC,MAAI;AAAQ,WAAO,SAAS;AAE5B,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO,CAAC,UAAU;AAAA,EAC3B;AAGA,MAAI,MAAM;AACR,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,UAAI,KAAK,SAAS,MAAM,GAAG;AACzB,eAAO,OAAO,KAAK,OAAO,OAAK,MAAM,MAAM,EAAE,CAAC;AAC9C,eAAO,WAAW;AAAA,MACpB,OAAO;AACL,eAAO,OAAO;AAAA,MAChB;AAAA,IACF,WAAW,SAAS,QAAQ;AAC1B,aAAO,OAAO;AAAA,IAChB,OAAO;AACL,aAAO,OAAO;AAAA,IAChB;AAAA,EACF;AAGA,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO;AAAA,EAChB;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,aAAa,OAAO,QAAQ,UAAU,EAAE;AAAA,MAC7C,CAAC,KAAK,CAAC,KAAK,KAAK,MAAM;AACrB,YAAI,GAAG,IAAI,iCAAiC,KAAK;AACjD,eAAO;AAAA,MACT;AAAA,MACA,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,QAAQ,KAAK,IAC9B,MAAM,IAAI,gCAAgC,IAC1C,iCAAiC,KAAK;AAAA,EAC5C;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AACA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AACA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AAEA,MAAI,cAAc,QAAW;AAC3B,WAAO,YAAY;AAAA,EACrB;AAEA,SAAO;AACT;AAEA,SAAS,oBAAoB,YAA4C;AACvE,SACE,cAAc,QACd,OAAO,eAAe,YACtB,WAAW,SAAS,aACnB,WAAW,cAAc,QACxB,OAAO,KAAK,WAAW,UAAU,EAAE,WAAW;AAEpD;;;ACxGA,sBAGO;AACP,4BAA0C;AAOnC,SAAS,oCACd,QAC0B;AAb5B;AAcE,QAAM,yBAAkD,CAAC;AACzD,QAAM,WAA6C,CAAC;AACpD,MAAI,wBAAwB;AAE5B,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,YAAI,CAAC,uBAAuB;AAC1B,gBAAM,IAAI,8CAA8B;AAAA,YACtC,eACE;AAAA,UACJ,CAAC;AAAA,QACH;AAEA,+BAAuB,KAAK,EAAE,MAAM,QAAQ,CAAC;AAC7C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,cAAM,QAAyC,CAAC;AAEhD,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,oBAAM,KAAK,EAAE,MAAM,KAAK,KAAK,CAAC;AAC9B;AAAA,YACF;AAAA,YAEA,KAAK,SAAS;AACZ,oBAAM;AAAA,gBACJ,KAAK,iBAAiB,MAClB;AAAA,kBACE,UAAU;AAAA,oBACR,WAAU,UAAK,aAAL,YAAiB;AAAA,oBAC3B,SAAS,KAAK,MAAM,SAAS;AAAA,kBAC/B;AAAA,gBACF,IACA;AAAA,kBACE,YAAY;AAAA,oBACV,WAAU,UAAK,aAAL,YAAiB;AAAA,oBAC3B,UAAM,iDAA0B,KAAK,KAAK;AAAA,kBAC5C;AAAA,gBACF;AAAA,cACN;AAEA;AAAA,YACF;AAAA,YAEA,KAAK,QAAQ;AACX,oBAAM;AAAA,gBACJ,KAAK,gBAAgB,MACjB;AAAA,kBACE,UAAU;AAAA,oBACR,UAAU,KAAK;AAAA,oBACf,SAAS,KAAK,KAAK,SAAS;AAAA,kBAC9
B;AAAA,gBACF,IACA;AAAA,kBACE,YAAY;AAAA,oBACV,UAAU,KAAK;AAAA,oBACf,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACN;AAEA;AAAA,YACF;AAAA,YAEA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,8CAA8B;AAAA,gBACtC,eAAe,gBAAgB,gBAAgB;AAAA,cACjD,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK,EAAE,MAAM,QAAQ,MAAM,CAAC;AACrC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QACJ,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK,KAAK,WAAW,IACxB,SACA,EAAE,MAAM,KAAK,KAAK;AAAA,cACxB;AAAA,cACA,KAAK,aAAa;AAChB,uBAAO;AAAA,kBACL,cAAc;AAAA,oBACZ,MAAM,KAAK;AAAA,oBACX,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC,EACA;AAAA,YACC,UAAQ,SAAS;AAAA,UACnB;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QAAQ,IAAI,WAAS;AAAA,YAC1B,kBAAkB;AAAA,cAChB,MAAM,KAAK;AAAA,cACX,UAAU;AAAA,gBACR,MAAM,KAAK;AAAA,gBACX,SAAS,KAAK;AAAA,cAChB;AAAA,YACF;AAAA,UACF,EAAE;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,mBACE,uBAAuB,SAAS,IAC5B,EAAE,OAAO,uBAAuB,IAChC;AAAA,IACN;AAAA,EACF;AACF;;;AC9JO,SAAS,aAAa,SAAyB;AACpD,SAAO,QAAQ,SAAS,GAAG,IAAI,UAAU,UAAU,OAAO;AAC5D;;;ACFA,IAAAC,yBAA+C;AAC/C,iBAAkB;AAElB,IAAM,wBAAwB,aAAE,OAAO;AAAA,EACrC,OAAO,aAAE,OAAO;AAAA,IACd,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,SAAS,aAAE,OAAO;AAAA,IAClB,QAAQ,aAAE,OAAO;AAAA,EACnB,CAAC;AACH,CAAC;AAIM,IAAM,kCAA8B,uDAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;AChBD,IAAAC,mBAIO;AAGA,SAAS,aACd,MAGA,oBACA,WAsBA;AAlCF;AAmCE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MAAI,oBAAoB;AACtB,WAAO;AAAA,MACL,OAAO,YAAY,EAAE,cAAc,CAAC,EAAE,IAAI,EAAE,uBAAuB,CAAC,EAAE;AAAA,MACtE,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,uBAAuB,CAAC;AAC9B,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,2BAAqB,KAAK;AAAA,QACxB,MAAM,KAAK;AAAA,QACX,cAAa,UAAK,gBAAL,YAAoB;AAAA,QACjC,YAAY,iCAAiC,KAAK,UAAU;AAAA,MAC9D,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACtB,WAAO;AAAA,MACL,OAAO,EAAE,qBAAqB;AAAA,MAC9B,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,MAAM,EAAE;AAAA,QACrD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY;AAAA,UACV,uBAAuB;AAAA,YACrB,MAAM;AAAA,YACN,sBAAsB,CAAC,WAAW,QAAQ;AAAA,UAC5C;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AC9GO,SAAS,kCAAkC;AAAA,EAChD;AAAA,EACA;AACF,GAGgC;AAC9B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO,eAAe,eAAe;AAAA,IACvC,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;ANeO,IAAM,kCAAN,MAAiE;AAAA,EActE,YACE,SACA,UACA,QACA;AAjBF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AACvC,SAAS,oBAAoB;AAgB3B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAjBA,IAAI,4BAA4B;AA3ClC;AA4CI,YAAO,UAAK,SAAS,sBAAd,YAAmC;AAAA,EAC5C;AAAA,EAiBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,I
ACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA9EnD;AA+EI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,mBAAmB;AAAA;AAAA,MAEvB,iBAAiB;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAGA,mBACE,iDAAgB,UAAS,SAAS,qBAAqB;AAAA,MACzD,iBACE,iDAAgB,UAAS,UACzB,eAAe,UAAU;AAAA;AAAA,MAGzB,KAAK,4BACD,iCAAiC,eAAe,MAAM,IACtD;AAAA,MACN,GAAI,KAAK,SAAS,kBAAkB;AAAA,QAClC,gBAAgB,KAAK,SAAS;AAAA,MAChC;AAAA,IACF;AAEA,UAAM,EAAE,UAAU,kBAAkB,IAClC,oCAAoC,MAAM;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,cAAM,EAAE,OAAO,YAAY,aAAa,IAAI;AAAA,UAC1C;AAAA,WACA,UAAK,SAAS,uBAAd,YAAoC;AAAA,UACpC,KAAK,QAAQ,SAAS,UAAU;AAAA,QAClC;AAEA,eAAO;AAAA,UACL,MAAM;AAAA,YACJ;AAAA,YACA;AAAA,YACA;AAAA,YACA,gBAAgB,KAAK,SAAS;AAAA,YAC9B;AAAA,YACA;AAAA,YACA,eAAe,KAAK,SAAS;AAAA,UAC/B;AAAA,UACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,kBAAkB;AAAA,cAChB,GAAG;AAAA,cACH,kBAAkB;AAAA,cAClB,gBACE,KAAK,UAAU;AAAA;AAAA,cAGf,KAAK,4BACD,iCAAiC,KAAK,MAAM,IAC5C;AAAA,YACR;AAAA,YACA;AAAA,YACA;AAAA,YACA,gBAAgB,KAAK,SAAS;AAAA,YAC9B,eAAe,KAAK,SAAS;AAAA,UAC/B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ;AAAA,YACA;AAAA,YACA,OAAO;AAAA,cACL,sBAAsB;AAAA,gBACpB;AAAA,kBACE,MAAM,KAAK,KAAK;AAAA,kBAChB,cAAa,UAAK,KAAK,gBAAV,YAAyB;AAAA,kBACtC,YAAY;AAAA,oBACV,KAAK,KAAK;AAAA,kBACZ;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,YACA,YAAY,EAAE,uBAAuB,EAAE,MAAM,MAAM,EAAE;AAAA,YACrD,gBAAgB,KAAK,SAAS;AAAA,YAC9B,eAAe,KAAK,SAAS;AAAA,UAC/B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,YAAY,KAAmB;AAC7B,WAAO,IACJ,SAAS,EACT,WAAW,yDAAyD;AAAA,EACzE;AAAA,EAEA,MAAM,WACJ,SAC6D;AA3MjE;AA4MI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM,oBAAgB;AAAA,MACpB,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD,SAAS;AAAA,MACT,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,kDAA0B,cAAc;AAAA,MACnE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,YAAY,SAAS,WAAW,CAAC;AAEvC,UAAM,YAAY,sBAAsB;AAAA,MACtC,QAAO,qBAAU,YAAV,mBAAmB,UAAnB,YAA4B,CAAC;AAAA,MACpC,YAAY,KAAK,OAAO;AAAA,IAC1B,CAAC;AAED,UAAM,gBAAgB,SAAS;AAE/B,WAAO;AAAA,MACL,MAAM,kBAAiB,qBAAU,YAAV,mBAAmB,UAAnB,YAA4B,CAAC,CAAC;AAAA,MACrD;AAAA,MACA,cAAc,kCAAkC;AAAA,QAC9C,cAAc,UAAU;AAAA,QACxB,cAAc,aAAa,QAAQ,UAAU,SAAS;AAAA,MACxD,CAAC;AAAA,MACD,OAAO;AAAA,QACL,eAAc,oDAAe,qBAAf,YAAmC;AAAA,QACjD,mBAAkB,oDAAe,yBAAf,YAAuC;AAAA,MAC3D;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,kBAAkB;AAAA,QAChB,QAAQ;AAAA,UACN,oBAAmB,eAAU,sBAAV,YAA+B;AAAA,UAClD,gBAAe,eAAU,kBAAV,YAA2B;AAAA,QAC5C;AAAA,MACF;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO,KAAK,UAAU,IAAI;AAChC,UAAM,cAAU;AAAA,MACd,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD;AAAA,MACA,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,yDAAiC,WAAW;AAAA,MACvE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OA
AO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI,mBACF;AAEF,UAAMC,cAAa,KAAK,OAAO;AAC/B,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AA5SvC;AA6SY,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,kBAAM,gBAAgB,MAAM;AAE5B,gBAAI,iBAAiB,MAAM;AACzB,sBAAQ;AAAA,gBACN,eAAc,mBAAc,qBAAd,YAAkC;AAAA,gBAChD,mBAAkB,mBAAc,yBAAd,YAAsC;AAAA,cAC1D;AAAA,YACF;AAEA,kBAAM,aAAY,WAAM,eAAN,mBAAmB;AAGrC,gBAAI,aAAa,MAAM;AACrB;AAAA,YACF;AAEA,gBAAI,UAAU,gBAAgB,MAAM;AAClC,6BAAe,kCAAkC;AAAA,gBAC/C,cAAc,UAAU;AAAA,gBACxB;AAAA,cACF,CAAC;AAED,iCAAmB;AAAA,gBACjB,QAAQ;AAAA,kBACN,oBAAmB,eAAU,sBAAV,YAA+B;AAAA,kBAClD,gBAAe,eAAU,kBAAV,YAA2B;AAAA,gBAC5C;AAAA,cACF;AAAA,YACF;AAEA,kBAAM,UAAU,UAAU;AAE1B,gBAAI,WAAW,MAAM;AACnB;AAAA,YACF;AAEA,kBAAM,YAAY,iBAAiB,QAAQ,KAAK;AAChD,gBAAI,aAAa,MAAM;AACrB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW;AAAA,cACb,CAAC;AAAA,YACH;AAEA,kBAAM,iBAAiB,sBAAsB;AAAA,cAC3C,OAAO,QAAQ;AAAA,cACf,YAAAA;AAAA,YACF,CAAC;AAED,gBAAI,kBAAkB,MAAM;AAC1B,yBAAW,YAAY,gBAAgB;AACrC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS;AAAA,kBACnB,eAAe,SAAS;AAAA,gBAC1B,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS;AAAA,kBACnB,MAAM,SAAS;AAAA,gBACjB,CAAC;AAED,+BAAe;AAAA,cACjB;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAEA,SAAS,sBAAsB;AAAA,EAC7B;AAAA,EACA,YAAAA;AACF,GAGG;AACD,QAAM,oBAAoB,MAAM;AAAA,IAC9B,UAAQ,kBAAkB;AAAA,EAC5B;AAMA,SAAO,kBAAkB,WAAW,IAChC,SACA,kBAAkB,IAAI,WAAS;AAAA,IAC7B,cAAc;AAAA,IACd,YAAYA,YAAW;AAAA,IACvB,UAAU,KAAK,aAAa;AAAA,IAC5B,MAAM,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,EAC7C,EAAE;AACR;AAEA,SAAS,iBAAiB,OAA+C;AACvE,QAAM,YAAY,MAAM,OAAO,UAAQ,UAAU,IAAI;AAIrD,SAAO,UAAU,WAAW,IACxB,SACA,UAAU,IAAI,UAAQ,KAAK,IAAI,EAAE,KAAK,EAAE;AAC9C;AAEA,IAAM,gBAAgB,cAAE,OAAO;AAAA,EAC7B,MAAM,cAAE,OAAO;AAAA,EACf,OAAO,cAAE;AAAA,IACP,cAAE,MAAM;AAAA,MACN,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACD,cAAE,OAAO;AAAA,QACP,cAAc,cAAE,OAAO;AAAA,UACrB,MAAM,cAAE,OAAO;AAAA,UACf,MAAM,cAAE,QAAQ;AAAA,QAClB,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC;AAIM,IAAM,0BAA0B,cAAE,OAAO;AAAA,EAC9C,kBAAkB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EAC9C,kBAAkB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EAC9C,kBAAkB,cACf,OAAO;AAAA,IACN,iBAAiB,cAAE,OAAO;AAAA,EAC5B,CAAC,EACA,QAAQ;AAAA,EACX,iBAAiB,cACd;AAAA,IACC,cAAE,OAAO;AAAA,MACP,KAAK,cACF,OAAO;AAAA,QACN,KAAK,cAAE,OAAO;AAAA,QACd,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC,EACA,QAAQ;AAAA,MACX,kBAAkB,cACf,OAAO;AAAA,QACN,KAAK,cAAE,OAAO;AAAA,QACd,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC,EACA,QAAQ;AAAA,IACb,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,mBAAmB,cAChB;AAAA,IACC,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,YAAY,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC/B,UAAU,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC7B,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,MAC3B,CAAC;AAAA,MACD,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,uBAAuB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MACnD,qBAAqB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MACjD,kBAAkB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MAC9C,iBAAiB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,IAC/C,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,mBAAmB,cAChB,MAAM;AAAA,IACL,cAAE,OAAO;AAAA,MACP,0BAA0B,cAAE,OAAO;AAAA,IACrC,CAAC;AAAA,IACD,cAAE,OAAO,CAAC,CAAC;AAAA,EACb,CAAC,EACA,QAAQ;AACb,CAAC;AAGM,IAAM,qBAAqB,cAAE,OAAO;AAAA,EACzC,UAAU,cAAE,OAAO;AAAA,EACnB,a
AAa,cAAE,OAAO;AAAA,EACtB,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACrC,UAAU,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,SAAS,cAAE,QAAQ,EAAE,QAAQ;AAC/B,CAAC;AAED,IAAM,iBAAiB,cAAE,OAAO;AAAA,EAC9B,YAAY,cAAE;AAAA,IACZ,cAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ;AAAA,MAC/B,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAe,cAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH;AAAA,EACA,eAAe,cACZ,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,sBAAsB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACzC,iBAAiB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,cAAc,cAAE,OAAO;AAAA,EAC3B,YAAY,cACT;AAAA,IACC,cAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ;AAAA,MAC/B,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAe,cAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,eAAe,cACZ,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,sBAAsB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACzC,iBAAiB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,CAAC,EACA,QAAQ;AACb,CAAC;;;AOxiBD,IAAAC,mBAGO;AACP,IAAAC,yBAMO;AACP,IAAAC,cAAkB;AAcX,IAAM,mCAAN,MAEP;AAAA,EAmBE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AACjC,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,wBAAiC;AACnC,WAAO;AAAA,EACT;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,oBAAgB;AAAA,MACpB,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC;AAAA,IACF;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AAAA,MAClD,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,UAAU,OAAO,IAAI,YAAU;AAAA,UAC7B,OAAO,UAAU,KAAK,OAAO;AAAA,UAC7B,SAAS,EAAE,MAAM,QAAQ,OAAO,CAAC,EAAE,MAAM,MAAM,CAAC,EAAE;AAAA,UAClD,sBAAsB,KAAK,SAAS;AAAA,QACtC,EAAE;AAAA,MACJ;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,WAAW,IAAI,UAAQ,KAAK,MAAM;AAAA,MACvD,OAAO;AAAA,MACP,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,gDAAgD,cAAE,OAAO;AAAA,EAC7D,YAAY,cAAE,MAAM,cAAE,OAAO,EAAE,QAAQ,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAC/D,CAAC;;;ARNM,SAAS,yBACd,UAA8C,CAAC,GACnB;AAvG9B;AAwGE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YACA;AAEF,QAAM,aAAa,OAAO;AAAA,IACxB,sBAAkB,mCAAW;AAAA,MAC3B,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,IACD,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAuC,CAAC,MACxC;AAxHJ,QAAAC;AAyHI,eAAI,gCAAgC,SAAS,UAAU;AAAA,MACrD,UAAU;AAAA,MACV;AAAA,MACA,SAAS;AAAA,MACT,aAAYA,MAAA,QAAQ,eAAR,OAAAA,MAAsB;AAAA,MAClC,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA;AAEH,QAAM,uBAAuB,CAC3B,SACA,WAAgD,CAAC,MAEjD,IAAI,iCAAiC,SAAS,UAAU;AAAA,IACtD,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,eAAe;AACxB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAKO,IAAM,SAAS,yBAAyB;","names":["import_provider_utils","import_provider_utils","import_zod","import_provider_utils","import_provider","generateId","import_provider","import_provider_utils","import_zod","_a"]}
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/google-provider.ts","../src/google-generative-ai-language-model.ts","../src/convert-json-schema-to-openapi-schema.ts","../src/convert-to-google-generative-ai-messages.ts","../src/get-model-path.ts","../src/google-error.ts","../src/google-prepare-tools.ts","../src/map-google-generative-ai-finish-reason.ts","../src/google-generative-ai-embedding-model.ts","../src/google-supported-file-url.ts"],"sourcesContent":["export { createGoogleGenerativeAI, google } from './google-provider';\nexport type { GoogleErrorData } from './google-error';\nexport type { GoogleGenerativeAIProviderMetadata } from './google-generative-ai-prompt';\nexport type {\n GoogleGenerativeAIProvider,\n GoogleGenerativeAIProviderSettings,\n} from './google-provider';\n","import {\n FetchFunction,\n generateId,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { GoogleGenerativeAILanguageModel } from './google-generative-ai-language-model';\nimport {\n GoogleGenerativeAIModelId,\n GoogleGenerativeAISettings,\n} from './google-generative-ai-settings';\nimport { GoogleGenerativeAIEmbeddingModel } from './google-generative-ai-embedding-model';\nimport {\n GoogleGenerativeAIEmbeddingModelId,\n GoogleGenerativeAIEmbeddingSettings,\n} from './google-generative-ai-embedding-settings';\nimport {\n EmbeddingModelV1,\n LanguageModelV1,\n ProviderV1,\n} from '@ai-sdk/provider';\nimport { isSupportedFileUrl } from './google-supported-file-url';\n\nexport interface GoogleGenerativeAIProvider extends ProviderV1 {\n (\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): LanguageModelV1;\n\n languageModel(\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): LanguageModelV1;\n\n chat(\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): LanguageModelV1;\n\n /**\n * @deprecated Use `chat()` instead.\n */\n generativeAI(\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): LanguageModelV1;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n embedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings?: GoogleGenerativeAIEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n textEmbedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings?: GoogleGenerativeAIEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n textEmbeddingModel(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings?: GoogleGenerativeAIEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n}\n\nexport interface GoogleGenerativeAIProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://generativelanguage.googleapis.com/v1beta`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `x-goog-api-key` header.\nIt defaults to the `GOOGLE_GENERATIVE_AI_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string | undefined>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. 
testing.\n */\n fetch?: FetchFunction;\n\n /**\nOptional function to generate a unique ID for each request.\n */\n generateId?: () => string;\n}\n\n/**\nCreate a Google Generative AI provider instance.\n */\nexport function createGoogleGenerativeAI(\n options: GoogleGenerativeAIProviderSettings = {},\n): GoogleGenerativeAIProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ??\n 'https://generativelanguage.googleapis.com/v1beta';\n\n const getHeaders = () => ({\n 'x-goog-api-key': loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'GOOGLE_GENERATIVE_AI_API_KEY',\n description: 'Google Generative AI',\n }),\n ...options.headers,\n });\n\n const createChatModel = (\n modelId: GoogleGenerativeAIModelId,\n settings: GoogleGenerativeAISettings = {},\n ) =>\n new GoogleGenerativeAILanguageModel(modelId, settings, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n generateId: options.generateId ?? generateId,\n isSupportedUrl: isSupportedFileUrl,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings: GoogleGenerativeAIEmbeddingSettings = {},\n ) =>\n new GoogleGenerativeAIEmbeddingModel(modelId, settings, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The Google Generative AI model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId, settings);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.generativeAI = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n return provider as GoogleGenerativeAIProvider;\n}\n\n/**\nDefault Google Generative AI provider instance.\n */\nexport const google = createGoogleGenerativeAI();\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n LanguageModelV1ProviderMetadata,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n Resolvable,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\nimport { convertToGoogleGenerativeAIMessages } from './convert-to-google-generative-ai-messages';\nimport { getModelPath } from './get-model-path';\nimport { googleFailedResponseHandler } from './google-error';\nimport { GoogleGenerativeAIContentPart } from './google-generative-ai-prompt';\nimport {\n GoogleGenerativeAIModelId,\n InternalGoogleGenerativeAISettings,\n} from './google-generative-ai-settings';\nimport { prepareTools } from './google-prepare-tools';\nimport { mapGoogleGenerativeAIFinishReason } from './map-google-generative-ai-finish-reason';\n\ntype GoogleGenerativeAIConfig = {\n provider: string;\n baseURL: string;\n headers: Resolvable<Record<string, string | undefined>>;\n fetch?: FetchFunction;\n generateId: () => string;\n isSupportedUrl: (url: URL) => boolean;\n};\n\nexport class GoogleGenerativeAILanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = 'json';\n 
readonly supportsImageUrls = false;\n\n get supportsStructuredOutputs() {\n return this.settings.structuredOutputs ?? true;\n }\n\n readonly modelId: GoogleGenerativeAIModelId;\n readonly settings: InternalGoogleGenerativeAISettings;\n\n private readonly config: GoogleGenerativeAIConfig;\n\n constructor(\n modelId: GoogleGenerativeAIModelId,\n settings: InternalGoogleGenerativeAISettings,\n config: GoogleGenerativeAIConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private async getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (seed != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'seed',\n });\n }\n\n const generationConfig = {\n // standardized settings:\n maxOutputTokens: maxTokens,\n temperature,\n topK,\n topP,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n\n // response format:\n responseMimeType:\n responseFormat?.type === 'json' ? 'application/json' : undefined,\n responseSchema:\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n // Google GenAI does not support all OpenAPI Schema features,\n // so this is needed as an escape hatch:\n this.supportsStructuredOutputs\n ? convertJSONSchemaToOpenAPISchema(responseFormat.schema)\n : undefined,\n ...(this.settings.audioTimestamp && {\n audioTimestamp: this.settings.audioTimestamp,\n }),\n\n // reasoning models:\n ...(isReasoningModel(this.modelId) && {\n thinking_config: { include_thoughts: true },\n }),\n };\n\n const { contents, systemInstruction } =\n convertToGoogleGenerativeAIMessages(prompt);\n\n switch (type) {\n case 'regular': {\n const { tools, toolConfig, toolWarnings } = prepareTools(\n mode,\n this.settings.useSearchGrounding ?? false,\n this.modelId.includes('gemini-2'),\n );\n\n return {\n args: {\n generationConfig,\n contents,\n systemInstruction,\n safetySettings: this.settings.safetySettings,\n tools,\n toolConfig,\n cachedContent: this.settings.cachedContent,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case 'object-json': {\n return {\n args: {\n generationConfig: {\n ...generationConfig,\n responseMimeType: 'application/json',\n responseSchema:\n mode.schema != null &&\n // Google GenAI does not support all OpenAPI Schema features,\n // so this is needed as an escape hatch:\n this.supportsStructuredOutputs\n ? convertJSONSchemaToOpenAPISchema(mode.schema)\n : undefined,\n },\n contents,\n systemInstruction,\n safetySettings: this.settings.safetySettings,\n cachedContent: this.settings.cachedContent,\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n generationConfig,\n contents,\n tools: {\n functionDeclarations: [\n {\n name: mode.tool.name,\n description: mode.tool.description ?? 
'',\n parameters: convertJSONSchemaToOpenAPISchema(\n mode.tool.parameters,\n ),\n },\n ],\n },\n toolConfig: { functionCallingConfig: { mode: 'ANY' } },\n safetySettings: this.settings.safetySettings,\n cachedContent: this.settings.cachedContent,\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n supportsUrl(url: URL): boolean {\n return this.config.isSupportedUrl(url);\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = JSON.stringify(args);\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n let url = `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:generateContent`;\n\n // reasoning models are only available in the alpha version of the API:\n if (isReasoningModel(this.modelId)) {\n url = url.replace('v1beta', 'v1alpha');\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url,\n headers: mergedHeaders,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(responseSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { contents: rawPrompt, ...rawSettings } = args;\n const candidate = response.candidates[0];\n\n const toolCalls = getToolCallsFromParts({\n parts: candidate.content?.parts ?? [],\n generateId: this.config.generateId,\n });\n\n const usageMetadata = response.usageMetadata;\n\n return {\n text: getTextFromParts({\n parts: candidate.content?.parts,\n isThought: false,\n }),\n reasoning: getTextFromParts({\n parts: candidate.content?.parts,\n isThought: true,\n }),\n toolCalls,\n finishReason: mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls: toolCalls != null && toolCalls.length > 0,\n }),\n usage: {\n promptTokens: usageMetadata?.promptTokenCount ?? NaN,\n completionTokens: usageMetadata?.candidatesTokenCount ?? NaN,\n },\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n providerMetadata: {\n google: {\n groundingMetadata: candidate.groundingMetadata ?? null,\n safetyRatings: candidate.safetyRatings ?? 
null,\n },\n },\n request: { body },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = JSON.stringify(args);\n const headers = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n let url = `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:streamGenerateContent?alt=sse`;\n\n if (isReasoningModel(this.modelId)) {\n url = url.replace('v1beta', 'v1alpha');\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url,\n headers,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { contents: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let providerMetadata: LanguageModelV1ProviderMetadata | undefined =\n undefined;\n\n const generateId = this.config.generateId;\n let hasToolCalls = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n const usageMetadata = value.usageMetadata;\n\n if (usageMetadata != null) {\n usage = {\n promptTokens: usageMetadata.promptTokenCount ?? NaN,\n completionTokens: usageMetadata.candidatesTokenCount ?? NaN,\n };\n }\n\n const candidate = value.candidates?.[0];\n\n // sometimes the API returns an empty candidates array\n if (candidate == null) {\n return;\n }\n\n if (candidate.finishReason != null) {\n finishReason = mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls,\n });\n\n providerMetadata = {\n google: {\n groundingMetadata: candidate.groundingMetadata ?? null,\n safetyRatings: candidate.safetyRatings ?? 
null,\n },\n };\n }\n\n const content = candidate.content;\n\n if (content == null) {\n return;\n }\n\n const deltaText = getTextFromParts({\n parts: content.parts,\n isThought: false,\n });\n\n if (deltaText != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: deltaText,\n });\n }\n\n const reasoningText = getTextFromParts({\n parts: content.parts,\n isThought: true,\n });\n\n if (reasoningText != null) {\n controller.enqueue({\n type: 'reasoning',\n textDelta: reasoningText,\n });\n }\n\n const toolCallDeltas = getToolCallsFromParts({\n parts: content.parts,\n generateId,\n });\n\n if (toolCallDeltas != null) {\n for (const toolCall of toolCallDeltas) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n argsTextDelta: toolCall.args,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n args: toolCall.args,\n });\n\n hasToolCalls = true;\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n providerMetadata,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body },\n };\n }\n}\n\nfunction getToolCallsFromParts({\n parts,\n generateId,\n}: {\n parts: z.infer<typeof contentSchema>['parts'];\n generateId: () => string;\n}) {\n const functionCallParts = parts.filter(\n part => 'functionCall' in part,\n ) as Array<\n GoogleGenerativeAIContentPart & {\n functionCall: { name: string; args: unknown };\n }\n >;\n\n return functionCallParts.length === 0\n ? undefined\n : functionCallParts.map(part => ({\n toolCallType: 'function' as const,\n toolCallId: generateId(),\n toolName: part.functionCall.name,\n args: JSON.stringify(part.functionCall.args),\n }));\n}\n\nfunction getTextFromParts({\n parts,\n isThought,\n}: {\n parts: z.infer<typeof contentSchema>['parts'] | undefined;\n isThought: boolean;\n}) {\n const textParts = (parts ?? []).filter(\n (part): part is GoogleGenerativeAIContentPart & { text: string } =>\n 'text' in part && (part.thought ?? false) === isThought,\n );\n\n return textParts.length === 0\n ? 
undefined\n : textParts.map(part => part.text).join('');\n}\n\nconst contentSchema = z.object({\n role: z.string(),\n parts: z.array(\n z.union([\n z.object({\n text: z.string(),\n thought: z.boolean().nullish(),\n }),\n z.object({\n functionCall: z.object({\n name: z.string(),\n args: z.unknown(),\n }),\n }),\n ]),\n ),\n});\n\n// https://ai.google.dev/gemini-api/docs/grounding\n// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/ground-gemini#ground-to-search\nexport const groundingMetadataSchema = z.object({\n webSearchQueries: z.array(z.string()).nullish(),\n retrievalQueries: z.array(z.string()).nullish(),\n searchEntryPoint: z\n .object({\n renderedContent: z.string(),\n })\n .nullish(),\n groundingChunks: z\n .array(\n z.object({\n web: z\n .object({\n uri: z.string(),\n title: z.string(),\n })\n .nullish(),\n retrievedContext: z\n .object({\n uri: z.string(),\n title: z.string(),\n })\n .nullish(),\n }),\n )\n .nullish(),\n groundingSupports: z\n .array(\n z.object({\n segment: z.object({\n startIndex: z.number().nullish(),\n endIndex: z.number().nullish(),\n text: z.string().nullish(),\n }),\n segment_text: z.string().nullish(),\n groundingChunkIndices: z.array(z.number()).nullish(),\n supportChunkIndices: z.array(z.number()).nullish(),\n confidenceScores: z.array(z.number()).nullish(),\n confidenceScore: z.array(z.number()).nullish(),\n }),\n )\n .nullish(),\n retrievalMetadata: z\n .union([\n z.object({\n webDynamicRetrievalScore: z.number(),\n }),\n z.object({}),\n ])\n .nullish(),\n});\n\n// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/configure-safety-filters\nexport const safetyRatingSchema = z.object({\n category: z.string(),\n probability: z.string(),\n probabilityScore: z.number().nullish(),\n severity: z.string().nullish(),\n severityScore: z.number().nullish(),\n blocked: z.boolean().nullish(),\n});\n\nconst responseSchema = z.object({\n candidates: z.array(\n z.object({\n content: contentSchema.nullish(),\n finishReason: z.string().nullish(),\n safetyRatings: z.array(safetyRatingSchema).nullish(),\n groundingMetadata: groundingMetadataSchema.nullish(),\n }),\n ),\n usageMetadata: z\n .object({\n promptTokenCount: z.number().nullish(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number().nullish(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst chunkSchema = z.object({\n candidates: z\n .array(\n z.object({\n content: contentSchema.nullish(),\n finishReason: z.string().nullish(),\n safetyRatings: z.array(safetyRatingSchema).nullish(),\n groundingMetadata: groundingMetadataSchema.nullish(),\n }),\n )\n .nullish(),\n usageMetadata: z\n .object({\n promptTokenCount: z.number().nullish(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number().nullish(),\n })\n .nullish(),\n});\n\nfunction isReasoningModel(modelId: string) {\n return modelId === 'gemini-2.0-flash-thinking-exp';\n}\n","import { JSONSchema7Definition } from '@ai-sdk/provider';\n\n/**\n * Converts JSON Schema 7 to OpenAPI Schema 3.0\n */\nexport function convertJSONSchemaToOpenAPISchema(\n jsonSchema: JSONSchema7Definition,\n): unknown {\n // parameters need to be undefined if they are empty objects:\n if (isEmptyObjectSchema(jsonSchema)) {\n return undefined;\n }\n\n if (typeof jsonSchema === 'boolean') {\n return { type: 'boolean', properties: {} };\n }\n\n const {\n type,\n 
description,\n required,\n properties,\n items,\n allOf,\n anyOf,\n oneOf,\n format,\n const: constValue,\n minLength,\n enum: enumValues,\n } = jsonSchema;\n\n const result: Record<string, unknown> = {};\n\n if (description) result.description = description;\n if (required) result.required = required;\n if (format) result.format = format;\n\n if (constValue !== undefined) {\n result.enum = [constValue];\n }\n\n // Handle type\n if (type) {\n if (Array.isArray(type)) {\n if (type.includes('null')) {\n result.type = type.filter(t => t !== 'null')[0];\n result.nullable = true;\n } else {\n result.type = type;\n }\n } else if (type === 'null') {\n result.type = 'null';\n } else {\n result.type = type;\n }\n }\n\n // Handle enum\n if (enumValues !== undefined) {\n result.enum = enumValues;\n }\n\n if (properties != null) {\n result.properties = Object.entries(properties).reduce(\n (acc, [key, value]) => {\n acc[key] = convertJSONSchemaToOpenAPISchema(value);\n return acc;\n },\n {} as Record<string, unknown>,\n );\n }\n\n if (items) {\n result.items = Array.isArray(items)\n ? items.map(convertJSONSchemaToOpenAPISchema)\n : convertJSONSchemaToOpenAPISchema(items);\n }\n\n if (allOf) {\n result.allOf = allOf.map(convertJSONSchemaToOpenAPISchema);\n }\n if (anyOf) {\n result.anyOf = anyOf.map(convertJSONSchemaToOpenAPISchema);\n }\n if (oneOf) {\n result.oneOf = oneOf.map(convertJSONSchemaToOpenAPISchema);\n }\n\n if (minLength !== undefined) {\n result.minLength = minLength;\n }\n\n return result;\n}\n\nfunction isEmptyObjectSchema(jsonSchema: JSONSchema7Definition): boolean {\n return (\n jsonSchema != null &&\n typeof jsonSchema === 'object' &&\n jsonSchema.type === 'object' &&\n (jsonSchema.properties == null ||\n Object.keys(jsonSchema.properties).length === 0)\n );\n}\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport {\n GoogleGenerativeAIContent,\n GoogleGenerativeAIContentPart,\n GoogleGenerativeAIPrompt,\n} from './google-generative-ai-prompt';\n\nexport function convertToGoogleGenerativeAIMessages(\n prompt: LanguageModelV1Prompt,\n): GoogleGenerativeAIPrompt {\n const systemInstructionParts: Array<{ text: string }> = [];\n const contents: Array<GoogleGenerativeAIContent> = [];\n let systemMessagesAllowed = true;\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n if (!systemMessagesAllowed) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'system messages are only supported at the beginning of the conversation',\n });\n }\n\n systemInstructionParts.push({ text: content });\n break;\n }\n\n case 'user': {\n systemMessagesAllowed = false;\n\n const parts: GoogleGenerativeAIContentPart[] = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n parts.push({ text: part.text });\n break;\n }\n\n case 'image': {\n parts.push(\n part.image instanceof URL\n ? {\n fileData: {\n mimeType: part.mimeType ?? 'image/jpeg',\n fileUri: part.image.toString(),\n },\n }\n : {\n inlineData: {\n mimeType: part.mimeType ?? 'image/jpeg',\n data: convertUint8ArrayToBase64(part.image),\n },\n },\n );\n\n break;\n }\n\n case 'file': {\n parts.push(\n part.data instanceof URL\n ? 
{\n fileData: {\n mimeType: part.mimeType,\n fileUri: part.data.toString(),\n },\n }\n : {\n inlineData: {\n mimeType: part.mimeType,\n data: part.data,\n },\n },\n );\n\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = part;\n throw new UnsupportedFunctionalityError({\n functionality: `prompt part: ${_exhaustiveCheck}`,\n });\n }\n }\n }\n\n contents.push({ role: 'user', parts });\n break;\n }\n\n case 'assistant': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'model',\n parts: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text.length === 0\n ? undefined\n : { text: part.text };\n }\n case 'tool-call': {\n return {\n functionCall: {\n name: part.toolName,\n args: part.args,\n },\n };\n }\n }\n })\n .filter(\n part => part !== undefined,\n ) as GoogleGenerativeAIContentPart[],\n });\n break;\n }\n\n case 'tool': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'user',\n parts: content.map(part => ({\n functionResponse: {\n name: part.toolName,\n response: {\n name: part.toolName,\n content: part.result,\n },\n },\n })),\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return {\n systemInstruction:\n systemInstructionParts.length > 0\n ? { parts: systemInstructionParts }\n : undefined,\n contents,\n };\n}\n","export function getModelPath(modelId: string): string {\n return modelId.includes('/') ? modelId : `models/${modelId}`;\n}\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst googleErrorDataSchema = z.object({\n error: z.object({\n code: z.number().nullable(),\n message: z.string(),\n status: z.string(),\n }),\n});\n\nexport type GoogleErrorData = z.infer<typeof googleErrorDataSchema>;\n\nexport const googleFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: googleErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\n\nexport function prepareTools(\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n },\n useSearchGrounding: boolean,\n isGemini2: boolean,\n): {\n tools:\n | undefined\n | {\n functionDeclarations: Array<{\n name: string;\n description: string | undefined;\n parameters: unknown;\n }>;\n }\n | { googleSearchRetrieval: Record<string, never> }\n | { googleSearch: Record<string, never> };\n toolConfig:\n | undefined\n | {\n functionCallingConfig: {\n mode: 'AUTO' | 'NONE' | 'ANY';\n allowedFunctionNames?: string[];\n };\n };\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n const tools = mode.tools?.length ? mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (useSearchGrounding) {\n return {\n tools: isGemini2 ? { googleSearch: {} } : { googleSearchRetrieval: {} },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n if (tools == null) {\n return { tools: undefined, toolConfig: undefined, toolWarnings };\n }\n\n const functionDeclarations = [];\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n functionDeclarations.push({\n name: tool.name,\n description: tool.description ?? 
'',\n parameters: convertJSONSchemaToOpenAPISchema(tool.parameters),\n });\n }\n }\n\n const toolChoice = mode.toolChoice;\n\n if (toolChoice == null) {\n return {\n tools: { functionDeclarations },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'AUTO' } },\n toolWarnings,\n };\n case 'none':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'NONE' } },\n toolWarnings,\n };\n case 'required':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'ANY' } },\n toolWarnings,\n };\n case 'tool':\n return {\n tools: { functionDeclarations },\n toolConfig: {\n functionCallingConfig: {\n mode: 'ANY',\n allowedFunctionNames: [toolChoice.toolName],\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapGoogleGenerativeAIFinishReason({\n finishReason,\n hasToolCalls,\n}: {\n finishReason: string | null | undefined;\n hasToolCalls: boolean;\n}): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'STOP':\n return hasToolCalls ? 'tool-calls' : 'stop';\n case 'MAX_TOKENS':\n return 'length';\n case 'RECITATION':\n case 'SAFETY':\n return 'content-filter';\n case 'FINISH_REASON_UNSPECIFIED':\n case 'OTHER':\n return 'other';\n default:\n return 'unknown';\n }\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { googleFailedResponseHandler } from './google-error';\nimport {\n GoogleGenerativeAIEmbeddingModelId,\n GoogleGenerativeAIEmbeddingSettings,\n} from './google-generative-ai-embedding-settings';\n\ntype GoogleGenerativeAIEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class GoogleGenerativeAIEmbeddingModel\n implements EmbeddingModelV1<string>\n{\n readonly specificationVersion = 'v1';\n readonly modelId: GoogleGenerativeAIEmbeddingModelId;\n\n private readonly config: GoogleGenerativeAIEmbeddingConfig;\n private readonly settings: GoogleGenerativeAIEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return true;\n }\n\n constructor(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n settings: GoogleGenerativeAIEmbeddingSettings,\n config: GoogleGenerativeAIEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n 
headers,\n );\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,\n headers: mergedHeaders,\n body: {\n requests: values.map(value => ({\n model: `models/${this.modelId}`,\n content: { role: 'user', parts: [{ text: value }] },\n outputDimensionality: this.settings.outputDimensionality,\n })),\n },\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n googleGenerativeAITextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.embeddings.map(item => item.values),\n usage: undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst googleGenerativeAITextEmbeddingResponseSchema = z.object({\n embeddings: z.array(z.object({ values: z.array(z.number()) })),\n});\n","export function isSupportedFileUrl(url: URL): boolean {\n return url\n .toString()\n .startsWith('https://generativelanguage.googleapis.com/v1beta/files/');\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,yBAKO;;;ACEP,IAAAC,yBASO;AACP,IAAAC,cAAkB;;;ACZX,SAAS,iCACd,YACS;AAET,MAAI,oBAAoB,UAAU,GAAG;AACnC,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,eAAe,WAAW;AACnC,WAAO,EAAE,MAAM,WAAW,YAAY,CAAC,EAAE;AAAA,EAC3C;AAEA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,IACA,MAAM;AAAA,EACR,IAAI;AAEJ,QAAM,SAAkC,CAAC;AAEzC,MAAI;AAAa,WAAO,cAAc;AACtC,MAAI;AAAU,WAAO,WAAW;AAChC,MAAI;AAAQ,WAAO,SAAS;AAE5B,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO,CAAC,UAAU;AAAA,EAC3B;AAGA,MAAI,MAAM;AACR,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,UAAI,KAAK,SAAS,MAAM,GAAG;AACzB,eAAO,OAAO,KAAK,OAAO,OAAK,MAAM,MAAM,EAAE,CAAC;AAC9C,eAAO,WAAW;AAAA,MACpB,OAAO;AACL,eAAO,OAAO;AAAA,MAChB;AAAA,IACF,WAAW,SAAS,QAAQ;AAC1B,aAAO,OAAO;AAAA,IAChB,OAAO;AACL,aAAO,OAAO;AAAA,IAChB;AAAA,EACF;AAGA,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO;AAAA,EAChB;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,aAAa,OAAO,QAAQ,UAAU,EAAE;AAAA,MAC7C,CAAC,KAAK,CAAC,KAAK,KAAK,MAAM;AACrB,YAAI,GAAG,IAAI,iCAAiC,KAAK;AACjD,eAAO;AAAA,MACT;AAAA,MACA,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,QAAQ,KAAK,IAC9B,MAAM,IAAI,gCAAgC,IAC1C,iCAAiC,KAAK;AAAA,EAC5C;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AACA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AACA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AAEA,MAAI,cAAc,QAAW;AAC3B,WAAO,YAAY;AAAA,EACrB;AAEA,SAAO;AACT;AAEA,SAAS,oBAAoB,YAA4C;AACvE,SACE,cAAc,QACd,OAAO,eAAe,YACtB,WAAW,SAAS,aACnB,WAAW,cAAc,QACxB,OAAO,KAAK,WAAW,UAAU,EAAE,WAAW;AAEpD;;;ACxGA,sBAGO;AACP,4BAA0C;AAOnC,SAAS,oCACd,QAC0B;AAb5B;AAcE,QAAM,yBAAkD,CAAC;AACzD,QAAM,WAA6C,CAAC;AACpD,MAAI,wBAAwB;AAE5B,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,YAAI,CAAC,uBAAuB;AAC1B,gBAAM,IAAI,8CAA8B;AAAA,YACtC,eACE;AAAA,UACJ,CAAC;AAAA,QACH;AAEA,+BAAuB,KAAK,EAAE,MAAM,QAAQ,CAAC;AAC7C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,cAAM,QAAyC,CAAC;AAEhD,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,oBAAM,KAAK,EAAE,MAAM,KAAK,KAAK,CAAC;AAC9B;AAAA,YACF;AAAA,YAEA,KAAK,SAAS;AACZ,oBAAM;AAAA,gBACJ,KAAK,iBAAiB,MAClB;AAAA,kBACE,UAAU;AAAA,oBACR,WAAU,UAAK,aAAL,YAAiB;AAAA,oBAC3B,SAAS,KAAK,MAAM,SAAS;AAAA,kBAC/B;AAAA,gBACF,IACA;AAAA,kBACE,YAAY;AAAA,oBACV,WAAU,UAAK,aAAL,YAAiB;AAAA,oBAC3B,UAAM,iDAA0B,KAAK,KAAK;AAAA,kBAC5C;AAAA,gBACF
;AAAA,cACN;AAEA;AAAA,YACF;AAAA,YAEA,KAAK,QAAQ;AACX,oBAAM;AAAA,gBACJ,KAAK,gBAAgB,MACjB;AAAA,kBACE,UAAU;AAAA,oBACR,UAAU,KAAK;AAAA,oBACf,SAAS,KAAK,KAAK,SAAS;AAAA,kBAC9B;AAAA,gBACF,IACA;AAAA,kBACE,YAAY;AAAA,oBACV,UAAU,KAAK;AAAA,oBACf,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACN;AAEA;AAAA,YACF;AAAA,YAEA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,8CAA8B;AAAA,gBACtC,eAAe,gBAAgB,gBAAgB;AAAA,cACjD,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK,EAAE,MAAM,QAAQ,MAAM,CAAC;AACrC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QACJ,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK,KAAK,WAAW,IACxB,SACA,EAAE,MAAM,KAAK,KAAK;AAAA,cACxB;AAAA,cACA,KAAK,aAAa;AAChB,uBAAO;AAAA,kBACL,cAAc;AAAA,oBACZ,MAAM,KAAK;AAAA,oBACX,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC,EACA;AAAA,YACC,UAAQ,SAAS;AAAA,UACnB;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QAAQ,IAAI,WAAS;AAAA,YAC1B,kBAAkB;AAAA,cAChB,MAAM,KAAK;AAAA,cACX,UAAU;AAAA,gBACR,MAAM,KAAK;AAAA,gBACX,SAAS,KAAK;AAAA,cAChB;AAAA,YACF;AAAA,UACF,EAAE;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,mBACE,uBAAuB,SAAS,IAC5B,EAAE,OAAO,uBAAuB,IAChC;AAAA,IACN;AAAA,EACF;AACF;;;AC9JO,SAAS,aAAa,SAAyB;AACpD,SAAO,QAAQ,SAAS,GAAG,IAAI,UAAU,UAAU,OAAO;AAC5D;;;ACFA,IAAAC,yBAA+C;AAC/C,iBAAkB;AAElB,IAAM,wBAAwB,aAAE,OAAO;AAAA,EACrC,OAAO,aAAE,OAAO;AAAA,IACd,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,SAAS,aAAE,OAAO;AAAA,IAClB,QAAQ,aAAE,OAAO;AAAA,EACnB,CAAC;AACH,CAAC;AAIM,IAAM,kCAA8B,uDAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;AChBD,IAAAC,mBAIO;AAGA,SAAS,aACd,MAGA,oBACA,WAsBA;AAlCF;AAmCE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MAAI,oBAAoB;AACtB,WAAO;AAAA,MACL,OAAO,YAAY,EAAE,cAAc,CAAC,EAAE,IAAI,EAAE,uBAAuB,CAAC,EAAE;AAAA,MACtE,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,uBAAuB,CAAC;AAC9B,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,2BAAqB,KAAK;AAAA,QACxB,MAAM,KAAK;AAAA,QACX,cAAa,UAAK,gBAAL,YAAoB;AAAA,QACjC,YAAY,iCAAiC,KAAK,UAAU;AAAA,MAC9D,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACtB,WAAO;AAAA,MACL,OAAO,EAAE,qBAAqB;AAAA,MAC9B,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,MAAM,EAAE;AAAA,QACrD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY;AAAA,UACV,uBAAuB;AAAA,YACrB,MAAM;AAAA,YACN,sBAAsB,CAAC,WAAW,QAAQ;AAAA,UAC5C;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AC9GO,SAAS,kCAAkC;AAAA,EAChD;AAAA,EACA;AACF,GAGgC;AAC9B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO,eAAe,eAAe;AAAA,IACvC,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;ANgBO,IAAM,kCAAN,MAAiE;AAAA,EActE,YACE,SACA,UACA,QACA;AAjBF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AACvC,SAAS,oBAAoB;AAgB3B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;
AAAA,EAjBA,IAAI,4BAA4B;AA5ClC;AA6CI,YAAO,UAAK,SAAS,sBAAd,YAAmC;AAAA,EAC5C;AAAA,EAiBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA/EnD;AAgFI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,mBAAmB;AAAA;AAAA,MAEvB,iBAAiB;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,MAGA,mBACE,iDAAgB,UAAS,SAAS,qBAAqB;AAAA,MACzD,iBACE,iDAAgB,UAAS,UACzB,eAAe,UAAU;AAAA;AAAA,MAGzB,KAAK,4BACD,iCAAiC,eAAe,MAAM,IACtD;AAAA,MACN,GAAI,KAAK,SAAS,kBAAkB;AAAA,QAClC,gBAAgB,KAAK,SAAS;AAAA,MAChC;AAAA;AAAA,MAGA,GAAI,iBAAiB,KAAK,OAAO,KAAK;AAAA,QACpC,iBAAiB,EAAE,kBAAkB,KAAK;AAAA,MAC5C;AAAA,IACF;AAEA,UAAM,EAAE,UAAU,kBAAkB,IAClC,oCAAoC,MAAM;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,cAAM,EAAE,OAAO,YAAY,aAAa,IAAI;AAAA,UAC1C;AAAA,WACA,UAAK,SAAS,uBAAd,YAAoC;AAAA,UACpC,KAAK,QAAQ,SAAS,UAAU;AAAA,QAClC;AAEA,eAAO;AAAA,UACL,MAAM;AAAA,YACJ;AAAA,YACA;AAAA,YACA;AAAA,YACA,gBAAgB,KAAK,SAAS;AAAA,YAC9B;AAAA,YACA;AAAA,YACA,eAAe,KAAK,SAAS;AAAA,UAC/B;AAAA,UACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,kBAAkB;AAAA,cAChB,GAAG;AAAA,cACH,kBAAkB;AAAA,cAClB,gBACE,KAAK,UAAU;AAAA;AAAA,cAGf,KAAK,4BACD,iCAAiC,KAAK,MAAM,IAC5C;AAAA,YACR;AAAA,YACA;AAAA,YACA;AAAA,YACA,gBAAgB,KAAK,SAAS;AAAA,YAC9B,eAAe,KAAK,SAAS;AAAA,UAC/B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ;AAAA,YACA;AAAA,YACA,OAAO;AAAA,cACL,sBAAsB;AAAA,gBACpB;AAAA,kBACE,MAAM,KAAK,KAAK;AAAA,kBAChB,cAAa,UAAK,KAAK,gBAAV,YAAyB;AAAA,kBACtC,YAAY;AAAA,oBACV,KAAK,KAAK;AAAA,kBACZ;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,YACA,YAAY,EAAE,uBAAuB,EAAE,MAAM,MAAM,EAAE;AAAA,YACrD,gBAAgB,KAAK,SAAS;AAAA,YAC9B,eAAe,KAAK,SAAS;AAAA,UAC/B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,YAAY,KAAmB;AAC7B,WAAO,KAAK,OAAO,eAAe,GAAG;AAAA,EACvC;AAAA,EAEA,MAAM,WACJ,SAC6D;AA/MjE;AAgNI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM,oBAAgB;AAAA,MACpB,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,QAAI,MAAM,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,MAClC,KAAK;AAAA,IACP,CAAC;AAGD,QAAI,iBAAiB,KAAK,OAAO,GAAG;AAClC,YAAM,IAAI,QAAQ,UAAU,SAAS;AAAA,IACvC;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D;AAAA,MACA,SAAS;AAAA,MACT,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,kDAA0B,cAAc;AAAA,MACnE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,YAAY,SAAS,WAAW,CAAC;AAEvC,UAAM,YAAY,sBAAsB;AAAA,MACtC,QAAO,qBAAU,YAAV,mBAAmB,UAAnB,YAA4B,CAAC;AAAA,MACpC,YAAY,KAAK,OAAO;AAAA,IAC1B,CAAC;AAED,UAAM,gBAAgB,SAAS;AAE/B,WAAO;AAAA,MACL,MAAM,iBAAiB;AAAA,QACrB,QAAO,eAAU,YAAV,mBAAmB;AAAA,QAC1B,WAAW;AAAA,MACb,CAAC;AAAA,MACD,WAAW,iBAAiB;AAAA,QAC1B,QAAO,eAAU,YAAV,mBAAmB;AAAA,QAC1B,WAAW;AAAA,MACb,CAAC;AAAA,MACD;AAAA,MACA,cAAc,kCAAkC;AAAA,QAC9C,cAAc,UAAU;AAAA,QACxB,cAAc,aAAa,QAAQ,UAAU,SAAS;AAAA,MACxD,CAAC;AAAA,MACD,OAAO;AAAA,QACL,eAAc,oDAAe,qBAAf,YAAmC;AAAA,QACjD,mBAAkB,oDAAe,yBAAf,YAAuC;AAAA,MAC3D;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,kBAAkB;AAAA,QAChB,QAAQ;AAAA,UACN,oBAAmB,eAAU,sBAAV,YAA+B;AAAA,UAClD,gBAAe,eAAU,kBAAV,YAA2B;AAAA,QAC5C;AAAA,MACF;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO,K
AAK,UAAU,IAAI;AAChC,UAAM,cAAU;AAAA,MACd,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,QAAI,MAAM,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,MAClC,KAAK;AAAA,IACP,CAAC;AAED,QAAI,iBAAiB,KAAK,OAAO,GAAG;AAClC,YAAM,IAAI,QAAQ,UAAU,SAAS;AAAA,IACvC;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D;AAAA,MACA;AAAA,MACA,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,yDAAiC,WAAW;AAAA,MACvE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI,mBACF;AAEF,UAAMC,cAAa,KAAK,OAAO;AAC/B,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AApUvC;AAqUY,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,kBAAM,gBAAgB,MAAM;AAE5B,gBAAI,iBAAiB,MAAM;AACzB,sBAAQ;AAAA,gBACN,eAAc,mBAAc,qBAAd,YAAkC;AAAA,gBAChD,mBAAkB,mBAAc,yBAAd,YAAsC;AAAA,cAC1D;AAAA,YACF;AAEA,kBAAM,aAAY,WAAM,eAAN,mBAAmB;AAGrC,gBAAI,aAAa,MAAM;AACrB;AAAA,YACF;AAEA,gBAAI,UAAU,gBAAgB,MAAM;AAClC,6BAAe,kCAAkC;AAAA,gBAC/C,cAAc,UAAU;AAAA,gBACxB;AAAA,cACF,CAAC;AAED,iCAAmB;AAAA,gBACjB,QAAQ;AAAA,kBACN,oBAAmB,eAAU,sBAAV,YAA+B;AAAA,kBAClD,gBAAe,eAAU,kBAAV,YAA2B;AAAA,gBAC5C;AAAA,cACF;AAAA,YACF;AAEA,kBAAM,UAAU,UAAU;AAE1B,gBAAI,WAAW,MAAM;AACnB;AAAA,YACF;AAEA,kBAAM,YAAY,iBAAiB;AAAA,cACjC,OAAO,QAAQ;AAAA,cACf,WAAW;AAAA,YACb,CAAC;AAED,gBAAI,aAAa,MAAM;AACrB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW;AAAA,cACb,CAAC;AAAA,YACH;AAEA,kBAAM,gBAAgB,iBAAiB;AAAA,cACrC,OAAO,QAAQ;AAAA,cACf,WAAW;AAAA,YACb,CAAC;AAED,gBAAI,iBAAiB,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW;AAAA,cACb,CAAC;AAAA,YACH;AAEA,kBAAM,iBAAiB,sBAAsB;AAAA,cAC3C,OAAO,QAAQ;AAAA,cACf,YAAAA;AAAA,YACF,CAAC;AAED,gBAAI,kBAAkB,MAAM;AAC1B,yBAAW,YAAY,gBAAgB;AACrC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS;AAAA,kBACnB,eAAe,SAAS;AAAA,gBAC1B,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS;AAAA,kBACnB,MAAM,SAAS;AAAA,gBACjB,CAAC;AAED,+BAAe;AAAA,cACjB;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAEA,SAAS,sBAAsB;AAAA,EAC7B;AAAA,EACA,YAAAA;AACF,GAGG;AACD,QAAM,oBAAoB,MAAM;AAAA,IAC9B,UAAQ,kBAAkB;AAAA,EAC5B;AAMA,SAAO,kBAAkB,WAAW,IAChC,SACA,kBAAkB,IAAI,WAAS;AAAA,IAC7B,cAAc;AAAA,IACd,YAAYA,YAAW;AAAA,IACvB,UAAU,KAAK,aAAa;AAAA,IAC5B,MAAM,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,EAC7C,EAAE;AACR;AAEA,SAAS,iBAAiB;AAAA,EACxB;AAAA,EACA;AACF,GAGG;AACD,QAAM,aAAa,wBAAS,CAAC,GAAG;AAAA,IAC9B,CAAC,SAAgE;AAvdrE;AAwdM,uBAAU,UAAS,UAAK,YAAL,YAAgB,WAAW;AAAA;AAAA,EAClD;AAEA,SAAO,UAAU,WAAW,IACxB,SACA,UAAU,IAAI,UAAQ,KAAK,IAAI,EAAE,KAAK,EAAE;AAC9C;AAEA,IAAM,gBAAgB,cAAE,OAAO;AAAA,EAC7B,MAAM,cAAE,OAAO;AAAA,EACf,OAAO,cAAE;AAAA,IACP,cAAE,MAAM;AAAA,MACN,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,SAAS,cAAE,QAAQ,EAAE,QAAQ;AAAA,MAC/B,CAAC;AAAA,MACD,cAAE,OAAO;AAAA,QACP,cAAc,cAAE,OAAO;AAAA,UACrB,MAAM,cAAE,OAAO;AAAA,UACf,MAAM,cAAE,QAAQ;AAAA,QAClB,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC;AAIM,IAAM,0BAA0B,cAAE,OAAO;AAAA,EAC9C,kBAAkB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EAC9C,kBAAkB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EAC9C,kBAAkB,cACf,OAAO;AAAA,IACN,iBAAiB,cAAE,OAAO;AAAA,EAC5B,CAAC,EACA,QAAQ;AAAA,EACX,iBAAiB,cACd;AAAA,IACC,cAAE,OAAO;AAAA,MACP,KAAK,cACF,OAAO;AAAA,QACN,KAAK,cAAE,OAAO;AAAA,QACd,OAAO,c
AAE,OAAO;AAAA,MAClB,CAAC,EACA,QAAQ;AAAA,MACX,kBAAkB,cACf,OAAO;AAAA,QACN,KAAK,cAAE,OAAO;AAAA,QACd,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC,EACA,QAAQ;AAAA,IACb,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,mBAAmB,cAChB;AAAA,IACC,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,YAAY,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC/B,UAAU,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC7B,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,MAC3B,CAAC;AAAA,MACD,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,uBAAuB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MACnD,qBAAqB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MACjD,kBAAkB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MAC9C,iBAAiB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,IAC/C,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,mBAAmB,cAChB,MAAM;AAAA,IACL,cAAE,OAAO;AAAA,MACP,0BAA0B,cAAE,OAAO;AAAA,IACrC,CAAC;AAAA,IACD,cAAE,OAAO,CAAC,CAAC;AAAA,EACb,CAAC,EACA,QAAQ;AACb,CAAC;AAGM,IAAM,qBAAqB,cAAE,OAAO;AAAA,EACzC,UAAU,cAAE,OAAO;AAAA,EACnB,aAAa,cAAE,OAAO;AAAA,EACtB,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACrC,UAAU,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,SAAS,cAAE,QAAQ,EAAE,QAAQ;AAC/B,CAAC;AAED,IAAM,iBAAiB,cAAE,OAAO;AAAA,EAC9B,YAAY,cAAE;AAAA,IACZ,cAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ;AAAA,MAC/B,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAe,cAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH;AAAA,EACA,eAAe,cACZ,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,sBAAsB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACzC,iBAAiB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,cAAc,cAAE,OAAO;AAAA,EAC3B,YAAY,cACT;AAAA,IACC,cAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ;AAAA,MAC/B,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAe,cAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,eAAe,cACZ,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,sBAAsB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACzC,iBAAiB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,CAAC,EACA,QAAQ;AACb,CAAC;AAED,SAAS,iBAAiB,SAAiB;AACzC,SAAO,YAAY;AACrB;;;AO5lBA,IAAAC,mBAGO;AACP,IAAAC,yBAMO;AACP,IAAAC,cAAkB;AAcX,IAAM,mCAAN,MAEP;AAAA,EAmBE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AACjC,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,wBAAiC;AACnC,WAAO;AAAA,EACT;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,oBAAgB;AAAA,MACpB,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC;AAAA,IACF;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AAAA,MAClD,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,UAAU,OAAO,IAAI,YAAU;AAAA,UAC7B,OAAO,UAAU,KAAK,OAAO;AAAA,UAC7B,SAAS,EAAE,MAAM,QAAQ,OAAO,CAAC,EAAE,MAAM,MAAM,CAAC,EAAE;AAAA,UAClD,sBAAsB,KAAK,SAAS;AAAA,QACtC,EAAE;AAAA,MACJ;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,WAAW,IAAI,UAAQ,KAAK,MAAM;AAAA,MACvD,OAAO;AAAA,MACP,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,gDAAgD,cAAE,OAAO;AAAA,EAC7D,YAAY,cAAE,MAAM,cAAE,OAAO,EAAE,QAAQ,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAC/D,CAAC;;;AC3GM,SAAS,mBAAmB,KAAmB;AACpD,SAAO,IACJ,SAAS,EACT,WAAW,yDAAyD;AACzE;;;ATkGO,SAAS,yBACd,UAA8C,CAAC,GACnB;AAxG9B;AAyGE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YACA;AAEF,QAAM,aAAa,OAAO;AAAA,IACxB,sBAAkB,mCAAW;AAAA,MAC3B,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,IACD,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAuC,CAAC,MACxC;AAzHJ,QAAAC;AA0HI,eAAI,
gCAAgC,SAAS,UAAU;AAAA,MACrD,UAAU;AAAA,MACV;AAAA,MACA,SAAS;AAAA,MACT,aAAYA,MAAA,QAAQ,eAAR,OAAAA,MAAsB;AAAA,MAClC,gBAAgB;AAAA,MAChB,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA;AAEH,QAAM,uBAAuB,CAC3B,SACA,WAAgD,CAAC,MAEjD,IAAI,iCAAiC,SAAS,UAAU;AAAA,IACtD,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,eAAe;AACxB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAKO,IAAM,SAAS,yBAAyB;","names":["import_provider_utils","import_provider_utils","import_zod","import_provider_utils","import_provider","generateId","import_provider","import_provider_utils","import_zod","_a"]}
|
package/dist/index.mjs
CHANGED
@@ -393,6 +393,10 @@ var GoogleGenerativeAILanguageModel = class {
|
|
393
393
|
this.supportsStructuredOutputs ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0,
|
394
394
|
...this.settings.audioTimestamp && {
|
395
395
|
audioTimestamp: this.settings.audioTimestamp
|
396
|
+
},
|
397
|
+
// reasoning models:
|
398
|
+
...isReasoningModel(this.modelId) && {
|
399
|
+
thinking_config: { include_thoughts: true }
|
396
400
|
}
|
397
401
|
};
|
398
402
|
const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(prompt);
|
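The hunk above is the core of the new reasoning support: when the configured model is the thinking-mode model, a `thinking_config` entry with `include_thoughts: true` is spread into the request's `generationConfig`, so the API returns thought parts alongside the answer. A minimal sketch of the object this produces; the `maxOutputTokens` and `temperature` values are placeholders and not taken from the diff:

// Sketch of the generationConfig built in getArgs for a reasoning model.
// Only the thinking_config branch comes from the diff; the other values are placeholders.
const isReasoningModel = (modelId: string) =>
  modelId === 'gemini-2.0-flash-thinking-exp';

const modelId = 'gemini-2.0-flash-thinking-exp';

const generationConfig = {
  maxOutputTokens: 1024, // placeholder value
  temperature: 0.7, // placeholder value
  ...(isReasoningModel(modelId) && {
    thinking_config: { include_thoughts: true },
  }),
};
// => { maxOutputTokens: 1024, temperature: 0.7, thinking_config: { include_thoughts: true } }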
@@ -464,7 +468,7 @@ var GoogleGenerativeAILanguageModel = class {
|
|
464
468
|
}
|
465
469
|
}
|
466
470
|
supportsUrl(url) {
|
467
|
-
return
|
471
|
+
return this.config.isSupportedUrl(url);
|
468
472
|
}
|
469
473
|
async doGenerate(options) {
|
470
474
|
var _a, _b, _c, _d, _e, _f, _g, _h;
|
@@ -474,10 +478,14 @@ var GoogleGenerativeAILanguageModel = class {
|
|
474
478
|
await resolve(this.config.headers),
|
475
479
|
options.headers
|
476
480
|
);
|
481
|
+
let url = `${this.config.baseURL}/${getModelPath(
|
482
|
+
this.modelId
|
483
|
+
)}:generateContent`;
|
484
|
+
if (isReasoningModel(this.modelId)) {
|
485
|
+
url = url.replace("v1beta", "v1alpha");
|
486
|
+
}
|
477
487
|
const { responseHeaders, value: response } = await postJsonToApi({
|
478
|
-
url
|
479
|
-
this.modelId
|
480
|
-
)}:generateContent`,
|
488
|
+
url,
|
481
489
|
headers: mergedHeaders,
|
482
490
|
body: args,
|
483
491
|
failedResponseHandler: googleFailedResponseHandler,
|
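The hunk above also changes how the generate-content URL is built: it is assembled first and then, for the thinking model, the API version segment is rewritten, since thought output is only served by the alpha API. A small sketch of the resulting endpoints, assuming the provider's default `baseURL` and the `getModelPath` helper from the embedded source:

// Endpoint selection as implemented in the hunk above (default baseURL assumed).
const baseURL = 'https://generativelanguage.googleapis.com/v1beta';

function getModelPath(modelId: string): string {
  return modelId.includes('/') ? modelId : `models/${modelId}`;
}

function generateContentUrl(modelId: string): string {
  let url = `${baseURL}/${getModelPath(modelId)}:generateContent`;
  if (modelId === 'gemini-2.0-flash-thinking-exp') {
    url = url.replace('v1beta', 'v1alpha'); // reasoning models live on the alpha API
  }
  return url;
}

generateContentUrl('gemini-1.5-pro');
// -> https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-pro:generateContent
generateContentUrl('gemini-2.0-flash-thinking-exp');
// -> https://generativelanguage.googleapis.com/v1alpha/models/gemini-2.0-flash-thinking-exp:generateContent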
@@ -493,7 +501,14 @@ var GoogleGenerativeAILanguageModel = class {
|
|
493
501
|
});
|
494
502
|
const usageMetadata = response.usageMetadata;
|
495
503
|
return {
|
496
|
-
text: getTextFromParts(
|
504
|
+
text: getTextFromParts({
|
505
|
+
parts: (_c = candidate.content) == null ? void 0 : _c.parts,
|
506
|
+
isThought: false
|
507
|
+
}),
|
508
|
+
reasoning: getTextFromParts({
|
509
|
+
parts: (_d = candidate.content) == null ? void 0 : _d.parts,
|
510
|
+
isThought: true
|
511
|
+
}),
|
497
512
|
toolCalls,
|
498
513
|
finishReason: mapGoogleGenerativeAIFinishReason({
|
499
514
|
finishReason: candidate.finishReason,
|
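With the hunk above, `doGenerate` splits the candidate parts into two fields: `text` collects the regular parts and the new `reasoning` field collects the thought parts. A hedged end-to-end sketch; it assumes an `ai` core version (4.1 or later) that exposes the provider's reasoning text on the `generateText` result:

// Sketch: requesting the thinking model and reading answer and reasoning separately.
// Assumption: the installed `ai` core (4.1+) surfaces `reasoning` on the result object.
import { generateText } from 'ai';
import { google } from '@ai-sdk/google';

const { text, reasoning } = await generateText({
  model: google('gemini-2.0-flash-thinking-exp'),
  prompt: 'What is 17 * 24?',
});

console.log(reasoning); // thought parts returned by the model (may be undefined)
console.log(text); // final answer, e.g. "408"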
@@ -522,10 +537,14 @@ var GoogleGenerativeAILanguageModel = class {
|
|
522
537
|
await resolve(this.config.headers),
|
523
538
|
options.headers
|
524
539
|
);
|
540
|
+
let url = `${this.config.baseURL}/${getModelPath(
|
541
|
+
this.modelId
|
542
|
+
)}:streamGenerateContent?alt=sse`;
|
543
|
+
if (isReasoningModel(this.modelId)) {
|
544
|
+
url = url.replace("v1beta", "v1alpha");
|
545
|
+
}
|
525
546
|
const { responseHeaders, value: response } = await postJsonToApi({
|
526
|
-
url
|
527
|
-
this.modelId
|
528
|
-
)}:streamGenerateContent?alt=sse`,
|
547
|
+
url,
|
529
548
|
headers,
|
530
549
|
body: args,
|
531
550
|
failedResponseHandler: googleFailedResponseHandler,
|
@@ -579,13 +598,26 @@ var GoogleGenerativeAILanguageModel = class {
|
|
579
598
|
if (content == null) {
|
580
599
|
return;
|
581
600
|
}
|
582
|
-
const deltaText = getTextFromParts(
|
601
|
+
const deltaText = getTextFromParts({
|
602
|
+
parts: content.parts,
|
603
|
+
isThought: false
|
604
|
+
});
|
583
605
|
if (deltaText != null) {
|
584
606
|
controller.enqueue({
|
585
607
|
type: "text-delta",
|
586
608
|
textDelta: deltaText
|
587
609
|
});
|
588
610
|
}
|
611
|
+
const reasoningText = getTextFromParts({
|
612
|
+
parts: content.parts,
|
613
|
+
isThought: true
|
614
|
+
});
|
615
|
+
if (reasoningText != null) {
|
616
|
+
controller.enqueue({
|
617
|
+
type: "reasoning",
|
618
|
+
textDelta: reasoningText
|
619
|
+
});
|
620
|
+
}
|
589
621
|
const toolCallDeltas = getToolCallsFromParts({
|
590
622
|
parts: content.parts,
|
591
623
|
generateId: generateId2
|
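The streaming path mirrors this: the transform above emits `reasoning` parts carrying a `textDelta` in between the usual `text-delta` parts. A hedged consumer sketch; it assumes an `ai` core version (4.1 or later) whose `fullStream` forwards these provider reasoning parts unchanged:

// Sketch: consuming the new reasoning stream parts.
// Assumption: the installed `ai` core (4.1+) forwards 'reasoning' parts on fullStream.
import { streamText } from 'ai';
import { google } from '@ai-sdk/google';

const result = streamText({
  model: google('gemini-2.0-flash-thinking-exp'),
  prompt: 'How many prime numbers are there below 100?',
});

for await (const part of result.fullStream) {
  if (part.type === 'reasoning') {
    process.stdout.write(`[thinking] ${part.textDelta}`);
  } else if (part.type === 'text-delta') {
    process.stdout.write(part.textDelta);
  }
}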
@@ -641,8 +673,16 @@ function getToolCallsFromParts({
|
|
641
673
|
args: JSON.stringify(part.functionCall.args)
|
642
674
|
}));
|
643
675
|
}
|
644
|
-
function getTextFromParts(
|
645
|
-
|
676
|
+
function getTextFromParts({
|
677
|
+
parts,
|
678
|
+
isThought
|
679
|
+
}) {
|
680
|
+
const textParts = (parts != null ? parts : []).filter(
|
681
|
+
(part) => {
|
682
|
+
var _a;
|
683
|
+
return "text" in part && ((_a = part.thought) != null ? _a : false) === isThought;
|
684
|
+
}
|
685
|
+
);
|
646
686
|
return textParts.length === 0 ? void 0 : textParts.map((part) => part.text).join("");
|
647
687
|
}
|
648
688
|
var contentSchema = z2.object({
|
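`getTextFromParts` now takes an `isThought` flag and filters the candidate parts on the new `thought` boolean, so the same helper yields either the visible answer or the reasoning text. A standalone, slightly simplified repro of that filter (positional arguments instead of the original options object):

// Simplified repro of the getTextFromParts filter from the hunk above.
type Part =
  | { text: string; thought?: boolean | null }
  | { functionCall: { name: string; args: unknown } };

function getTextFromParts(parts: Part[] | undefined, isThought: boolean) {
  const textParts = (parts ?? []).filter(
    (part): part is { text: string; thought?: boolean | null } =>
      'text' in part && (part.thought ?? false) === isThought,
  );
  return textParts.length === 0
    ? undefined
    : textParts.map(part => part.text).join('');
}

const parts: Part[] = [
  { text: 'Counting the primes below 100...', thought: true },
  { text: 'There are 25 primes below 100.' },
];

getTextFromParts(parts, false); // 'There are 25 primes below 100.'
getTextFromParts(parts, true); // 'Counting the primes below 100...'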
@@ -650,7 +690,8 @@ var contentSchema = z2.object({
|
|
650
690
|
parts: z2.array(
|
651
691
|
z2.union([
|
652
692
|
z2.object({
|
653
|
-
text: z2.string()
|
693
|
+
text: z2.string(),
|
694
|
+
thought: z2.boolean().nullish()
|
654
695
|
}),
|
655
696
|
z2.object({
|
656
697
|
functionCall: z2.object({
|
@@ -738,6 +779,9 @@ var chunkSchema = z2.object({
|
|
738
779
|
totalTokenCount: z2.number().nullish()
|
739
780
|
}).nullish()
|
740
781
|
});
|
782
|
+
function isReasoningModel(modelId) {
|
783
|
+
return modelId === "gemini-2.0-flash-thinking-exp";
|
784
|
+
}
|
741
785
|
|
742
786
|
// src/google-generative-ai-embedding-model.ts
|
743
787
|
import {
|
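`isReasoningModel`, added in the hunk above, is an exact string comparison, so only `gemini-2.0-flash-thinking-exp` itself gets the `thinking_config` flag and the v1alpha endpoint; other 2.0 models, and any dated thinking variants, fall through to the regular path:

// The gate added above is an exact match on the model ID.
function isReasoningModel(modelId: string): boolean {
  return modelId === 'gemini-2.0-flash-thinking-exp';
}

isReasoningModel('gemini-2.0-flash-thinking-exp'); // true
isReasoningModel('gemini-2.0-flash-exp'); // false
isReasoningModel('gemini-2.0-flash-thinking-exp-1219'); // false (hypothetical dated variant)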
@@ -811,6 +855,11 @@ var googleGenerativeAITextEmbeddingResponseSchema = z3.object({
|
|
811
855
|
embeddings: z3.array(z3.object({ values: z3.array(z3.number()) }))
|
812
856
|
});
|
813
857
|
|
858
|
+
// src/google-supported-file-url.ts
|
859
|
+
function isSupportedFileUrl(url) {
|
860
|
+
return url.toString().startsWith("https://generativelanguage.googleapis.com/v1beta/files/");
|
861
|
+
}
|
862
|
+
|
814
863
|
// src/google-provider.ts
|
815
864
|
function createGoogleGenerativeAI(options = {}) {
|
816
865
|
var _a;
|
@@ -830,6 +879,7 @@ function createGoogleGenerativeAI(options = {}) {
|
|
830
879
|
baseURL,
|
831
880
|
headers: getHeaders,
|
832
881
|
generateId: (_a2 = options.generateId) != null ? _a2 : generateId,
|
882
|
+
isSupportedUrl: isSupportedFileUrl,
|
833
883
|
fetch: options.fetch
|
834
884
|
});
|
835
885
|
};
|
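The last two hunks add the public-file-URL support: `isSupportedFileUrl` accepts only Files API URLs on the generativelanguage host, and the provider now passes it to the language model as `isSupportedUrl`, which backs `supportsUrl`. A quick check against that logic; the file id is hypothetical, and the fallback for unsupported URLs (the calling SDK downloading them and sending the bytes inline) is the core's usual behavior rather than something shown in this diff:

// Which URLs the updated provider forwards as fileData without downloading.
import { createGoogleGenerativeAI } from '@ai-sdk/google';

const provider = createGoogleGenerativeAI();
const model = provider('gemini-1.5-flash');

model.supportsUrl?.(
  new URL('https://generativelanguage.googleapis.com/v1beta/files/abc-123'), // hypothetical file id
); // true: a Files API upload is passed through as a fileUri part

model.supportsUrl?.(new URL('https://example.com/report.pdf'));
// false: not a Files API URL, so the calling SDK would fall back to fetching it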