@ai-sdk/google-vertex 4.0.40 → 4.0.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +11 -0
- package/dist/anthropic/edge/index.js +1 -1
- package/dist/anthropic/edge/index.mjs +1 -1
- package/dist/edge/index.d.mts +7 -1
- package/dist/edge/index.d.ts +7 -1
- package/dist/edge/index.js +282 -20
- package/dist/edge/index.js.map +1 -1
- package/dist/edge/index.mjs +279 -5
- package/dist/edge/index.mjs.map +1 -1
- package/dist/index.d.mts +22 -2
- package/dist/index.d.ts +22 -2
- package/dist/index.js +276 -14
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +279 -5
- package/dist/index.mjs.map +1 -1
- package/package.json +5 -5
- package/src/google-vertex-provider.ts +20 -1
- package/src/google-vertex-video-model.ts +374 -0
- package/src/google-vertex-video-settings.ts +16 -0
- package/src/index.ts +2 -0
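The headline change in this release is experimental video generation support: createVertex() now exposes a provider.video(modelId) factory backed by the new GoogleVertexVideoModel (src/google-vertex-video-model.ts, bundled into dist/index.mjs below). The following is a minimal sketch of driving the new model directly through its doGenerate method, under stated assumptions: the model ID "veo-2.0-generate-001" is a placeholder not taken from this diff (the supported IDs live in the new src/google-vertex-video-settings.ts, which is not shown here), and only options that the bundled code actually reads (prompt, n, aspectRatio, duration, providerOptions) are passed.

import { createVertex } from '@ai-sdk/google-vertex';

async function main() {
  const vertex = createVertex();

  // Placeholder model ID -- the supported IDs are not listed in this diff.
  const model = vertex.video('veo-2.0-generate-001');

  // doGenerate() starts a :predictLongRunning operation and then polls
  // :fetchPredictOperation until the operation reports done.
  const result = await model.doGenerate({
    prompt: 'A timelapse of clouds rolling over a mountain range',
    n: 1,
    aspectRatio: '16:9',
    duration: 8, // forwarded as parameters.durationSeconds
    providerOptions: {},
  });

  // Videos come back as base64 data or GCS URLs, with per-video metadata
  // under providerMetadata['google-vertex'].videos.
  console.log(result.videos[0].mediaType, result.providerMetadata);
}

main().catch(console.error);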
package/dist/index.mjs
CHANGED
@@ -1,5 +1,5 @@
 // src/google-vertex-provider-node.ts
-import { loadOptionalSetting as loadOptionalSetting2, resolve as
+import { loadOptionalSetting as loadOptionalSetting2, resolve as resolve5 } from "@ai-sdk/provider-utils";

 // src/google-vertex-auth-google-auth-library.ts
 import { GoogleAuth } from "google-auth-library";
@@ -29,13 +29,13 @@ import {
   loadOptionalSetting,
   loadSetting,
   normalizeHeaders,
-  resolve as
+  resolve as resolve4,
   withoutTrailingSlash,
   withUserAgentSuffix
 } from "@ai-sdk/provider-utils";

 // src/version.ts
-var VERSION = true ? "4.0.40" : "0.0.0-test";
+var VERSION = true ? "4.0.41" : "0.0.0-test";

 // src/google-vertex-embedding-model.ts
 import {
@@ -435,6 +435,272 @@ var googleVertexTools = {
   vertexRagStore: googleTools.vertexRagStore
 };

+// src/google-vertex-video-model.ts
+import {
+  AISDKError
+} from "@ai-sdk/provider";
+import {
+  combineHeaders as combineHeaders3,
+  convertUint8ArrayToBase64 as convertUint8ArrayToBase642,
+  createJsonResponseHandler as createJsonResponseHandler3,
+  delay,
+  lazySchema,
+  parseProviderOptions as parseProviderOptions3,
+  postJsonToApi as postJsonToApi3,
+  resolve as resolve3,
+  zodSchema
+} from "@ai-sdk/provider-utils";
+import { z as z5 } from "zod/v4";
+var GoogleVertexVideoModel = class {
+  constructor(modelId, config) {
+    this.modelId = modelId;
+    this.config = config;
+    this.specificationVersion = "v3";
+  }
+  get provider() {
+    return this.config.provider;
+  }
+  get maxVideosPerCall() {
+    return 4;
+  }
+  async doGenerate(options) {
+    var _a, _b, _c, _d, _e, _f;
+    const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
+    const warnings = [];
+    const vertexOptions = await parseProviderOptions3({
+      provider: "vertex",
+      providerOptions: options.providerOptions,
+      schema: vertexVideoProviderOptionsSchema
+    });
+    const instances = [{}];
+    const instance = instances[0];
+    if (options.prompt != null) {
+      instance.prompt = options.prompt;
+    }
+    if (options.image != null) {
+      if (options.image.type === "url") {
+        warnings.push({
+          type: "unsupported",
+          feature: "URL-based image input",
+          details: "Vertex AI video models require base64-encoded images or GCS URIs. URL will be ignored."
+        });
+      } else {
+        const base64Data = typeof options.image.data === "string" ? options.image.data : convertUint8ArrayToBase642(options.image.data);
+        instance.image = {
+          bytesBase64Encoded: base64Data
+        };
+      }
+    }
+    if ((vertexOptions == null ? void 0 : vertexOptions.referenceImages) != null) {
+      instance.referenceImages = vertexOptions.referenceImages;
+    }
+    const parameters = {
+      sampleCount: options.n
+    };
+    if (options.aspectRatio) {
+      parameters.aspectRatio = options.aspectRatio;
+    }
+    if (options.resolution) {
+      const resolutionMap = {
+        "1280x720": "720p",
+        "1920x1080": "1080p",
+        "3840x2160": "4k"
+      };
+      parameters.resolution = resolutionMap[options.resolution] || options.resolution;
+    }
+    if (options.duration) {
+      parameters.durationSeconds = options.duration;
+    }
+    if (options.seed) {
+      parameters.seed = options.seed;
+    }
+    if (vertexOptions != null) {
+      const opts = vertexOptions;
+      if (opts.personGeneration !== void 0 && opts.personGeneration !== null) {
+        parameters.personGeneration = opts.personGeneration;
+      }
+      if (opts.negativePrompt !== void 0 && opts.negativePrompt !== null) {
+        parameters.negativePrompt = opts.negativePrompt;
+      }
+      if (opts.generateAudio !== void 0 && opts.generateAudio !== null) {
+        parameters.generateAudio = opts.generateAudio;
+      }
+      if (opts.gcsOutputDirectory !== void 0 && opts.gcsOutputDirectory !== null) {
+        parameters.gcsOutputDirectory = opts.gcsOutputDirectory;
+      }
+      for (const [key, value] of Object.entries(opts)) {
+        if (![
+          "pollIntervalMs",
+          "pollTimeoutMs",
+          "personGeneration",
+          "negativePrompt",
+          "generateAudio",
+          "gcsOutputDirectory",
+          "referenceImages"
+        ].includes(key)) {
+          parameters[key] = value;
+        }
+      }
+    }
+    const { value: operation } = await postJsonToApi3({
+      url: `${this.config.baseURL}/models/${this.modelId}:predictLongRunning`,
+      headers: combineHeaders3(
+        await resolve3(this.config.headers),
+        options.headers
+      ),
+      body: {
+        instances,
+        parameters
+      },
+      successfulResponseHandler: createJsonResponseHandler3(
+        vertexOperationSchema
+      ),
+      failedResponseHandler: googleVertexFailedResponseHandler,
+      abortSignal: options.abortSignal,
+      fetch: this.config.fetch
+    });
+    const operationName = operation.name;
+    if (!operationName) {
+      throw new AISDKError({
+        name: "VERTEX_VIDEO_GENERATION_ERROR",
+        message: "No operation name returned from API"
+      });
+    }
+    const pollIntervalMs = (_d = vertexOptions == null ? void 0 : vertexOptions.pollIntervalMs) != null ? _d : 1e4;
+    const pollTimeoutMs = (_e = vertexOptions == null ? void 0 : vertexOptions.pollTimeoutMs) != null ? _e : 6e5;
+    const startTime = Date.now();
+    let finalOperation = operation;
+    let responseHeaders;
+    while (!finalOperation.done) {
+      if (Date.now() - startTime > pollTimeoutMs) {
+        throw new AISDKError({
+          name: "VERTEX_VIDEO_GENERATION_TIMEOUT",
+          message: `Video generation timed out after ${pollTimeoutMs}ms`
+        });
+      }
+      await delay(pollIntervalMs);
+      if ((_f = options.abortSignal) == null ? void 0 : _f.aborted) {
+        throw new AISDKError({
+          name: "VERTEX_VIDEO_GENERATION_ABORTED",
+          message: "Video generation request was aborted"
+        });
+      }
+      const { value: statusOperation, responseHeaders: pollHeaders } = await postJsonToApi3({
+        url: `${this.config.baseURL}/models/${this.modelId}:fetchPredictOperation`,
+        headers: combineHeaders3(
+          await resolve3(this.config.headers),
+          options.headers
+        ),
+        body: {
+          operationName
+        },
+        successfulResponseHandler: createJsonResponseHandler3(
+          vertexOperationSchema
+        ),
+        failedResponseHandler: googleVertexFailedResponseHandler,
+        abortSignal: options.abortSignal,
+        fetch: this.config.fetch
+      });
+      finalOperation = statusOperation;
+      responseHeaders = pollHeaders;
+    }
+    if (finalOperation.error) {
+      throw new AISDKError({
+        name: "VERTEX_VIDEO_GENERATION_FAILED",
+        message: `Video generation failed: ${finalOperation.error.message}`
+      });
+    }
+    const response = finalOperation.response;
+    if (!(response == null ? void 0 : response.videos) || response.videos.length === 0) {
+      throw new AISDKError({
+        name: "VERTEX_VIDEO_GENERATION_ERROR",
+        message: `No videos in response. Response: ${JSON.stringify(finalOperation)}`
+      });
+    }
+    const videos = [];
+    const videoMetadata = [];
+    for (const video of response.videos) {
+      if (video.bytesBase64Encoded) {
+        videos.push({
+          type: "base64",
+          data: video.bytesBase64Encoded,
+          mediaType: video.mimeType || "video/mp4"
+        });
+        videoMetadata.push({
+          mimeType: video.mimeType
+        });
+      } else if (video.gcsUri) {
+        videos.push({
+          type: "url",
+          url: video.gcsUri,
+          mediaType: video.mimeType || "video/mp4"
+        });
+        videoMetadata.push({
+          gcsUri: video.gcsUri,
+          mimeType: video.mimeType
+        });
+      }
+    }
+    if (videos.length === 0) {
+      throw new AISDKError({
+        name: "VERTEX_VIDEO_GENERATION_ERROR",
+        message: "No valid videos in response"
+      });
+    }
+    return {
+      videos,
+      warnings,
+      response: {
+        timestamp: currentDate,
+        modelId: this.modelId,
+        headers: responseHeaders
+      },
+      providerMetadata: {
+        "google-vertex": {
+          videos: videoMetadata
+        }
+      }
+    };
+  }
+};
+var vertexOperationSchema = z5.object({
+  name: z5.string().nullish(),
+  done: z5.boolean().nullish(),
+  error: z5.object({
+    code: z5.number().nullish(),
+    message: z5.string(),
+    status: z5.string().nullish()
+  }).nullish(),
+  response: z5.object({
+    videos: z5.array(
+      z5.object({
+        bytesBase64Encoded: z5.string().nullish(),
+        gcsUri: z5.string().nullish(),
+        mimeType: z5.string().nullish()
+      })
+    ).nullish(),
+    raiMediaFilteredCount: z5.number().nullish()
+  }).nullish()
+});
+var vertexVideoProviderOptionsSchema = lazySchema(
+  () => zodSchema(
+    z5.object({
+      pollIntervalMs: z5.number().positive().nullish(),
+      pollTimeoutMs: z5.number().positive().nullish(),
+      personGeneration: z5.enum(["dont_allow", "allow_adult", "allow_all"]).nullish(),
+      negativePrompt: z5.string().nullish(),
+      generateAudio: z5.boolean().nullish(),
+      gcsOutputDirectory: z5.string().nullish(),
+      referenceImages: z5.array(
+        z5.object({
+          bytesBase64Encoded: z5.string().nullish(),
+          gcsUri: z5.string().nullish()
+        })
+      ).nullish()
+    }).passthrough()
+  )
+);
+
 // src/google-vertex-provider.ts
 var EXPRESS_MODE_BASE_URL = "https://aiplatform.googleapis.com/v1/publishers/google";
 function createExpressModeFetch(apiKey, customFetch) {
@@ -479,7 +745,7 @@ function createVertex(options = {}) {
   const createConfig = (name) => {
     const getHeaders = async () => {
       var _a;
-      const originalHeaders = await
+      const originalHeaders = await resolve4((_a = options.headers) != null ? _a : {});
       return withUserAgentSuffix(
         originalHeaders,
         `ai-sdk/google-vertex/${VERSION}`
@@ -509,6 +775,13 @@ function createVertex(options = {}) {
   };
   const createEmbeddingModel = (modelId) => new GoogleVertexEmbeddingModel(modelId, createConfig("embedding"));
   const createImageModel = (modelId) => new GoogleVertexImageModel(modelId, createConfig("image"));
+  const createVideoModel = (modelId) => {
+    var _a;
+    return new GoogleVertexVideoModel(modelId, {
+      ...createConfig("video"),
+      generateId: (_a = options.generateId) != null ? _a : generateId
+    });
+  };
   const provider = function(modelId) {
     if (new.target) {
       throw new Error(
@@ -523,6 +796,7 @@ function createVertex(options = {}) {
   provider.textEmbeddingModel = createEmbeddingModel;
   provider.image = createImageModel;
   provider.imageModel = createImageModel;
+  provider.video = createVideoModel;
   provider.tools = googleVertexTools;
   return provider;
 }
@@ -542,7 +816,7 @@ function createVertex2(options = {}) {
       Authorization: `Bearer ${await generateAuthToken(
         options.googleAuthOptions
       )}`,
-      ...await
+      ...await resolve5(options.headers)
     })
   });
 }
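Continuing the sketch from the top of this diff (same assumed model instance), the Vertex-specific options parsed by the new video model are passed under the "vertex" key of providerOptions; the bundled defaults are pollIntervalMs 1e4 (10 s) and pollTimeoutMs 6e5 (10 min), and keys outside the schema's reserved list pass straight through into the request parameters. The gs:// bucket below is hypothetical.

const { videos, warnings } = await model.doGenerate({
  prompt: 'An aerial shot of a coastline at sunset',
  n: 1,
  providerOptions: {
    vertex: {
      pollIntervalMs: 5_000,   // poll every 5 s instead of the 10 s default
      pollTimeoutMs: 300_000,  // give up after 5 minutes
      personGeneration: 'dont_allow',
      negativePrompt: 'blurry, low quality',
      generateAudio: false,
      // Hypothetical bucket: ask Vertex to write results to Cloud Storage,
      // returned as gcsUri / type "url" videos instead of base64 bytes.
      gcsOutputDirectory: 'gs://my-bucket/videos',
    },
  },
});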
package/dist/index.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/google-vertex-provider-node.ts","../src/google-vertex-auth-google-auth-library.ts","../src/google-vertex-provider.ts","../src/version.ts","../src/google-vertex-embedding-model.ts","../src/google-vertex-error.ts","../src/google-vertex-embedding-options.ts","../src/google-vertex-image-model.ts","../src/google-vertex-tools.ts"],"sourcesContent":["import { loadOptionalSetting, resolve } from '@ai-sdk/provider-utils';\nimport { GoogleAuthOptions } from 'google-auth-library';\nimport { generateAuthToken } from './google-vertex-auth-google-auth-library';\nimport {\n createVertex as createVertexOriginal,\n GoogleVertexProvider,\n GoogleVertexProviderSettings as GoogleVertexProviderSettingsOriginal,\n} from './google-vertex-provider';\n\nexport interface GoogleVertexProviderSettings\n extends GoogleVertexProviderSettingsOriginal {\n /**\n * Optional. The Authentication options provided by google-auth-library.\n * Complete list of authentication options is documented in the\n * GoogleAuthOptions interface:\n * https://github.com/googleapis/google-auth-library-nodejs/blob/main/src/auth/googleauth.ts.\n */\n googleAuthOptions?: GoogleAuthOptions;\n}\n\nexport type { GoogleVertexProvider };\n\nexport function createVertex(\n options: GoogleVertexProviderSettings = {},\n): GoogleVertexProvider {\n const apiKey = loadOptionalSetting({\n settingValue: options.apiKey,\n environmentVariableName: 'GOOGLE_VERTEX_API_KEY',\n });\n\n if (apiKey) {\n return createVertexOriginal(options);\n }\n\n return createVertexOriginal({\n ...options,\n headers: async () => ({\n Authorization: `Bearer ${await generateAuthToken(\n options.googleAuthOptions,\n )}`,\n ...(await resolve(options.headers)),\n }),\n });\n}\n\n/**\n * Default Google Vertex AI provider instance.\n */\nexport const vertex = createVertex();\n","import { GoogleAuth, GoogleAuthOptions } from 'google-auth-library';\n\nlet authInstance: GoogleAuth | null = null;\nlet authOptions: GoogleAuthOptions | null = null;\n\nfunction getAuth(options: GoogleAuthOptions) {\n if (!authInstance || options !== authOptions) {\n authInstance = new GoogleAuth({\n scopes: ['https://www.googleapis.com/auth/cloud-platform'],\n ...options,\n });\n authOptions = options;\n }\n return authInstance;\n}\n\nexport async function generateAuthToken(options?: GoogleAuthOptions) {\n const auth = getAuth(options || {});\n const client = await auth.getClient();\n const token = await client.getAccessToken();\n return token?.token || null;\n}\n\n// For testing purposes only\nexport function _resetAuthInstance() {\n authInstance = null;\n}\n","import { GoogleGenerativeAILanguageModel } from '@ai-sdk/google/internal';\nimport { ImageModelV3, LanguageModelV3, ProviderV3 } from '@ai-sdk/provider';\nimport {\n FetchFunction,\n generateId,\n loadOptionalSetting,\n loadSetting,\n normalizeHeaders,\n resolve,\n Resolvable,\n withoutTrailingSlash,\n withUserAgentSuffix,\n} from '@ai-sdk/provider-utils';\nimport { VERSION } from './version';\nimport { GoogleVertexConfig } from './google-vertex-config';\nimport { GoogleVertexEmbeddingModel } from './google-vertex-embedding-model';\nimport { GoogleVertexEmbeddingModelId } from './google-vertex-embedding-options';\nimport { GoogleVertexImageModel } from './google-vertex-image-model';\nimport { GoogleVertexImageModelId } from './google-vertex-image-settings';\nimport { GoogleVertexModelId } from './google-vertex-options';\nimport { googleVertexTools } from './google-vertex-tools';\n\nconst EXPRESS_MODE_BASE_URL =\n 
'https://aiplatform.googleapis.com/v1/publishers/google';\n\n// set `x-goog-api-key` header to API key for express mode\nfunction createExpressModeFetch(\n apiKey: string,\n customFetch?: FetchFunction,\n): FetchFunction {\n return async (url, init) => {\n const modifiedInit: RequestInit = {\n ...init,\n headers: {\n ...(init?.headers ? normalizeHeaders(init.headers) : {}),\n 'x-goog-api-key': apiKey,\n },\n };\n return (customFetch ?? fetch)(url.toString(), modifiedInit);\n };\n}\n\nexport interface GoogleVertexProvider extends ProviderV3 {\n /**\n * Creates a model for text generation.\n */\n (modelId: GoogleVertexModelId): LanguageModelV3;\n\n languageModel: (modelId: GoogleVertexModelId) => LanguageModelV3;\n\n /**\n * Creates a model for image generation.\n */\n image(modelId: GoogleVertexImageModelId): ImageModelV3;\n\n /**\n * Creates a model for image generation.\n */\n imageModel(modelId: GoogleVertexImageModelId): ImageModelV3;\n\n tools: typeof googleVertexTools;\n\n /**\n * @deprecated Use `embeddingModel` instead.\n */\n textEmbeddingModel(\n modelId: GoogleVertexEmbeddingModelId,\n ): GoogleVertexEmbeddingModel;\n}\n\nexport interface GoogleVertexProviderSettings {\n /**\n * Optional. The API key for the Google Cloud project. If provided, the\n * provider will use express mode with API key authentication. Defaults to\n * the value of the `GOOGLE_VERTEX_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\n * Your Google Vertex location. Defaults to the environment variable `GOOGLE_VERTEX_LOCATION`.\n */\n location?: string;\n\n /**\n * Your Google Vertex project. Defaults to the environment variable `GOOGLE_VERTEX_PROJECT`.\n */\n project?: string;\n\n /**\n * Headers to use for requests. Can be:\n * - A headers object\n * - A Promise that resolves to a headers object\n * - A function that returns a headers object\n * - A function that returns a Promise of a headers object\n */\n headers?: Resolvable<Record<string, string | undefined>>;\n\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n // for testing\n generateId?: () => string;\n\n /**\n * Base URL for the Google Vertex API calls.\n */\n baseURL?: string;\n}\n\n/**\n * Create a Google Vertex AI provider instance.\n */\nexport function createVertex(\n options: GoogleVertexProviderSettings = {},\n): GoogleVertexProvider {\n const apiKey = loadOptionalSetting({\n settingValue: options.apiKey,\n environmentVariableName: 'GOOGLE_VERTEX_API_KEY',\n });\n\n const loadVertexProject = () =>\n loadSetting({\n settingValue: options.project,\n settingName: 'project',\n environmentVariableName: 'GOOGLE_VERTEX_PROJECT',\n description: 'Google Vertex project',\n });\n\n const loadVertexLocation = () =>\n loadSetting({\n settingValue: options.location,\n settingName: 'location',\n environmentVariableName: 'GOOGLE_VERTEX_LOCATION',\n description: 'Google Vertex location',\n });\n\n const loadBaseURL = () => {\n if (apiKey) {\n return withoutTrailingSlash(options.baseURL) ?? EXPRESS_MODE_BASE_URL;\n }\n\n const region = loadVertexLocation();\n const project = loadVertexProject();\n\n // For global region, use aiplatform.googleapis.com directly\n // For other regions, use region-aiplatform.googleapis.com\n const baseHost = `${region === 'global' ? 
'' : region + '-'}aiplatform.googleapis.com`;\n\n return (\n withoutTrailingSlash(options.baseURL) ??\n `https://${baseHost}/v1beta1/projects/${project}/locations/${region}/publishers/google`\n );\n };\n\n const createConfig = (name: string): GoogleVertexConfig => {\n const getHeaders = async () => {\n const originalHeaders = await resolve(options.headers ?? {});\n return withUserAgentSuffix(\n originalHeaders,\n `ai-sdk/google-vertex/${VERSION}`,\n );\n };\n\n return {\n provider: `google.vertex.${name}`,\n headers: getHeaders,\n fetch: apiKey\n ? createExpressModeFetch(apiKey, options.fetch)\n : options.fetch,\n baseURL: loadBaseURL(),\n };\n };\n\n const createChatModel = (modelId: GoogleVertexModelId) => {\n return new GoogleGenerativeAILanguageModel(modelId, {\n ...createConfig('chat'),\n generateId: options.generateId ?? generateId,\n supportedUrls: () => ({\n '*': [\n // HTTP URLs:\n /^https?:\\/\\/.*$/,\n // Google Cloud Storage URLs:\n /^gs:\\/\\/.*$/,\n ],\n }),\n });\n };\n\n const createEmbeddingModel = (modelId: GoogleVertexEmbeddingModelId) =>\n new GoogleVertexEmbeddingModel(modelId, createConfig('embedding'));\n\n const createImageModel = (modelId: GoogleVertexImageModelId) =>\n new GoogleVertexImageModel(modelId, createConfig('image'));\n\n const provider = function (modelId: GoogleVertexModelId) {\n if (new.target) {\n throw new Error(\n 'The Google Vertex AI model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.specificationVersion = 'v3' as const;\n provider.languageModel = createChatModel;\n provider.embeddingModel = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.image = createImageModel;\n provider.imageModel = createImageModel;\n provider.tools = googleVertexTools;\n\n return provider;\n}\n","// Version string of this package injected at build time.\ndeclare const __PACKAGE_VERSION__: string | undefined;\nexport const VERSION: string =\n typeof __PACKAGE_VERSION__ !== 'undefined'\n ? 
__PACKAGE_VERSION__\n : '0.0.0-test';\n","import {\n EmbeddingModelV3,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n postJsonToApi,\n resolve,\n parseProviderOptions,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { googleVertexFailedResponseHandler } from './google-vertex-error';\nimport {\n GoogleVertexEmbeddingModelId,\n googleVertexEmbeddingProviderOptions,\n} from './google-vertex-embedding-options';\nimport { GoogleVertexConfig } from './google-vertex-config';\n\nexport class GoogleVertexEmbeddingModel implements EmbeddingModelV3 {\n readonly specificationVersion = 'v3';\n readonly modelId: GoogleVertexEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 2048;\n readonly supportsParallelCalls = true;\n\n private readonly config: GoogleVertexConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n modelId: GoogleVertexEmbeddingModelId,\n config: GoogleVertexConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n providerOptions,\n }: Parameters<EmbeddingModelV3['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV3['doEmbed']>>\n > {\n let googleOptions = await parseProviderOptions({\n provider: 'vertex',\n providerOptions,\n schema: googleVertexEmbeddingProviderOptions,\n });\n\n if (googleOptions == null) {\n googleOptions = await parseProviderOptions({\n provider: 'google',\n providerOptions,\n schema: googleVertexEmbeddingProviderOptions,\n });\n }\n\n googleOptions = googleOptions ?? {};\n\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n headers,\n );\n\n const url = `${this.config.baseURL}/models/${this.modelId}:predict`;\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url,\n headers: mergedHeaders,\n body: {\n instances: values.map(value => ({\n content: value,\n task_type: googleOptions.taskType,\n title: googleOptions.title,\n })),\n parameters: {\n outputDimensionality: googleOptions.outputDimensionality,\n autoTruncate: googleOptions.autoTruncate,\n },\n },\n failedResponseHandler: googleVertexFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n googleVertexTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n warnings: [],\n embeddings: response.predictions.map(\n prediction => prediction.embeddings.values,\n ),\n usage: {\n tokens: response.predictions.reduce(\n (tokenCount, prediction) =>\n tokenCount + prediction.embeddings.statistics.token_count,\n 0,\n ),\n },\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst googleVertexTextEmbeddingResponseSchema = z.object({\n predictions: z.array(\n z.object({\n embeddings: z.object({\n values: z.array(z.number()),\n statistics: z.object({\n token_count: z.number(),\n }),\n }),\n }),\n ),\n});\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\n\nconst googleVertexErrorDataSchema = z.object({\n error: 
z.object({\n code: z.number().nullable(),\n message: z.string(),\n status: z.string(),\n }),\n});\n\nexport type GoogleVertexErrorData = z.infer<typeof googleVertexErrorDataSchema>;\n\nexport const googleVertexFailedResponseHandler = createJsonErrorResponseHandler(\n {\n errorSchema: googleVertexErrorDataSchema,\n errorToMessage: data => data.error.message,\n },\n);\n","import { z } from 'zod/v4';\n\n// https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/text-embeddings-api\nexport type GoogleVertexEmbeddingModelId =\n | 'textembedding-gecko'\n | 'textembedding-gecko@001'\n | 'textembedding-gecko@003'\n | 'textembedding-gecko-multilingual'\n | 'textembedding-gecko-multilingual@001'\n | 'text-multilingual-embedding-002'\n | 'text-embedding-004'\n | 'text-embedding-005'\n | (string & {});\n\nexport const googleVertexEmbeddingProviderOptions = z.object({\n /**\n * Optional. Optional reduced dimension for the output embedding.\n * If set, excessive values in the output embedding are truncated from the end.\n */\n outputDimensionality: z.number().optional(),\n\n /**\n * Optional. Specifies the task type for generating embeddings.\n * Supported task types:\n * - SEMANTIC_SIMILARITY: Optimized for text similarity.\n * - CLASSIFICATION: Optimized for text classification.\n * - CLUSTERING: Optimized for clustering texts based on similarity.\n * - RETRIEVAL_DOCUMENT: Optimized for document retrieval.\n * - RETRIEVAL_QUERY: Optimized for query-based retrieval.\n * - QUESTION_ANSWERING: Optimized for answering questions.\n * - FACT_VERIFICATION: Optimized for verifying factual information.\n * - CODE_RETRIEVAL_QUERY: Optimized for retrieving code blocks based on natural language queries.\n */\n taskType: z\n .enum([\n 'SEMANTIC_SIMILARITY',\n 'CLASSIFICATION',\n 'CLUSTERING',\n 'RETRIEVAL_DOCUMENT',\n 'RETRIEVAL_QUERY',\n 'QUESTION_ANSWERING',\n 'FACT_VERIFICATION',\n 'CODE_RETRIEVAL_QUERY',\n ])\n .optional(),\n\n /**\n * Optional. The title of the document being embedded.\n * Only valid when task_type is set to 'RETRIEVAL_DOCUMENT'.\n * Helps the model produce better embeddings by providing additional context.\n */\n title: z.string().optional(),\n\n /**\n * Optional. When set to true, input text will be truncated. When set to false,\n * an error is returned if the input text is longer than the maximum length supported by the model. 
Defaults to true.\n */\n autoTruncate: z.boolean().optional(),\n});\n\nexport type GoogleVertexEmbeddingProviderOptions = z.infer<\n typeof googleVertexEmbeddingProviderOptions\n>;\n","import {\n ImageModelV3,\n ImageModelV3File,\n SharedV3Warning,\n} from '@ai-sdk/provider';\nimport {\n Resolvable,\n combineHeaders,\n convertUint8ArrayToBase64,\n createJsonResponseHandler,\n parseProviderOptions,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { googleVertexFailedResponseHandler } from './google-vertex-error';\nimport { GoogleVertexImageModelId } from './google-vertex-image-settings';\n\ninterface GoogleVertexImageModelConfig {\n provider: string;\n baseURL: string;\n headers?: Resolvable<Record<string, string | undefined>>;\n fetch?: typeof fetch;\n _internal?: {\n currentDate?: () => Date;\n };\n}\n\n// https://cloud.google.com/vertex-ai/generative-ai/docs/image/generate-images\nexport class GoogleVertexImageModel implements ImageModelV3 {\n readonly specificationVersion = 'v3';\n // https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/imagen-api#parameter_list\n readonly maxImagesPerCall = 4;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: GoogleVertexImageModelId,\n private config: GoogleVertexImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n files,\n mask,\n }: Parameters<ImageModelV3['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV3['doGenerate']>>\n > {\n const warnings: Array<SharedV3Warning> = [];\n\n if (size != null) {\n warnings.push({\n type: 'unsupported',\n feature: 'size',\n details:\n 'This model does not support the `size` option. Use `aspectRatio` instead.',\n });\n }\n\n const vertexImageOptions = await parseProviderOptions({\n provider: 'vertex',\n providerOptions,\n schema: vertexImageProviderOptionsSchema,\n });\n\n // Extract edit-specific options from provider options\n const { edit, ...otherOptions } = vertexImageOptions ?? {};\n const { mode: editMode, baseSteps, maskMode, maskDilation } = edit ?? {};\n\n // Build the request body based on whether we're editing or generating\n const isEditMode = files != null && files.length > 0;\n\n let body: Record<string, unknown>;\n\n if (isEditMode) {\n // Build reference images for editing\n const referenceImages: Array<Record<string, unknown>> = [];\n\n // Add the source image(s)\n for (let i = 0; i < files.length; i++) {\n const file = files[i];\n referenceImages.push({\n referenceType: 'REFERENCE_TYPE_RAW',\n referenceId: i + 1,\n referenceImage: {\n bytesBase64Encoded: getBase64Data(file),\n },\n });\n }\n\n // Add mask if provided\n if (mask != null) {\n referenceImages.push({\n referenceType: 'REFERENCE_TYPE_MASK',\n referenceId: files.length + 1,\n referenceImage: {\n bytesBase64Encoded: getBase64Data(mask),\n },\n maskImageConfig: {\n maskMode: maskMode ?? 'MASK_MODE_USER_PROVIDED',\n ...(maskDilation != null ? { dilation: maskDilation } : {}),\n },\n });\n }\n\n body = {\n instances: [\n {\n prompt,\n referenceImages,\n },\n ],\n parameters: {\n sampleCount: n,\n ...(aspectRatio != null ? { aspectRatio } : {}),\n ...(seed != null ? { seed } : {}),\n editMode: editMode ?? 'EDIT_MODE_INPAINT_INSERTION',\n ...(baseSteps != null ? 
{ editConfig: { baseSteps } } : {}),\n ...otherOptions,\n },\n };\n } else {\n // Standard image generation\n body = {\n instances: [{ prompt }],\n parameters: {\n sampleCount: n,\n ...(aspectRatio != null ? { aspectRatio } : {}),\n ...(seed != null ? { seed } : {}),\n ...otherOptions,\n },\n };\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: `${this.config.baseURL}/models/${this.modelId}:predict`,\n headers: combineHeaders(await resolve(this.config.headers), headers),\n body,\n failedResponseHandler: googleVertexFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n vertexImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images:\n response.predictions?.map(\n ({ bytesBase64Encoded }) => bytesBase64Encoded,\n ) ?? [],\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n providerMetadata: {\n vertex: {\n images:\n response.predictions?.map(prediction => {\n const {\n // normalize revised prompt property\n prompt: revisedPrompt,\n } = prediction;\n\n return { ...(revisedPrompt != null && { revisedPrompt }) };\n }) ?? [],\n },\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst vertexImageResponseSchema = z.object({\n predictions: z\n .array(\n z.object({\n bytesBase64Encoded: z.string(),\n mimeType: z.string(),\n prompt: z.string().nullish(),\n }),\n )\n .nullish(),\n});\n\nconst vertexImageProviderOptionsSchema = z.object({\n negativePrompt: z.string().nullish(),\n personGeneration: z\n .enum(['dont_allow', 'allow_adult', 'allow_all'])\n .nullish(),\n safetySetting: z\n .enum([\n 'block_low_and_above',\n 'block_medium_and_above',\n 'block_only_high',\n 'block_none',\n ])\n .nullish(),\n addWatermark: z.boolean().nullish(),\n storageUri: z.string().nullish(),\n sampleImageSize: z.enum(['1K', '2K']).nullish(),\n /**\n * Configuration for image editing operations\n */\n edit: z\n .object({\n /**\n * An integer that represents the number of sampling steps.\n * A higher value offers better image quality, a lower value offers better latency.\n * Try 35 steps to start. If the quality doesn't meet your requirements,\n * increase the value towards an upper limit of 75.\n */\n baseSteps: z.number().nullish(),\n\n // Edit mode options\n // https://cloud.google.com/vertex-ai/generative-ai/docs/image/edit-insert-objects\n mode: z\n .enum([\n 'EDIT_MODE_INPAINT_INSERTION',\n 'EDIT_MODE_INPAINT_REMOVAL',\n 'EDIT_MODE_OUTPAINT',\n 'EDIT_MODE_CONTROLLED_EDITING',\n 'EDIT_MODE_PRODUCT_IMAGE',\n 'EDIT_MODE_BGSWAP',\n ])\n .nullish(),\n\n /**\n * The mask mode to use.\n * - `MASK_MODE_DEFAULT` - Default value for mask mode.\n * - `MASK_MODE_USER_PROVIDED` - User provided mask. No segmentation needed.\n * - `MASK_MODE_DETECTION_BOX` - Mask from detected bounding boxes.\n * - `MASK_MODE_CLOTHING_AREA` - Masks from segmenting the clothing area with open-vocab segmentation.\n * - `MASK_MODE_PARSED_PERSON` - Masks from segmenting the person body and clothing using the person-parsing model.\n */\n maskMode: z\n .enum([\n 'MASK_MODE_DEFAULT',\n 'MASK_MODE_USER_PROVIDED',\n 'MASK_MODE_DETECTION_BOX',\n 'MASK_MODE_CLOTHING_AREA',\n 'MASK_MODE_PARSED_PERSON',\n ])\n .nullish(),\n\n /**\n * Optional. 
A float value between 0 and 1, inclusive, that represents the\n * percentage of the image width to grow the mask by. Using dilation helps\n * compensate for imprecise masks. We recommend a value of 0.01.\n */\n maskDilation: z.number().nullish(),\n })\n .nullish(),\n});\nexport type GoogleVertexImageProviderOptions = z.infer<\n typeof vertexImageProviderOptionsSchema\n>;\n\n/**\n * Helper to convert ImageModelV3File data to base64 string\n */\nfunction getBase64Data(file: ImageModelV3File): string {\n if (file.type === 'url') {\n throw new Error(\n 'URL-based images are not supported for Google Vertex image editing. Please provide the image data directly.',\n );\n }\n\n if (typeof file.data === 'string') {\n return file.data;\n }\n\n // Convert Uint8Array to base64\n return convertUint8ArrayToBase64(file.data);\n}\n","import { googleTools } from '@ai-sdk/google/internal';\n\nexport const googleVertexTools = {\n googleSearch: googleTools.googleSearch,\n enterpriseWebSearch: googleTools.enterpriseWebSearch,\n googleMaps: googleTools.googleMaps,\n urlContext: googleTools.urlContext,\n fileSearch: googleTools.fileSearch,\n codeExecution: googleTools.codeExecution,\n vertexRagStore: googleTools.vertexRagStore,\n};\n"],"mappings":";AAAA,SAAS,uBAAAA,sBAAqB,WAAAC,gBAAe;;;ACA7C,SAAS,kBAAqC;AAE9C,IAAI,eAAkC;AACtC,IAAI,cAAwC;AAE5C,SAAS,QAAQ,SAA4B;AAC3C,MAAI,CAAC,gBAAgB,YAAY,aAAa;AAC5C,mBAAe,IAAI,WAAW;AAAA,MAC5B,QAAQ,CAAC,gDAAgD;AAAA,MACzD,GAAG;AAAA,IACL,CAAC;AACD,kBAAc;AAAA,EAChB;AACA,SAAO;AACT;AAEA,eAAsB,kBAAkB,SAA6B;AACnE,QAAM,OAAO,QAAQ,WAAW,CAAC,CAAC;AAClC,QAAM,SAAS,MAAM,KAAK,UAAU;AACpC,QAAM,QAAQ,MAAM,OAAO,eAAe;AAC1C,UAAO,+BAAO,UAAS;AACzB;;;ACrBA,SAAS,uCAAuC;AAEhD;AAAA,EAEE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,WAAAC;AAAA,EAEA;AAAA,EACA;AAAA,OACK;;;ACVA,IAAM,UACX,OACI,WACA;;;ACLN;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;;;ACXlB,SAAS,sCAAsC;AAC/C,SAAS,SAAS;AAElB,IAAM,8BAA8B,EAAE,OAAO;AAAA,EAC3C,OAAO,EAAE,OAAO;AAAA,IACd,MAAM,EAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,SAAS,EAAE,OAAO;AAAA,IAClB,QAAQ,EAAE,OAAO;AAAA,EACnB,CAAC;AACH,CAAC;AAIM,IAAM,oCAAoC;AAAA,EAC/C;AAAA,IACE,aAAa;AAAA,IACb,gBAAgB,UAAQ,KAAK,MAAM;AAAA,EACrC;AACF;;;AClBA,SAAS,KAAAC,UAAS;AAcX,IAAM,uCAAuCA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAK3D,sBAAsBA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAc1C,UAAUA,GACP,KAAK;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOZ,OAAOA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAM3B,cAAcA,GAAE,QAAQ,EAAE,SAAS;AACrC,CAAC;;;AFvCM,IAAM,6BAAN,MAA6D;AAAA,EAYlE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAY/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAVA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAUA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,gBAAgB,MAAM,qBAAqB;AAAA,MAC7C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,QAAI,iBAAiB,MAAM;AACzB,sBAAgB,MAAM,qBAAqB;AAAA,QACzC,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAEA,oBAAgB,wCAAiB,CAAC;AAElC,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,gBAAgB;AAAA,MACpB,MAAM,QAAQ,KAAK,OAAO,OAAO;AAAA,MACjC;AAAA,IACF;AAEA,UAAM,MAAM,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AACzD,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,MAAM,cAAc;AAAA,MACtB;AAAA,MACA,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,WAAW,OAAO,IAAI,YAAU;A
AAA,UAC9B,SAAS;AAAA,UACT,WAAW,cAAc;AAAA,UACzB,OAAO,cAAc;AAAA,QACvB,EAAE;AAAA,QACF,YAAY;AAAA,UACV,sBAAsB,cAAc;AAAA,UACpC,cAAc,cAAc;AAAA,QAC9B;AAAA,MACF;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,UAAU,CAAC;AAAA,MACX,YAAY,SAAS,YAAY;AAAA,QAC/B,gBAAc,WAAW,WAAW;AAAA,MACtC;AAAA,MACA,OAAO;AAAA,QACL,QAAQ,SAAS,YAAY;AAAA,UAC3B,CAAC,YAAY,eACX,aAAa,WAAW,WAAW,WAAW;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAAA,MACA,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,0CAA0CC,GAAE,OAAO;AAAA,EACvD,aAAaA,GAAE;AAAA,IACbA,GAAE,OAAO;AAAA,MACP,YAAYA,GAAE,OAAO;AAAA,QACnB,QAAQA,GAAE,MAAMA,GAAE,OAAO,CAAC;AAAA,QAC1B,YAAYA,GAAE,OAAO;AAAA,UACnB,aAAaA,GAAE,OAAO;AAAA,QACxB,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC;;;AGjID;AAAA,EAEE,kBAAAC;AAAA,EACA;AAAA,EACA,6BAAAC;AAAA,EACA,wBAAAC;AAAA,EACA,iBAAAC;AAAA,EACA,WAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AAeX,IAAM,yBAAN,MAAqD;AAAA,EAS1D,YACW,SACD,QACR;AAFS;AACD;AAVV,SAAS,uBAAuB;AAEhC;AAAA,SAAS,mBAAmB;AAAA,EASzB;AAAA,EAPH,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAOA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAxDJ;AAyDI,UAAM,WAAmC,CAAC;AAE1C,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM,qBAAqB,MAAMC,sBAAqB;AAAA,MACpD,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAGD,UAAM,EAAE,MAAM,GAAG,aAAa,IAAI,kDAAsB,CAAC;AACzD,UAAM,EAAE,MAAM,UAAU,WAAW,UAAU,aAAa,IAAI,sBAAQ,CAAC;AAGvE,UAAM,aAAa,SAAS,QAAQ,MAAM,SAAS;AAEnD,QAAI;AAEJ,QAAI,YAAY;AAEd,YAAM,kBAAkD,CAAC;AAGzD,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,cAAM,OAAO,MAAM,CAAC;AACpB,wBAAgB,KAAK;AAAA,UACnB,eAAe;AAAA,UACf,aAAa,IAAI;AAAA,UACjB,gBAAgB;AAAA,YACd,oBAAoB,cAAc,IAAI;AAAA,UACxC;AAAA,QACF,CAAC;AAAA,MACH;AAGA,UAAI,QAAQ,MAAM;AAChB,wBAAgB,KAAK;AAAA,UACnB,eAAe;AAAA,UACf,aAAa,MAAM,SAAS;AAAA,UAC5B,gBAAgB;AAAA,YACd,oBAAoB,cAAc,IAAI;AAAA,UACxC;AAAA,UACA,iBAAiB;AAAA,YACf,UAAU,8BAAY;AAAA,YACtB,GAAI,gBAAgB,OAAO,EAAE,UAAU,aAAa,IAAI,CAAC;AAAA,UAC3D;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,QACL,WAAW;AAAA,UACT;AAAA,YACE;AAAA,YACA;AAAA,UACF;AAAA,QACF;AAAA,QACA,YAAY;AAAA,UACV,aAAa;AAAA,UACb,GAAI,eAAe,OAAO,EAAE,YAAY,IAAI,CAAC;AAAA,UAC7C,GAAI,QAAQ,OAAO,EAAE,KAAK,IAAI,CAAC;AAAA,UAC/B,UAAU,8BAAY;AAAA,UACtB,GAAI,aAAa,OAAO,EAAE,YAAY,EAAE,UAAU,EAAE,IAAI,CAAC;AAAA,UACzD,GAAG;AAAA,QACL;AAAA,MACF;AAAA,IACF,OAAO;AAEL,aAAO;AAAA,QACL,WAAW,CAAC,EAAE,OAAO,CAAC;AAAA,QACtB,YAAY;AAAA,UACV,aAAa;AAAA,UACb,GAAI,eAAe,OAAO,EAAE,YAAY,IAAI,CAAC;AAAA,UAC7C,GAAI,QAAQ,OAAO,EAAE,KAAK,IAAI,CAAC;AAAA,UAC/B,GAAG;AAAA,QACL;AAAA,MACF;AAAA,IACF;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,MAAMC,eAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AAAA,MAClD,SAASC,gBAAe,MAAMC,SAAQ,KAAK,OAAO,OAAO,GAAG,OAAO;AAAA,MACnE;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,SACE,oBAAS,gBAAT,mBAAsB;AAAA,QACpB,CAAC,EAAE,mBAAmB,MAAM;AAAA,YAD9B,YAEK,CAAC;AAAA,MACR;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,MACA,kBAAkB;AAAA,QAChB,QAAQ;AAAA,UACN,SACE,oBAAS,gBAAT,mBAAsB,IAAI,gBAAc;AACtC,kBAAM;AAAA;AAAA,cAEJ,QAAQ;AAAA,YACV,IAAI;AAEJ,mBAAO,EAAE,GAAI,iBAAiB,QAAQ,EAAE,cAAc,EAAG;AAAA,UAC3D,OAPA,YAOM,CAAC;AAAA,QACX;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,4BAA4BC,GAAE,OAAO;AAAA,EACzC,aAAaA,GACV;AAAA,IACCA,GAAE,OAAO;AAAA,MACP,oBAAoBA,GAAE,OAAO;AAAA,MAC7B,UAAUA,GAAE,OAAO;AAAA,MACnB,QAAQA,GA
AE,OAAO,EAAE,QAAQ;AAAA,IAC7B,CAAC;AAAA,EACH,EACC,QAAQ;AACb,CAAC;AAED,IAAM,mCAAmCA,GAAE,OAAO;AAAA,EAChD,gBAAgBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACnC,kBAAkBA,GACf,KAAK,CAAC,cAAc,eAAe,WAAW,CAAC,EAC/C,QAAQ;AAAA,EACX,eAAeA,GACZ,KAAK;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,QAAQ;AAAA,EACX,cAAcA,GAAE,QAAQ,EAAE,QAAQ;AAAA,EAClC,YAAYA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC/B,iBAAiBA,GAAE,KAAK,CAAC,MAAM,IAAI,CAAC,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI9C,MAAMA,GACH,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAON,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA,IAI9B,MAAMA,GACH,KAAK;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC,EACA,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAUX,UAAUA,GACP,KAAK;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC,EACA,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAOX,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACnC,CAAC,EACA,QAAQ;AACb,CAAC;AAQD,SAAS,cAAc,MAAgC;AACrD,MAAI,KAAK,SAAS,OAAO;AACvB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,MAAI,OAAO,KAAK,SAAS,UAAU;AACjC,WAAO,KAAK;AAAA,EACd;AAGA,SAAO,0BAA0B,KAAK,IAAI;AAC5C;;;AC/RA,SAAS,mBAAmB;AAErB,IAAM,oBAAoB;AAAA,EAC/B,cAAc,YAAY;AAAA,EAC1B,qBAAqB,YAAY;AAAA,EACjC,YAAY,YAAY;AAAA,EACxB,YAAY,YAAY;AAAA,EACxB,YAAY,YAAY;AAAA,EACxB,eAAe,YAAY;AAAA,EAC3B,gBAAgB,YAAY;AAC9B;;;ANYA,IAAM,wBACJ;AAGF,SAAS,uBACP,QACA,aACe;AACf,SAAO,OAAO,KAAK,SAAS;AAC1B,UAAM,eAA4B;AAAA,MAChC,GAAG;AAAA,MACH,SAAS;AAAA,QACP,IAAI,6BAAM,WAAU,iBAAiB,KAAK,OAAO,IAAI,CAAC;AAAA,QACtD,kBAAkB;AAAA,MACpB;AAAA,IACF;AACA,YAAQ,oCAAe,OAAO,IAAI,SAAS,GAAG,YAAY;AAAA,EAC5D;AACF;AA2EO,SAAS,aACd,UAAwC,CAAC,GACnB;AACtB,QAAM,SAAS,oBAAoB;AAAA,IACjC,cAAc,QAAQ;AAAA,IACtB,yBAAyB;AAAA,EAC3B,CAAC;AAED,QAAM,oBAAoB,MACxB,YAAY;AAAA,IACV,cAAc,QAAQ;AAAA,IACtB,aAAa;AAAA,IACb,yBAAyB;AAAA,IACzB,aAAa;AAAA,EACf,CAAC;AAEH,QAAM,qBAAqB,MACzB,YAAY;AAAA,IACV,cAAc,QAAQ;AAAA,IACtB,aAAa;AAAA,IACb,yBAAyB;AAAA,IACzB,aAAa;AAAA,EACf,CAAC;AAEH,QAAM,cAAc,MAAM;AA3I5B;AA4II,QAAI,QAAQ;AACV,cAAO,0BAAqB,QAAQ,OAAO,MAApC,YAAyC;AAAA,IAClD;AAEA,UAAM,SAAS,mBAAmB;AAClC,UAAM,UAAU,kBAAkB;AAIlC,UAAM,WAAW,GAAG,WAAW,WAAW,KAAK,SAAS,GAAG;AAE3D,YACE,0BAAqB,QAAQ,OAAO,MAApC,YACA,WAAW,QAAQ,qBAAqB,OAAO,cAAc,MAAM;AAAA,EAEvE;AAEA,QAAM,eAAe,CAAC,SAAqC;AACzD,UAAM,aAAa,YAAY;AA9JnC;AA+JM,YAAM,kBAAkB,MAAMC,UAAQ,aAAQ,YAAR,YAAmB,CAAC,CAAC;AAC3D,aAAO;AAAA,QACL;AAAA,QACA,wBAAwB,OAAO;AAAA,MACjC;AAAA,IACF;AAEA,WAAO;AAAA,MACL,UAAU,iBAAiB,IAAI;AAAA,MAC/B,SAAS;AAAA,MACT,OAAO,SACH,uBAAuB,QAAQ,QAAQ,KAAK,IAC5C,QAAQ;AAAA,MACZ,SAAS,YAAY;AAAA,IACvB;AAAA,EACF;AAEA,QAAM,kBAAkB,CAAC,YAAiC;AAhL5D;AAiLI,WAAO,IAAI,gCAAgC,SAAS;AAAA,MAClD,GAAG,aAAa,MAAM;AAAA,MACtB,aAAY,aAAQ,eAAR,YAAsB;AAAA,MAClC,eAAe,OAAO;AAAA,QACpB,KAAK;AAAA;AAAA,UAEH;AAAA;AAAA,UAEA;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,uBAAuB,CAAC,YAC5B,IAAI,2BAA2B,SAAS,aAAa,WAAW,CAAC;AAEnE,QAAM,mBAAmB,CAAC,YACxB,IAAI,uBAAuB,SAAS,aAAa,OAAO,CAAC;AAE3D,QAAM,WAAW,SAAU,SAA8B;AACvD,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,uBAAuB;AAChC,WAAS,gBAAgB;AACzB,WAAS,iBAAiB;AAC1B,WAAS,qBAAqB;AAC9B,WAAS,QAAQ;AACjB,WAAS,aAAa;AACtB,WAAS,QAAQ;AAEjB,SAAO;AACT;;;AFlMO,SAASC,cACd,UAAwC,CAAC,GACnB;AACtB,QAAM,SAASC,qBAAoB;AAAA,IACjC,cAAc,QAAQ;AAAA,IACtB,yBAAyB;AAAA,EAC3B,CAAC;AAED,MAAI,QAAQ;AACV,WAAO,aAAqB,OAAO;AAAA,EACrC;AAEA,SAAO,aAAqB;AAAA,IAC1B,GAAG;AAAA,IACH,SAAS,aAAa;AAAA,MACpB,eAAe,UAAU,MAAM;AAAA,QAC7B,QAAQ;AAAA,MACV,CAAC;AAAA,MACD,GAAI,MAAMC,SAAQ,QAAQ,OAAO;AAAA,IACnC;AAAA,EACF,CAAC;AACH;AAKO,IAAM,SAASF,cAAa;","names":["loadOptionalSetting","resolve","resolve","z","z","z","combineHeaders","createJsonResponseHandler","parseProviderOptions","p
ostJsonToApi","resolve","z","parseProviderOptions","postJsonToApi","combineHeaders","resolve","createJsonResponseHandler","z","resolve","createVertex","loadOptionalSetting","resolve"]}
+
{"version":3,"sources":["../src/google-vertex-provider-node.ts","../src/google-vertex-auth-google-auth-library.ts","../src/google-vertex-provider.ts","../src/version.ts","../src/google-vertex-embedding-model.ts","../src/google-vertex-error.ts","../src/google-vertex-embedding-options.ts","../src/google-vertex-image-model.ts","../src/google-vertex-tools.ts","../src/google-vertex-video-model.ts"],"sourcesContent":["import { loadOptionalSetting, resolve } from '@ai-sdk/provider-utils';\nimport { GoogleAuthOptions } from 'google-auth-library';\nimport { generateAuthToken } from './google-vertex-auth-google-auth-library';\nimport {\n createVertex as createVertexOriginal,\n GoogleVertexProvider,\n GoogleVertexProviderSettings as GoogleVertexProviderSettingsOriginal,\n} from './google-vertex-provider';\n\nexport interface GoogleVertexProviderSettings\n extends GoogleVertexProviderSettingsOriginal {\n /**\n * Optional. The Authentication options provided by google-auth-library.\n * Complete list of authentication options is documented in the\n * GoogleAuthOptions interface:\n * https://github.com/googleapis/google-auth-library-nodejs/blob/main/src/auth/googleauth.ts.\n */\n googleAuthOptions?: GoogleAuthOptions;\n}\n\nexport type { GoogleVertexProvider };\n\nexport function createVertex(\n options: GoogleVertexProviderSettings = {},\n): GoogleVertexProvider {\n const apiKey = loadOptionalSetting({\n settingValue: options.apiKey,\n environmentVariableName: 'GOOGLE_VERTEX_API_KEY',\n });\n\n if (apiKey) {\n return createVertexOriginal(options);\n }\n\n return createVertexOriginal({\n ...options,\n headers: async () => ({\n Authorization: `Bearer ${await generateAuthToken(\n options.googleAuthOptions,\n )}`,\n ...(await resolve(options.headers)),\n }),\n });\n}\n\n/**\n * Default Google Vertex AI provider instance.\n */\nexport const vertex = createVertex();\n","import { GoogleAuth, GoogleAuthOptions } from 'google-auth-library';\n\nlet authInstance: GoogleAuth | null = null;\nlet authOptions: GoogleAuthOptions | null = null;\n\nfunction getAuth(options: GoogleAuthOptions) {\n if (!authInstance || options !== authOptions) {\n authInstance = new GoogleAuth({\n scopes: ['https://www.googleapis.com/auth/cloud-platform'],\n ...options,\n });\n authOptions = options;\n }\n return authInstance;\n}\n\nexport async function generateAuthToken(options?: GoogleAuthOptions) {\n const auth = getAuth(options || {});\n const client = await auth.getClient();\n const token = await client.getAccessToken();\n return token?.token || null;\n}\n\n// For testing purposes only\nexport function _resetAuthInstance() {\n authInstance = null;\n}\n","import { GoogleGenerativeAILanguageModel } from '@ai-sdk/google/internal';\nimport {\n Experimental_VideoModelV3,\n ImageModelV3,\n LanguageModelV3,\n ProviderV3,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n generateId,\n loadOptionalSetting,\n loadSetting,\n normalizeHeaders,\n resolve,\n Resolvable,\n withoutTrailingSlash,\n withUserAgentSuffix,\n} from '@ai-sdk/provider-utils';\nimport { VERSION } from './version';\nimport { GoogleVertexConfig } from './google-vertex-config';\nimport { GoogleVertexEmbeddingModel } from './google-vertex-embedding-model';\nimport { GoogleVertexEmbeddingModelId } from './google-vertex-embedding-options';\nimport { GoogleVertexImageModel } from './google-vertex-image-model';\nimport { GoogleVertexImageModelId } from './google-vertex-image-settings';\nimport { GoogleVertexModelId } from './google-vertex-options';\nimport { googleVertexTools 
} from './google-vertex-tools';\nimport { GoogleVertexVideoModel } from './google-vertex-video-model';\nimport { GoogleVertexVideoModelId } from './google-vertex-video-settings';\n\nconst EXPRESS_MODE_BASE_URL =\n 'https://aiplatform.googleapis.com/v1/publishers/google';\n\n// set `x-goog-api-key` header to API key for express mode\nfunction createExpressModeFetch(\n apiKey: string,\n customFetch?: FetchFunction,\n): FetchFunction {\n return async (url, init) => {\n const modifiedInit: RequestInit = {\n ...init,\n headers: {\n ...(init?.headers ? normalizeHeaders(init.headers) : {}),\n 'x-goog-api-key': apiKey,\n },\n };\n return (customFetch ?? fetch)(url.toString(), modifiedInit);\n };\n}\n\nexport interface GoogleVertexProvider extends ProviderV3 {\n /**\n * Creates a model for text generation.\n */\n (modelId: GoogleVertexModelId): LanguageModelV3;\n\n languageModel: (modelId: GoogleVertexModelId) => LanguageModelV3;\n\n /**\n * Creates a model for image generation.\n */\n image(modelId: GoogleVertexImageModelId): ImageModelV3;\n\n /**\n * Creates a model for image generation.\n */\n imageModel(modelId: GoogleVertexImageModelId): ImageModelV3;\n\n /**\n * Creates a model for video generation.\n */\n video(modelId: GoogleVertexVideoModelId): Experimental_VideoModelV3;\n\n tools: typeof googleVertexTools;\n\n /**\n * @deprecated Use `embeddingModel` instead.\n */\n textEmbeddingModel(\n modelId: GoogleVertexEmbeddingModelId,\n ): GoogleVertexEmbeddingModel;\n}\n\nexport interface GoogleVertexProviderSettings {\n /**\n * Optional. The API key for the Google Cloud project. If provided, the\n * provider will use express mode with API key authentication. Defaults to\n * the value of the `GOOGLE_VERTEX_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\n * Your Google Vertex location. Defaults to the environment variable `GOOGLE_VERTEX_LOCATION`.\n */\n location?: string;\n\n /**\n * Your Google Vertex project. Defaults to the environment variable `GOOGLE_VERTEX_PROJECT`.\n */\n project?: string;\n\n /**\n * Headers to use for requests. Can be:\n * - A headers object\n * - A Promise that resolves to a headers object\n * - A function that returns a headers object\n * - A function that returns a Promise of a headers object\n */\n headers?: Resolvable<Record<string, string | undefined>>;\n\n /**\n * Custom fetch implementation. You can use it as a middleware to intercept requests,\n * or to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n // for testing\n generateId?: () => string;\n\n /**\n * Base URL for the Google Vertex API calls.\n */\n baseURL?: string;\n}\n\n/**\n * Create a Google Vertex AI provider instance.\n */\nexport function createVertex(\n options: GoogleVertexProviderSettings = {},\n): GoogleVertexProvider {\n const apiKey = loadOptionalSetting({\n settingValue: options.apiKey,\n environmentVariableName: 'GOOGLE_VERTEX_API_KEY',\n });\n\n const loadVertexProject = () =>\n loadSetting({\n settingValue: options.project,\n settingName: 'project',\n environmentVariableName: 'GOOGLE_VERTEX_PROJECT',\n description: 'Google Vertex project',\n });\n\n const loadVertexLocation = () =>\n loadSetting({\n settingValue: options.location,\n settingName: 'location',\n environmentVariableName: 'GOOGLE_VERTEX_LOCATION',\n description: 'Google Vertex location',\n });\n\n const loadBaseURL = () => {\n if (apiKey) {\n return withoutTrailingSlash(options.baseURL) ?? 
EXPRESS_MODE_BASE_URL;\n }\n\n const region = loadVertexLocation();\n const project = loadVertexProject();\n\n // For global region, use aiplatform.googleapis.com directly\n // For other regions, use region-aiplatform.googleapis.com\n const baseHost = `${region === 'global' ? '' : region + '-'}aiplatform.googleapis.com`;\n\n return (\n withoutTrailingSlash(options.baseURL) ??\n `https://${baseHost}/v1beta1/projects/${project}/locations/${region}/publishers/google`\n );\n };\n\n const createConfig = (name: string): GoogleVertexConfig => {\n const getHeaders = async () => {\n const originalHeaders = await resolve(options.headers ?? {});\n return withUserAgentSuffix(\n originalHeaders,\n `ai-sdk/google-vertex/${VERSION}`,\n );\n };\n\n return {\n provider: `google.vertex.${name}`,\n headers: getHeaders,\n fetch: apiKey\n ? createExpressModeFetch(apiKey, options.fetch)\n : options.fetch,\n baseURL: loadBaseURL(),\n };\n };\n\n const createChatModel = (modelId: GoogleVertexModelId) => {\n return new GoogleGenerativeAILanguageModel(modelId, {\n ...createConfig('chat'),\n generateId: options.generateId ?? generateId,\n supportedUrls: () => ({\n '*': [\n // HTTP URLs:\n /^https?:\\/\\/.*$/,\n // Google Cloud Storage URLs:\n /^gs:\\/\\/.*$/,\n ],\n }),\n });\n };\n\n const createEmbeddingModel = (modelId: GoogleVertexEmbeddingModelId) =>\n new GoogleVertexEmbeddingModel(modelId, createConfig('embedding'));\n\n const createImageModel = (modelId: GoogleVertexImageModelId) =>\n new GoogleVertexImageModel(modelId, createConfig('image'));\n\n const createVideoModel = (modelId: GoogleVertexVideoModelId) =>\n new GoogleVertexVideoModel(modelId, {\n ...createConfig('video'),\n generateId: options.generateId ?? generateId,\n });\n\n const provider = function (modelId: GoogleVertexModelId) {\n if (new.target) {\n throw new Error(\n 'The Google Vertex AI model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.specificationVersion = 'v3' as const;\n provider.languageModel = createChatModel;\n provider.embeddingModel = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.image = createImageModel;\n provider.imageModel = createImageModel;\n provider.video = createVideoModel;\n provider.tools = googleVertexTools;\n\n return provider;\n}\n","// Version string of this package injected at build time.\ndeclare const __PACKAGE_VERSION__: string | undefined;\nexport const VERSION: string =\n typeof __PACKAGE_VERSION__ !== 'undefined'\n ? 
__PACKAGE_VERSION__\n : '0.0.0-test';\n","import {\n EmbeddingModelV3,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n postJsonToApi,\n resolve,\n parseProviderOptions,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { googleVertexFailedResponseHandler } from './google-vertex-error';\nimport {\n GoogleVertexEmbeddingModelId,\n googleVertexEmbeddingProviderOptions,\n} from './google-vertex-embedding-options';\nimport { GoogleVertexConfig } from './google-vertex-config';\n\nexport class GoogleVertexEmbeddingModel implements EmbeddingModelV3 {\n readonly specificationVersion = 'v3';\n readonly modelId: GoogleVertexEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 2048;\n readonly supportsParallelCalls = true;\n\n private readonly config: GoogleVertexConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n modelId: GoogleVertexEmbeddingModelId,\n config: GoogleVertexConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n providerOptions,\n }: Parameters<EmbeddingModelV3['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV3['doEmbed']>>\n > {\n let googleOptions = await parseProviderOptions({\n provider: 'vertex',\n providerOptions,\n schema: googleVertexEmbeddingProviderOptions,\n });\n\n if (googleOptions == null) {\n googleOptions = await parseProviderOptions({\n provider: 'google',\n providerOptions,\n schema: googleVertexEmbeddingProviderOptions,\n });\n }\n\n googleOptions = googleOptions ?? {};\n\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n headers,\n );\n\n const url = `${this.config.baseURL}/models/${this.modelId}:predict`;\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url,\n headers: mergedHeaders,\n body: {\n instances: values.map(value => ({\n content: value,\n task_type: googleOptions.taskType,\n title: googleOptions.title,\n })),\n parameters: {\n outputDimensionality: googleOptions.outputDimensionality,\n autoTruncate: googleOptions.autoTruncate,\n },\n },\n failedResponseHandler: googleVertexFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n googleVertexTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n warnings: [],\n embeddings: response.predictions.map(\n prediction => prediction.embeddings.values,\n ),\n usage: {\n tokens: response.predictions.reduce(\n (tokenCount, prediction) =>\n tokenCount + prediction.embeddings.statistics.token_count,\n 0,\n ),\n },\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst googleVertexTextEmbeddingResponseSchema = z.object({\n predictions: z.array(\n z.object({\n embeddings: z.object({\n values: z.array(z.number()),\n statistics: z.object({\n token_count: z.number(),\n }),\n }),\n }),\n ),\n});\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\n\nconst googleVertexErrorDataSchema = z.object({\n error: 
z.object({\n code: z.number().nullable(),\n message: z.string(),\n status: z.string(),\n }),\n});\n\nexport type GoogleVertexErrorData = z.infer<typeof googleVertexErrorDataSchema>;\n\nexport const googleVertexFailedResponseHandler = createJsonErrorResponseHandler(\n {\n errorSchema: googleVertexErrorDataSchema,\n errorToMessage: data => data.error.message,\n },\n);\n","import { z } from 'zod/v4';\n\n// https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/text-embeddings-api\nexport type GoogleVertexEmbeddingModelId =\n | 'textembedding-gecko'\n | 'textembedding-gecko@001'\n | 'textembedding-gecko@003'\n | 'textembedding-gecko-multilingual'\n | 'textembedding-gecko-multilingual@001'\n | 'text-multilingual-embedding-002'\n | 'text-embedding-004'\n | 'text-embedding-005'\n | (string & {});\n\nexport const googleVertexEmbeddingProviderOptions = z.object({\n /**\n * Optional. Optional reduced dimension for the output embedding.\n * If set, excessive values in the output embedding are truncated from the end.\n */\n outputDimensionality: z.number().optional(),\n\n /**\n * Optional. Specifies the task type for generating embeddings.\n * Supported task types:\n * - SEMANTIC_SIMILARITY: Optimized for text similarity.\n * - CLASSIFICATION: Optimized for text classification.\n * - CLUSTERING: Optimized for clustering texts based on similarity.\n * - RETRIEVAL_DOCUMENT: Optimized for document retrieval.\n * - RETRIEVAL_QUERY: Optimized for query-based retrieval.\n * - QUESTION_ANSWERING: Optimized for answering questions.\n * - FACT_VERIFICATION: Optimized for verifying factual information.\n * - CODE_RETRIEVAL_QUERY: Optimized for retrieving code blocks based on natural language queries.\n */\n taskType: z\n .enum([\n 'SEMANTIC_SIMILARITY',\n 'CLASSIFICATION',\n 'CLUSTERING',\n 'RETRIEVAL_DOCUMENT',\n 'RETRIEVAL_QUERY',\n 'QUESTION_ANSWERING',\n 'FACT_VERIFICATION',\n 'CODE_RETRIEVAL_QUERY',\n ])\n .optional(),\n\n /**\n * Optional. The title of the document being embedded.\n * Only valid when task_type is set to 'RETRIEVAL_DOCUMENT'.\n * Helps the model produce better embeddings by providing additional context.\n */\n title: z.string().optional(),\n\n /**\n * Optional. When set to true, input text will be truncated. When set to false,\n * an error is returned if the input text is longer than the maximum length supported by the model. 
Defaults to true.\n */\n autoTruncate: z.boolean().optional(),\n});\n\nexport type GoogleVertexEmbeddingProviderOptions = z.infer<\n typeof googleVertexEmbeddingProviderOptions\n>;\n","import {\n ImageModelV3,\n ImageModelV3File,\n SharedV3Warning,\n} from '@ai-sdk/provider';\nimport {\n Resolvable,\n combineHeaders,\n convertUint8ArrayToBase64,\n createJsonResponseHandler,\n parseProviderOptions,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { googleVertexFailedResponseHandler } from './google-vertex-error';\nimport { GoogleVertexImageModelId } from './google-vertex-image-settings';\n\ninterface GoogleVertexImageModelConfig {\n provider: string;\n baseURL: string;\n headers?: Resolvable<Record<string, string | undefined>>;\n fetch?: typeof fetch;\n _internal?: {\n currentDate?: () => Date;\n };\n}\n\n// https://cloud.google.com/vertex-ai/generative-ai/docs/image/generate-images\nexport class GoogleVertexImageModel implements ImageModelV3 {\n readonly specificationVersion = 'v3';\n // https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/imagen-api#parameter_list\n readonly maxImagesPerCall = 4;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: GoogleVertexImageModelId,\n private config: GoogleVertexImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n files,\n mask,\n }: Parameters<ImageModelV3['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV3['doGenerate']>>\n > {\n const warnings: Array<SharedV3Warning> = [];\n\n if (size != null) {\n warnings.push({\n type: 'unsupported',\n feature: 'size',\n details:\n 'This model does not support the `size` option. Use `aspectRatio` instead.',\n });\n }\n\n const vertexImageOptions = await parseProviderOptions({\n provider: 'vertex',\n providerOptions,\n schema: vertexImageProviderOptionsSchema,\n });\n\n // Extract edit-specific options from provider options\n const { edit, ...otherOptions } = vertexImageOptions ?? {};\n const { mode: editMode, baseSteps, maskMode, maskDilation } = edit ?? {};\n\n // Build the request body based on whether we're editing or generating\n const isEditMode = files != null && files.length > 0;\n\n let body: Record<string, unknown>;\n\n if (isEditMode) {\n // Build reference images for editing\n const referenceImages: Array<Record<string, unknown>> = [];\n\n // Add the source image(s)\n for (let i = 0; i < files.length; i++) {\n const file = files[i];\n referenceImages.push({\n referenceType: 'REFERENCE_TYPE_RAW',\n referenceId: i + 1,\n referenceImage: {\n bytesBase64Encoded: getBase64Data(file),\n },\n });\n }\n\n // Add mask if provided\n if (mask != null) {\n referenceImages.push({\n referenceType: 'REFERENCE_TYPE_MASK',\n referenceId: files.length + 1,\n referenceImage: {\n bytesBase64Encoded: getBase64Data(mask),\n },\n maskImageConfig: {\n maskMode: maskMode ?? 'MASK_MODE_USER_PROVIDED',\n ...(maskDilation != null ? { dilation: maskDilation } : {}),\n },\n });\n }\n\n body = {\n instances: [\n {\n prompt,\n referenceImages,\n },\n ],\n parameters: {\n sampleCount: n,\n ...(aspectRatio != null ? { aspectRatio } : {}),\n ...(seed != null ? { seed } : {}),\n editMode: editMode ?? 'EDIT_MODE_INPAINT_INSERTION',\n ...(baseSteps != null ? 
{ editConfig: { baseSteps } } : {}),\n ...otherOptions,\n },\n };\n } else {\n // Standard image generation\n body = {\n instances: [{ prompt }],\n parameters: {\n sampleCount: n,\n ...(aspectRatio != null ? { aspectRatio } : {}),\n ...(seed != null ? { seed } : {}),\n ...otherOptions,\n },\n };\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: `${this.config.baseURL}/models/${this.modelId}:predict`,\n headers: combineHeaders(await resolve(this.config.headers), headers),\n body,\n failedResponseHandler: googleVertexFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n vertexImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images:\n response.predictions?.map(\n ({ bytesBase64Encoded }) => bytesBase64Encoded,\n ) ?? [],\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n providerMetadata: {\n vertex: {\n images:\n response.predictions?.map(prediction => {\n const {\n // normalize revised prompt property\n prompt: revisedPrompt,\n } = prediction;\n\n return { ...(revisedPrompt != null && { revisedPrompt }) };\n }) ?? [],\n },\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst vertexImageResponseSchema = z.object({\n predictions: z\n .array(\n z.object({\n bytesBase64Encoded: z.string(),\n mimeType: z.string(),\n prompt: z.string().nullish(),\n }),\n )\n .nullish(),\n});\n\nconst vertexImageProviderOptionsSchema = z.object({\n negativePrompt: z.string().nullish(),\n personGeneration: z\n .enum(['dont_allow', 'allow_adult', 'allow_all'])\n .nullish(),\n safetySetting: z\n .enum([\n 'block_low_and_above',\n 'block_medium_and_above',\n 'block_only_high',\n 'block_none',\n ])\n .nullish(),\n addWatermark: z.boolean().nullish(),\n storageUri: z.string().nullish(),\n sampleImageSize: z.enum(['1K', '2K']).nullish(),\n /**\n * Configuration for image editing operations\n */\n edit: z\n .object({\n /**\n * An integer that represents the number of sampling steps.\n * A higher value offers better image quality, a lower value offers better latency.\n * Try 35 steps to start. If the quality doesn't meet your requirements,\n * increase the value towards an upper limit of 75.\n */\n baseSteps: z.number().nullish(),\n\n // Edit mode options\n // https://cloud.google.com/vertex-ai/generative-ai/docs/image/edit-insert-objects\n mode: z\n .enum([\n 'EDIT_MODE_INPAINT_INSERTION',\n 'EDIT_MODE_INPAINT_REMOVAL',\n 'EDIT_MODE_OUTPAINT',\n 'EDIT_MODE_CONTROLLED_EDITING',\n 'EDIT_MODE_PRODUCT_IMAGE',\n 'EDIT_MODE_BGSWAP',\n ])\n .nullish(),\n\n /**\n * The mask mode to use.\n * - `MASK_MODE_DEFAULT` - Default value for mask mode.\n * - `MASK_MODE_USER_PROVIDED` - User provided mask. No segmentation needed.\n * - `MASK_MODE_DETECTION_BOX` - Mask from detected bounding boxes.\n * - `MASK_MODE_CLOTHING_AREA` - Masks from segmenting the clothing area with open-vocab segmentation.\n * - `MASK_MODE_PARSED_PERSON` - Masks from segmenting the person body and clothing using the person-parsing model.\n */\n maskMode: z\n .enum([\n 'MASK_MODE_DEFAULT',\n 'MASK_MODE_USER_PROVIDED',\n 'MASK_MODE_DETECTION_BOX',\n 'MASK_MODE_CLOTHING_AREA',\n 'MASK_MODE_PARSED_PERSON',\n ])\n .nullish(),\n\n /**\n * Optional. 
A float value between 0 and 1, inclusive, that represents the\n * percentage of the image width to grow the mask by. Using dilation helps\n * compensate for imprecise masks. We recommend a value of 0.01.\n */\n maskDilation: z.number().nullish(),\n })\n .nullish(),\n});\nexport type GoogleVertexImageProviderOptions = z.infer<\n typeof vertexImageProviderOptionsSchema\n>;\n\n/**\n * Helper to convert ImageModelV3File data to base64 string\n */\nfunction getBase64Data(file: ImageModelV3File): string {\n if (file.type === 'url') {\n throw new Error(\n 'URL-based images are not supported for Google Vertex image editing. Please provide the image data directly.',\n );\n }\n\n if (typeof file.data === 'string') {\n return file.data;\n }\n\n // Convert Uint8Array to base64\n return convertUint8ArrayToBase64(file.data);\n}\n","import { googleTools } from '@ai-sdk/google/internal';\n\nexport const googleVertexTools = {\n googleSearch: googleTools.googleSearch,\n enterpriseWebSearch: googleTools.enterpriseWebSearch,\n googleMaps: googleTools.googleMaps,\n urlContext: googleTools.urlContext,\n fileSearch: googleTools.fileSearch,\n codeExecution: googleTools.codeExecution,\n vertexRagStore: googleTools.vertexRagStore,\n};\n","import {\n AISDKError,\n type Experimental_VideoModelV3,\n type SharedV3Warning,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n convertUint8ArrayToBase64,\n createJsonResponseHandler,\n delay,\n type FetchFunction,\n lazySchema,\n parseProviderOptions,\n postJsonToApi,\n type Resolvable,\n resolve,\n zodSchema,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { googleVertexFailedResponseHandler } from './google-vertex-error';\nimport type { GoogleVertexVideoModelId } from './google-vertex-video-settings';\n\nexport type GoogleVertexVideoProviderOptions = {\n // Polling configuration\n pollIntervalMs?: number | null;\n pollTimeoutMs?: number | null;\n\n // Video generation options\n personGeneration?: 'dont_allow' | 'allow_adult' | 'allow_all' | null;\n negativePrompt?: string | null;\n generateAudio?: boolean | null;\n\n // Output configuration\n gcsOutputDirectory?: string | null;\n\n // Reference images (for style/asset reference)\n referenceImages?: Array<{\n bytesBase64Encoded?: string;\n gcsUri?: string;\n }> | null;\n\n [key: string]: unknown; // For passthrough\n};\n\ninterface GoogleVertexVideoModelConfig {\n provider: string;\n baseURL: string;\n headers?: Resolvable<Record<string, string | undefined>>;\n fetch?: FetchFunction;\n generateId?: () => string;\n _internal?: {\n currentDate?: () => Date;\n };\n}\n\nexport class GoogleVertexVideoModel implements Experimental_VideoModelV3 {\n readonly specificationVersion = 'v3';\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxVideosPerCall(): number {\n // Vertex supports multiple videos via sampleCount\n return 4;\n }\n\n constructor(\n readonly modelId: GoogleVertexVideoModelId,\n private readonly config: GoogleVertexVideoModelConfig,\n ) {}\n\n async doGenerate(\n options: Parameters<Experimental_VideoModelV3['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<Experimental_VideoModelV3['doGenerate']>>> {\n const currentDate = this.config._internal?.currentDate?.() ?? 
new Date();\n const warnings: SharedV3Warning[] = [];\n\n const vertexOptions = (await parseProviderOptions({\n provider: 'vertex',\n providerOptions: options.providerOptions,\n schema: vertexVideoProviderOptionsSchema,\n })) as GoogleVertexVideoProviderOptions | undefined;\n\n const instances: Array<Record<string, unknown>> = [{}];\n const instance = instances[0];\n\n if (options.prompt != null) {\n instance.prompt = options.prompt;\n }\n\n if (options.image != null) {\n if (options.image.type === 'url') {\n warnings.push({\n type: 'unsupported',\n feature: 'URL-based image input',\n details:\n 'Vertex AI video models require base64-encoded images or GCS URIs. URL will be ignored.',\n });\n } else {\n const base64Data =\n typeof options.image.data === 'string'\n ? options.image.data\n : convertUint8ArrayToBase64(options.image.data);\n\n instance.image = {\n bytesBase64Encoded: base64Data,\n };\n }\n }\n\n if (vertexOptions?.referenceImages != null) {\n instance.referenceImages = vertexOptions.referenceImages;\n }\n\n const parameters: Record<string, unknown> = {\n sampleCount: options.n,\n };\n\n if (options.aspectRatio) {\n parameters.aspectRatio = options.aspectRatio;\n }\n\n if (options.resolution) {\n const resolutionMap: Record<string, string> = {\n '1280x720': '720p',\n '1920x1080': '1080p',\n '3840x2160': '4k',\n };\n parameters.resolution =\n resolutionMap[options.resolution] || options.resolution;\n }\n\n if (options.duration) {\n parameters.durationSeconds = options.duration;\n }\n\n if (options.seed) {\n parameters.seed = options.seed;\n }\n\n if (vertexOptions != null) {\n const opts = vertexOptions;\n\n if (\n opts.personGeneration !== undefined &&\n opts.personGeneration !== null\n ) {\n parameters.personGeneration = opts.personGeneration;\n }\n if (opts.negativePrompt !== undefined && opts.negativePrompt !== null) {\n parameters.negativePrompt = opts.negativePrompt;\n }\n if (opts.generateAudio !== undefined && opts.generateAudio !== null) {\n parameters.generateAudio = opts.generateAudio;\n }\n if (\n opts.gcsOutputDirectory !== undefined &&\n opts.gcsOutputDirectory !== null\n ) {\n parameters.gcsOutputDirectory = opts.gcsOutputDirectory;\n }\n\n for (const [key, value] of Object.entries(opts)) {\n if (\n ![\n 'pollIntervalMs',\n 'pollTimeoutMs',\n 'personGeneration',\n 'negativePrompt',\n 'generateAudio',\n 'gcsOutputDirectory',\n 'referenceImages',\n ].includes(key)\n ) {\n parameters[key] = value;\n }\n }\n }\n\n const { value: operation } = await postJsonToApi({\n url: `${this.config.baseURL}/models/${this.modelId}:predictLongRunning`,\n headers: combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n ),\n body: {\n instances,\n parameters,\n },\n successfulResponseHandler: createJsonResponseHandler(\n vertexOperationSchema,\n ),\n failedResponseHandler: googleVertexFailedResponseHandler,\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const operationName = operation.name;\n if (!operationName) {\n throw new AISDKError({\n name: 'VERTEX_VIDEO_GENERATION_ERROR',\n message: 'No operation name returned from API',\n });\n }\n\n const pollIntervalMs = vertexOptions?.pollIntervalMs ?? 10000; // 10 seconds\n const pollTimeoutMs = vertexOptions?.pollTimeoutMs ?? 
600000; // 10 minutes\n\n const startTime = Date.now();\n let finalOperation = operation;\n let responseHeaders: Record<string, string> | undefined;\n\n while (!finalOperation.done) {\n if (Date.now() - startTime > pollTimeoutMs) {\n throw new AISDKError({\n name: 'VERTEX_VIDEO_GENERATION_TIMEOUT',\n message: `Video generation timed out after ${pollTimeoutMs}ms`,\n });\n }\n\n await delay(pollIntervalMs);\n\n if (options.abortSignal?.aborted) {\n throw new AISDKError({\n name: 'VERTEX_VIDEO_GENERATION_ABORTED',\n message: 'Video generation request was aborted',\n });\n }\n\n const { value: statusOperation, responseHeaders: pollHeaders } =\n await postJsonToApi({\n url: `${this.config.baseURL}/models/${this.modelId}:fetchPredictOperation`,\n headers: combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n ),\n body: {\n operationName,\n },\n successfulResponseHandler: createJsonResponseHandler(\n vertexOperationSchema,\n ),\n failedResponseHandler: googleVertexFailedResponseHandler,\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n finalOperation = statusOperation;\n responseHeaders = pollHeaders;\n }\n\n if (finalOperation.error) {\n throw new AISDKError({\n name: 'VERTEX_VIDEO_GENERATION_FAILED',\n message: `Video generation failed: ${finalOperation.error.message}`,\n });\n }\n\n const response = finalOperation.response;\n if (!response?.videos || response.videos.length === 0) {\n throw new AISDKError({\n name: 'VERTEX_VIDEO_GENERATION_ERROR',\n message: `No videos in response. Response: ${JSON.stringify(finalOperation)}`,\n });\n }\n\n // Process videos - Vertex returns base64 encoded videos or GCS URIs\n const videos: Array<\n | { type: 'base64'; data: string; mediaType: string }\n | { type: 'url'; url: string; mediaType: string }\n > = [];\n const videoMetadata: Array<{\n gcsUri?: string | null | undefined;\n mimeType?: string | null | undefined;\n }> = [];\n\n for (const video of response.videos) {\n if (video.bytesBase64Encoded) {\n videos.push({\n type: 'base64',\n data: video.bytesBase64Encoded,\n mediaType: video.mimeType || 'video/mp4',\n });\n videoMetadata.push({\n mimeType: video.mimeType,\n });\n } else if (video.gcsUri) {\n videos.push({\n type: 'url',\n url: video.gcsUri,\n mediaType: video.mimeType || 'video/mp4',\n });\n videoMetadata.push({\n gcsUri: video.gcsUri,\n mimeType: video.mimeType,\n });\n }\n }\n\n if (videos.length === 0) {\n throw new AISDKError({\n name: 'VERTEX_VIDEO_GENERATION_ERROR',\n message: 'No valid videos in response',\n });\n }\n\n return {\n videos,\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n providerMetadata: {\n 'google-vertex': {\n videos: videoMetadata,\n },\n },\n };\n }\n}\n\nconst vertexOperationSchema = z.object({\n name: z.string().nullish(),\n done: z.boolean().nullish(),\n error: z\n .object({\n code: z.number().nullish(),\n message: z.string(),\n status: z.string().nullish(),\n })\n .nullish(),\n response: z\n .object({\n videos: z\n .array(\n z.object({\n bytesBase64Encoded: z.string().nullish(),\n gcsUri: z.string().nullish(),\n mimeType: z.string().nullish(),\n }),\n )\n .nullish(),\n raiMediaFilteredCount: z.number().nullish(),\n })\n .nullish(),\n});\n\nconst vertexVideoProviderOptionsSchema = lazySchema(() =>\n zodSchema(\n z\n .object({\n pollIntervalMs: z.number().positive().nullish(),\n pollTimeoutMs: z.number().positive().nullish(),\n personGeneration: z\n .enum(['dont_allow', 'allow_adult', 'allow_all'])\n 
.nullish(),\n negativePrompt: z.string().nullish(),\n generateAudio: z.boolean().nullish(),\n gcsOutputDirectory: z.string().nullish(),\n referenceImages: z\n .array(\n z.object({\n bytesBase64Encoded: z.string().nullish(),\n gcsUri: z.string().nullish(),\n }),\n )\n .nullish(),\n })\n .passthrough(),\n ),\n);\n"],"mappings":";AAAA,SAAS,uBAAAA,sBAAqB,WAAAC,gBAAe;;;ACA7C,SAAS,kBAAqC;AAE9C,IAAI,eAAkC;AACtC,IAAI,cAAwC;AAE5C,SAAS,QAAQ,SAA4B;AAC3C,MAAI,CAAC,gBAAgB,YAAY,aAAa;AAC5C,mBAAe,IAAI,WAAW;AAAA,MAC5B,QAAQ,CAAC,gDAAgD;AAAA,MACzD,GAAG;AAAA,IACL,CAAC;AACD,kBAAc;AAAA,EAChB;AACA,SAAO;AACT;AAEA,eAAsB,kBAAkB,SAA6B;AACnE,QAAM,OAAO,QAAQ,WAAW,CAAC,CAAC;AAClC,QAAM,SAAS,MAAM,KAAK,UAAU;AACpC,QAAM,QAAQ,MAAM,OAAO,eAAe;AAC1C,UAAO,+BAAO,UAAS;AACzB;;;ACrBA,SAAS,uCAAuC;AAOhD;AAAA,EAEE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,WAAAC;AAAA,EAEA;AAAA,EACA;AAAA,OACK;;;ACfA,IAAM,UACX,OACI,WACA;;;ACLN;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;;;ACXlB,SAAS,sCAAsC;AAC/C,SAAS,SAAS;AAElB,IAAM,8BAA8B,EAAE,OAAO;AAAA,EAC3C,OAAO,EAAE,OAAO;AAAA,IACd,MAAM,EAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,SAAS,EAAE,OAAO;AAAA,IAClB,QAAQ,EAAE,OAAO;AAAA,EACnB,CAAC;AACH,CAAC;AAIM,IAAM,oCAAoC;AAAA,EAC/C;AAAA,IACE,aAAa;AAAA,IACb,gBAAgB,UAAQ,KAAK,MAAM;AAAA,EACrC;AACF;;;AClBA,SAAS,KAAAC,UAAS;AAcX,IAAM,uCAAuCA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAK3D,sBAAsBA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAc1C,UAAUA,GACP,KAAK;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOZ,OAAOA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAM3B,cAAcA,GAAE,QAAQ,EAAE,SAAS;AACrC,CAAC;;;AFvCM,IAAM,6BAAN,MAA6D;AAAA,EAYlE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAY/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAVA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAUA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,gBAAgB,MAAM,qBAAqB;AAAA,MAC7C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,QAAI,iBAAiB,MAAM;AACzB,sBAAgB,MAAM,qBAAqB;AAAA,QACzC,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAEA,oBAAgB,wCAAiB,CAAC;AAElC,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,gBAAgB;AAAA,MACpB,MAAM,QAAQ,KAAK,OAAO,OAAO;AAAA,MACjC;AAAA,IACF;AAEA,UAAM,MAAM,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AACzD,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,MAAM,cAAc;AAAA,MACtB;AAAA,MACA,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,WAAW,OAAO,IAAI,YAAU;AAAA,UAC9B,SAAS;AAAA,UACT,WAAW,cAAc;AAAA,UACzB,OAAO,cAAc;AAAA,QACvB,EAAE;AAAA,QACF,YAAY;AAAA,UACV,sBAAsB,cAAc;AAAA,UACpC,cAAc,cAAc;AAAA,QAC9B;AAAA,MACF;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,UAAU,CAAC;AAAA,MACX,YAAY,SAAS,YAAY;AAAA,QAC/B,gBAAc,WAAW,WAAW;AAAA,MACtC;AAAA,MACA,OAAO;AAAA,QACL,QAAQ,SAAS,YAAY;AAAA,UAC3B,CAAC,YAAY,eACX,aAAa,WAAW,WAAW,WAAW;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAAA,MACA,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,0CAA0CC,GAAE,OAAO;AAAA,EACvD,aAAaA,GAAE;AAAA,IACbA,GAAE,OAAO;AAAA,MACP,YAAYA,GAAE,OAAO;AAAA,QACnB,QAAQA,GAAE,MAAMA,GAAE,OAAO,CAAC;AAAA,QAC1B,YAAYA,GAAE,OAAO;AAAA,UACnB,aAAaA,GAAE,OAAO;AAAA,QACxB,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC;;;AGjID;AAAA,EAEE,kBAAAC;AAAA,EACA;AAAA,EACA,6BAAAC;AAAA,EACA,wBAAAC;AAAA,EACA,iBAAAC;AAAA,EACA,WAAAC;AAAA,OACK;AACP,SAAS
,KAAAC,UAAS;AAeX,IAAM,yBAAN,MAAqD;AAAA,EAS1D,YACW,SACD,QACR;AAFS;AACD;AAVV,SAAS,uBAAuB;AAEhC;AAAA,SAAS,mBAAmB;AAAA,EASzB;AAAA,EAPH,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAOA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAxDJ;AAyDI,UAAM,WAAmC,CAAC;AAE1C,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM,qBAAqB,MAAMC,sBAAqB;AAAA,MACpD,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAGD,UAAM,EAAE,MAAM,GAAG,aAAa,IAAI,kDAAsB,CAAC;AACzD,UAAM,EAAE,MAAM,UAAU,WAAW,UAAU,aAAa,IAAI,sBAAQ,CAAC;AAGvE,UAAM,aAAa,SAAS,QAAQ,MAAM,SAAS;AAEnD,QAAI;AAEJ,QAAI,YAAY;AAEd,YAAM,kBAAkD,CAAC;AAGzD,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,cAAM,OAAO,MAAM,CAAC;AACpB,wBAAgB,KAAK;AAAA,UACnB,eAAe;AAAA,UACf,aAAa,IAAI;AAAA,UACjB,gBAAgB;AAAA,YACd,oBAAoB,cAAc,IAAI;AAAA,UACxC;AAAA,QACF,CAAC;AAAA,MACH;AAGA,UAAI,QAAQ,MAAM;AAChB,wBAAgB,KAAK;AAAA,UACnB,eAAe;AAAA,UACf,aAAa,MAAM,SAAS;AAAA,UAC5B,gBAAgB;AAAA,YACd,oBAAoB,cAAc,IAAI;AAAA,UACxC;AAAA,UACA,iBAAiB;AAAA,YACf,UAAU,8BAAY;AAAA,YACtB,GAAI,gBAAgB,OAAO,EAAE,UAAU,aAAa,IAAI,CAAC;AAAA,UAC3D;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,QACL,WAAW;AAAA,UACT;AAAA,YACE;AAAA,YACA;AAAA,UACF;AAAA,QACF;AAAA,QACA,YAAY;AAAA,UACV,aAAa;AAAA,UACb,GAAI,eAAe,OAAO,EAAE,YAAY,IAAI,CAAC;AAAA,UAC7C,GAAI,QAAQ,OAAO,EAAE,KAAK,IAAI,CAAC;AAAA,UAC/B,UAAU,8BAAY;AAAA,UACtB,GAAI,aAAa,OAAO,EAAE,YAAY,EAAE,UAAU,EAAE,IAAI,CAAC;AAAA,UACzD,GAAG;AAAA,QACL;AAAA,MACF;AAAA,IACF,OAAO;AAEL,aAAO;AAAA,QACL,WAAW,CAAC,EAAE,OAAO,CAAC;AAAA,QACtB,YAAY;AAAA,UACV,aAAa;AAAA,UACb,GAAI,eAAe,OAAO,EAAE,YAAY,IAAI,CAAC;AAAA,UAC7C,GAAI,QAAQ,OAAO,EAAE,KAAK,IAAI,CAAC;AAAA,UAC/B,GAAG;AAAA,QACL;AAAA,MACF;AAAA,IACF;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,MAAMC,eAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AAAA,MAClD,SAASC,gBAAe,MAAMC,SAAQ,KAAK,OAAO,OAAO,GAAG,OAAO;AAAA,MACnE;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,SACE,oBAAS,gBAAT,mBAAsB;AAAA,QACpB,CAAC,EAAE,mBAAmB,MAAM;AAAA,YAD9B,YAEK,CAAC;AAAA,MACR;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,MACA,kBAAkB;AAAA,QAChB,QAAQ;AAAA,UACN,SACE,oBAAS,gBAAT,mBAAsB,IAAI,gBAAc;AACtC,kBAAM;AAAA;AAAA,cAEJ,QAAQ;AAAA,YACV,IAAI;AAEJ,mBAAO,EAAE,GAAI,iBAAiB,QAAQ,EAAE,cAAc,EAAG;AAAA,UAC3D,OAPA,YAOM,CAAC;AAAA,QACX;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,4BAA4BC,GAAE,OAAO;AAAA,EACzC,aAAaA,GACV;AAAA,IACCA,GAAE,OAAO;AAAA,MACP,oBAAoBA,GAAE,OAAO;AAAA,MAC7B,UAAUA,GAAE,OAAO;AAAA,MACnB,QAAQA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC7B,CAAC;AAAA,EACH,EACC,QAAQ;AACb,CAAC;AAED,IAAM,mCAAmCA,GAAE,OAAO;AAAA,EAChD,gBAAgBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACnC,kBAAkBA,GACf,KAAK,CAAC,cAAc,eAAe,WAAW,CAAC,EAC/C,QAAQ;AAAA,EACX,eAAeA,GACZ,KAAK;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,QAAQ;AAAA,EACX,cAAcA,GAAE,QAAQ,EAAE,QAAQ;AAAA,EAClC,YAAYA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC/B,iBAAiBA,GAAE,KAAK,CAAC,MAAM,IAAI,CAAC,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAI9C,MAAMA,GACH,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAON,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA,IAI9B,MAAMA,GACH,KAAK;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC,EACA,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAUX,UAAUA,GACP,KAAK;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC,EACA,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAOX,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACnC,CAAC,EACA,QAAQ;AACb,CAAC;AAQD,SAAS,cAAc,MAAgC;AACrD,MAA
I,KAAK,SAAS,OAAO;AACvB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,MAAI,OAAO,KAAK,SAAS,UAAU;AACjC,WAAO,KAAK;AAAA,EACd;AAGA,SAAO,0BAA0B,KAAK,IAAI;AAC5C;;;AC/RA,SAAS,mBAAmB;AAErB,IAAM,oBAAoB;AAAA,EAC/B,cAAc,YAAY;AAAA,EAC1B,qBAAqB,YAAY;AAAA,EACjC,YAAY,YAAY;AAAA,EACxB,YAAY,YAAY;AAAA,EACxB,YAAY,YAAY;AAAA,EACxB,eAAe,YAAY;AAAA,EAC3B,gBAAgB,YAAY;AAC9B;;;ACVA;AAAA,EACE;AAAA,OAGK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,6BAAAC;AAAA,EACA,6BAAAC;AAAA,EACA;AAAA,EAEA;AAAA,EACA,wBAAAC;AAAA,EACA,iBAAAC;AAAA,EAEA,WAAAC;AAAA,EACA;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AAqCX,IAAM,yBAAN,MAAkE;AAAA,EAYvE,YACW,SACQ,QACjB;AAFS;AACQ;AAbnB,SAAS,uBAAuB;AAAA,EAc7B;AAAA,EAZH,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,mBAA2B;AAE7B,WAAO;AAAA,EACT;AAAA,EAOA,MAAM,WACJ,SACuE;AA1E3E;AA2EI,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,WAA8B,CAAC;AAErC,UAAM,gBAAiB,MAAMC,sBAAqB;AAAA,MAChD,UAAU;AAAA,MACV,iBAAiB,QAAQ;AAAA,MACzB,QAAQ;AAAA,IACV,CAAC;AAED,UAAM,YAA4C,CAAC,CAAC,CAAC;AACrD,UAAM,WAAW,UAAU,CAAC;AAE5B,QAAI,QAAQ,UAAU,MAAM;AAC1B,eAAS,SAAS,QAAQ;AAAA,IAC5B;AAEA,QAAI,QAAQ,SAAS,MAAM;AACzB,UAAI,QAAQ,MAAM,SAAS,OAAO;AAChC,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SACE;AAAA,QACJ,CAAC;AAAA,MACH,OAAO;AACL,cAAM,aACJ,OAAO,QAAQ,MAAM,SAAS,WAC1B,QAAQ,MAAM,OACdC,2BAA0B,QAAQ,MAAM,IAAI;AAElD,iBAAS,QAAQ;AAAA,UACf,oBAAoB;AAAA,QACtB;AAAA,MACF;AAAA,IACF;AAEA,SAAI,+CAAe,oBAAmB,MAAM;AAC1C,eAAS,kBAAkB,cAAc;AAAA,IAC3C;AAEA,UAAM,aAAsC;AAAA,MAC1C,aAAa,QAAQ;AAAA,IACvB;AAEA,QAAI,QAAQ,aAAa;AACvB,iBAAW,cAAc,QAAQ;AAAA,IACnC;AAEA,QAAI,QAAQ,YAAY;AACtB,YAAM,gBAAwC;AAAA,QAC5C,YAAY;AAAA,QACZ,aAAa;AAAA,QACb,aAAa;AAAA,MACf;AACA,iBAAW,aACT,cAAc,QAAQ,UAAU,KAAK,QAAQ;AAAA,IACjD;AAEA,QAAI,QAAQ,UAAU;AACpB,iBAAW,kBAAkB,QAAQ;AAAA,IACvC;AAEA,QAAI,QAAQ,MAAM;AAChB,iBAAW,OAAO,QAAQ;AAAA,IAC5B;AAEA,QAAI,iBAAiB,MAAM;AACzB,YAAM,OAAO;AAEb,UACE,KAAK,qBAAqB,UAC1B,KAAK,qBAAqB,MAC1B;AACA,mBAAW,mBAAmB,KAAK;AAAA,MACrC;AACA,UAAI,KAAK,mBAAmB,UAAa,KAAK,mBAAmB,MAAM;AACrE,mBAAW,iBAAiB,KAAK;AAAA,MACnC;AACA,UAAI,KAAK,kBAAkB,UAAa,KAAK,kBAAkB,MAAM;AACnE,mBAAW,gBAAgB,KAAK;AAAA,MAClC;AACA,UACE,KAAK,uBAAuB,UAC5B,KAAK,uBAAuB,MAC5B;AACA,mBAAW,qBAAqB,KAAK;AAAA,MACvC;AAEA,iBAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,IAAI,GAAG;AAC/C,YACE,CAAC;AAAA,UACC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF,EAAE,SAAS,GAAG,GACd;AACA,qBAAW,GAAG,IAAI;AAAA,QACpB;AAAA,MACF;AAAA,IACF;AAEA,UAAM,EAAE,OAAO,UAAU,IAAI,MAAMC,eAAc;AAAA,MAC/C,KAAK,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AAAA,MAClD,SAASC;AAAA,QACP,MAAMC,SAAQ,KAAK,OAAO,OAAO;AAAA,QACjC,QAAQ;AAAA,MACV;AAAA,MACA,MAAM;AAAA,QACJ;AAAA,QACA;AAAA,MACF;AAAA,MACA,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA,uBAAuB;AAAA,MACvB,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,gBAAgB,UAAU;AAChC,QAAI,CAAC,eAAe;AAClB,YAAM,IAAI,WAAW;AAAA,QACnB,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,kBAAiB,oDAAe,mBAAf,YAAiC;AACxD,UAAM,iBAAgB,oDAAe,kBAAf,YAAgC;AAEtD,UAAM,YAAY,KAAK,IAAI;AAC3B,QAAI,iBAAiB;AACrB,QAAI;AAEJ,WAAO,CAAC,eAAe,MAAM;AAC3B,UAAI,KAAK,IAAI,IAAI,YAAY,eAAe;AAC1C,cAAM,IAAI,WAAW;AAAA,UACnB,MAAM;AAAA,UACN,SAAS,oCAAoC,aAAa;AAAA,QAC5D,CAAC;AAAA,MACH;AAEA,YAAM,MAAM,cAAc;AAE1B,WAAI,aAAQ,gBAAR,mBAAqB,SAAS;AAChC,cAAM,IAAI,WAAW;AAAA,UACnB,MAAM;AAAA,UACN,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAEA,YAAM,EAAE,OAAO,iBAAiB,iBAAiB,YAAY,IAC3D,MAAMH,eAAc;AAAA,QAClB,KAAK,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AAAA,QAClD,SAASC;AAAA,UACP,MAAMC,SAAQ,KAAK,OAAO,OAAO;AAAA,UACjC,QAAQ;AAAA,QACV;AAAA,QACA,MAAM;AAAA,UACJ;AAAA,QACF;AAAA,QACA,2BAA2BC;AAAA,UACzB;AAAA,QACF;AAAA,QACA,uBAAuB;AAAA,QACvB,aAAa,QAAQ;AAAA,QACrB,OAAO,KAAK,OAAO;AAAA,MACrB,CAAC;AAEH,uBA
AiB;AACjB,wBAAkB;AAAA,IACpB;AAEA,QAAI,eAAe,OAAO;AACxB,YAAM,IAAI,WAAW;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,4BAA4B,eAAe,MAAM,OAAO;AAAA,MACnE,CAAC;AAAA,IACH;AAEA,UAAM,WAAW,eAAe;AAChC,QAAI,EAAC,qCAAU,WAAU,SAAS,OAAO,WAAW,GAAG;AACrD,YAAM,IAAI,WAAW;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,oCAAoC,KAAK,UAAU,cAAc,CAAC;AAAA,MAC7E,CAAC;AAAA,IACH;AAGA,UAAM,SAGF,CAAC;AACL,UAAM,gBAGD,CAAC;AAEN,eAAW,SAAS,SAAS,QAAQ;AACnC,UAAI,MAAM,oBAAoB;AAC5B,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,MAAM,MAAM;AAAA,UACZ,WAAW,MAAM,YAAY;AAAA,QAC/B,CAAC;AACD,sBAAc,KAAK;AAAA,UACjB,UAAU,MAAM;AAAA,QAClB,CAAC;AAAA,MACH,WAAW,MAAM,QAAQ;AACvB,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,KAAK,MAAM;AAAA,UACX,WAAW,MAAM,YAAY;AAAA,QAC/B,CAAC;AACD,sBAAc,KAAK;AAAA,UACjB,QAAQ,MAAM;AAAA,UACd,UAAU,MAAM;AAAA,QAClB,CAAC;AAAA,MACH;AAAA,IACF;AAEA,QAAI,OAAO,WAAW,GAAG;AACvB,YAAM,IAAI,WAAW;AAAA,QACnB,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,MACA,kBAAkB;AAAA,QAChB,iBAAiB;AAAA,UACf,QAAQ;AAAA,QACV;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,wBAAwBC,GAAE,OAAO;AAAA,EACrC,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACzB,MAAMA,GAAE,QAAQ,EAAE,QAAQ;AAAA,EAC1B,OAAOA,GACJ,OAAO;AAAA,IACN,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,SAASA,GAAE,OAAO;AAAA,IAClB,QAAQA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,CAAC,EACA,QAAQ;AAAA,EACX,UAAUA,GACP,OAAO;AAAA,IACN,QAAQA,GACL;AAAA,MACCA,GAAE,OAAO;AAAA,QACP,oBAAoBA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACvC,QAAQA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC3B,UAAUA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAC/B,CAAC;AAAA,IACH,EACC,QAAQ;AAAA,IACX,uBAAuBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5C,CAAC,EACA,QAAQ;AACb,CAAC;AAED,IAAM,mCAAmC;AAAA,EAAW,MAClD;AAAA,IACEA,GACG,OAAO;AAAA,MACN,gBAAgBA,GAAE,OAAO,EAAE,SAAS,EAAE,QAAQ;AAAA,MAC9C,eAAeA,GAAE,OAAO,EAAE,SAAS,EAAE,QAAQ;AAAA,MAC7C,kBAAkBA,GACf,KAAK,CAAC,cAAc,eAAe,WAAW,CAAC,EAC/C,QAAQ;AAAA,MACX,gBAAgBA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACnC,eAAeA,GAAE,QAAQ,EAAE,QAAQ;AAAA,MACnC,oBAAoBA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACvC,iBAAiBA,GACd;AAAA,QACCA,GAAE,OAAO;AAAA,UACP,oBAAoBA,GAAE,OAAO,EAAE,QAAQ;AAAA,UACvC,QAAQA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC7B,CAAC;AAAA,MACH,EACC,QAAQ;AAAA,IACb,CAAC,EACA,YAAY;AAAA,EACjB;AACF;;;APxVA,IAAM,wBACJ;AAGF,SAAS,uBACP,QACA,aACe;AACf,SAAO,OAAO,KAAK,SAAS;AAC1B,UAAM,eAA4B;AAAA,MAChC,GAAG;AAAA,MACH,SAAS;AAAA,QACP,IAAI,6BAAM,WAAU,iBAAiB,KAAK,OAAO,IAAI,CAAC;AAAA,QACtD,kBAAkB;AAAA,MACpB;AAAA,IACF;AACA,YAAQ,oCAAe,OAAO,IAAI,SAAS,GAAG,YAAY;AAAA,EAC5D;AACF;AAgFO,SAAS,aACd,UAAwC,CAAC,GACnB;AACtB,QAAM,SAAS,oBAAoB;AAAA,IACjC,cAAc,QAAQ;AAAA,IACtB,yBAAyB;AAAA,EAC3B,CAAC;AAED,QAAM,oBAAoB,MACxB,YAAY;AAAA,IACV,cAAc,QAAQ;AAAA,IACtB,aAAa;AAAA,IACb,yBAAyB;AAAA,IACzB,aAAa;AAAA,EACf,CAAC;AAEH,QAAM,qBAAqB,MACzB,YAAY;AAAA,IACV,cAAc,QAAQ;AAAA,IACtB,aAAa;AAAA,IACb,yBAAyB;AAAA,IACzB,aAAa;AAAA,EACf,CAAC;AAEH,QAAM,cAAc,MAAM;AAvJ5B;AAwJI,QAAI,QAAQ;AACV,cAAO,0BAAqB,QAAQ,OAAO,MAApC,YAAyC;AAAA,IAClD;AAEA,UAAM,SAAS,mBAAmB;AAClC,UAAM,UAAU,kBAAkB;AAIlC,UAAM,WAAW,GAAG,WAAW,WAAW,KAAK,SAAS,GAAG;AAE3D,YACE,0BAAqB,QAAQ,OAAO,MAApC,YACA,WAAW,QAAQ,qBAAqB,OAAO,cAAc,MAAM;AAAA,EAEvE;AAEA,QAAM,eAAe,CAAC,SAAqC;AACzD,UAAM,aAAa,YAAY;AA1KnC;AA2KM,YAAM,kBAAkB,MAAMC,UAAQ,aAAQ,YAAR,YAAmB,CAAC,CAAC;AAC3D,aAAO;AAAA,QACL;AAAA,QACA,wBAAwB,OAAO;AAAA,MACjC;AAAA,IACF;AAEA,WAAO;AAAA,MACL,UAAU,iBAAiB,IAAI;AAAA,MAC/B,SAAS;AAAA,MACT,OAAO,SACH,uBAAuB,QAAQ,QAAQ,KAAK,IAC5C,QAAQ;AAAA,MACZ,SAAS,YAAY;AAAA,IACvB;AAAA,EACF;AAEA,QAAM,kBAAkB,CAAC,YAAiC;AA5L5D;AA6LI,WAAO,IAAI,gCAAgC,SAAS;AAAA,MAClD,GAAG,aAAa,MAAM;AAAA,MACtB,aAAY,aAAQ,eAAR,YAAsB;AAAA,MAClC,eAAe,OAAO;AAAA,QACpB,KAAK;AAAA;AAAA,UAEH;AAAA;AAAA,UAEA;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EAC
H;AAEA,QAAM,uBAAuB,CAAC,YAC5B,IAAI,2BAA2B,SAAS,aAAa,WAAW,CAAC;AAEnE,QAAM,mBAAmB,CAAC,YACxB,IAAI,uBAAuB,SAAS,aAAa,OAAO,CAAC;AAE3D,QAAM,mBAAmB,CAAC,YAAmC;AAjN/D;AAkNI,eAAI,uBAAuB,SAAS;AAAA,MAClC,GAAG,aAAa,OAAO;AAAA,MACvB,aAAY,aAAQ,eAAR,YAAsB;AAAA,IACpC,CAAC;AAAA;AAEH,QAAM,WAAW,SAAU,SAA8B;AACvD,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,uBAAuB;AAChC,WAAS,gBAAgB;AACzB,WAAS,iBAAiB;AAC1B,WAAS,qBAAqB;AAC9B,WAAS,QAAQ;AACjB,WAAS,aAAa;AACtB,WAAS,QAAQ;AACjB,WAAS,QAAQ;AAEjB,SAAO;AACT;;;AFrNO,SAASC,cACd,UAAwC,CAAC,GACnB;AACtB,QAAM,SAASC,qBAAoB;AAAA,IACjC,cAAc,QAAQ;AAAA,IACtB,yBAAyB;AAAA,EAC3B,CAAC;AAED,MAAI,QAAQ;AACV,WAAO,aAAqB,OAAO;AAAA,EACrC;AAEA,SAAO,aAAqB;AAAA,IAC1B,GAAG;AAAA,IACH,SAAS,aAAa;AAAA,MACpB,eAAe,UAAU,MAAM;AAAA,QAC7B,QAAQ;AAAA,MACV,CAAC;AAAA,MACD,GAAI,MAAMC,SAAQ,QAAQ,OAAO;AAAA,IACnC;AAAA,EACF,CAAC;AACH;AAKO,IAAM,SAASF,cAAa;","names":["loadOptionalSetting","resolve","resolve","z","z","z","combineHeaders","createJsonResponseHandler","parseProviderOptions","postJsonToApi","resolve","z","parseProviderOptions","postJsonToApi","combineHeaders","resolve","createJsonResponseHandler","z","combineHeaders","convertUint8ArrayToBase64","createJsonResponseHandler","parseProviderOptions","postJsonToApi","resolve","z","parseProviderOptions","convertUint8ArrayToBase64","postJsonToApi","combineHeaders","resolve","createJsonResponseHandler","z","resolve","createVertex","loadOptionalSetting","resolve"]}
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@ai-sdk/google-vertex",
-  "version": "4.0.40",
+  "version": "4.0.41",
   "license": "Apache-2.0",
   "sideEffects": false,
   "main": "./dist/index.js",
@@ -48,10 +48,10 @@
   },
   "dependencies": {
     "google-auth-library": "^10.5.0",
-    "@ai-sdk/anthropic": "3.0.
-    "@ai-sdk/google": "3.0.
-    "@ai-sdk/provider": "3.0.
-    "@ai-sdk/provider-utils": "4.0.
+    "@ai-sdk/anthropic": "3.0.35",
+    "@ai-sdk/google": "3.0.20",
+    "@ai-sdk/provider": "3.0.7",
+    "@ai-sdk/provider-utils": "4.0.13"
   },
   "devDependencies": {
     "@types/node": "20.17.24",

package/src/google-vertex-provider.ts
CHANGED

@@ -1,5 +1,10 @@
 import { GoogleGenerativeAILanguageModel } from '@ai-sdk/google/internal';
-import {
+import {
+  Experimental_VideoModelV3,
+  ImageModelV3,
+  LanguageModelV3,
+  ProviderV3,
+} from '@ai-sdk/provider';
 import {
   FetchFunction,
   generateId,
@@ -19,6 +24,8 @@ import { GoogleVertexImageModel } from './google-vertex-image-model';
 import { GoogleVertexImageModelId } from './google-vertex-image-settings';
 import { GoogleVertexModelId } from './google-vertex-options';
 import { googleVertexTools } from './google-vertex-tools';
+import { GoogleVertexVideoModel } from './google-vertex-video-model';
+import { GoogleVertexVideoModelId } from './google-vertex-video-settings';
 
 const EXPRESS_MODE_BASE_URL =
   'https://aiplatform.googleapis.com/v1/publishers/google';
@@ -58,6 +65,11 @@ export interface GoogleVertexProvider extends ProviderV3 {
    */
   imageModel(modelId: GoogleVertexImageModelId): ImageModelV3;
 
+  /**
+   * Creates a model for video generation.
+   */
+  video(modelId: GoogleVertexVideoModelId): Experimental_VideoModelV3;
+
   tools: typeof googleVertexTools;
 
   /**
@@ -195,6 +207,12 @@ export function createVertex(
   const createImageModel = (modelId: GoogleVertexImageModelId) =>
     new GoogleVertexImageModel(modelId, createConfig('image'));
 
+  const createVideoModel = (modelId: GoogleVertexVideoModelId) =>
+    new GoogleVertexVideoModel(modelId, {
+      ...createConfig('video'),
+      generateId: options.generateId ?? generateId,
+    });
+
   const provider = function (modelId: GoogleVertexModelId) {
     if (new.target) {
       throw new Error(
@@ -211,6 +229,7 @@ export function createVertex(
   provider.textEmbeddingModel = createEmbeddingModel;
   provider.image = createImageModel;
   provider.imageModel = createImageModel;
+  provider.video = createVideoModel;
   provider.tools = googleVertexTools;
 
   return provider;
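
Taken together, these changes wire a video() factory into the provider. Below is a minimal sketch of exercising it directly through the Experimental_VideoModelV3 interface. The model id 'veo-2.0-generate-001' and the project/location values are assumptions for illustration (the real ids live in src/google-vertex-video-settings.ts, which is not reproduced here), and the option names follow the doGenerate implementation embedded in the source map above; depending on the Experimental_VideoModelV3 call-options type, additional fields (seed, resolution, duration, image, headers, abortSignal) may need to be supplied explicitly.

// Sketch: calling the new video model directly via its doGenerate method.
// Assumptions: 'veo-2.0-generate-001' is a valid GoogleVertexVideoModelId and
// the project/location below (or GOOGLE_VERTEX_PROJECT / GOOGLE_VERTEX_LOCATION)
// point at a project with Vertex AI enabled.
import { createVertex } from '@ai-sdk/google-vertex';

async function main() {
  const vertex = createVertex({
    project: 'my-gcp-project', // assumed project id
    location: 'us-central1', // assumed region
  });

  const videoModel = vertex.video('veo-2.0-generate-001');

  const { videos, warnings } = await videoModel.doGenerate({
    prompt: 'A timelapse of clouds moving over a mountain range',
    n: 1,
    aspectRatio: '16:9',
    providerOptions: {
      vertex: {
        generateAudio: false,
        pollIntervalMs: 5000, // poll the long-running operation every 5 s instead of the 10 s default
        pollTimeoutMs: 300000, // give up after 5 minutes
      },
    },
  });

  // Each entry is either { type: 'base64', data, mediaType } or
  // { type: 'url', url, mediaType } (a GCS URI when gcsOutputDirectory is set).
  console.log(videos.length, warnings);
}

main().catch(console.error);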