@ai-sdk/google 0.0.0-85f9a635-20240518005312
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +13 -0
- package/README.md +75 -0
- package/dist/index.d.mts +95 -0
- package/dist/index.d.ts +95 -0
- package/dist/index.js +516 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +499 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +67 -0
package/dist/index.mjs
ADDED
@@ -0,0 +1,499 @@
// src/google-facade.ts
import {
  generateId,
  loadApiKey,
  withoutTrailingSlash
} from "@ai-sdk/provider-utils";

// src/google-generative-ai-language-model.ts
import {
  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
} from "@ai-sdk/provider";
import {
  createEventSourceResponseHandler,
  createJsonResponseHandler,
  postJsonToApi
} from "@ai-sdk/provider-utils";
import { z as z2 } from "zod";

// src/convert-to-google-generative-ai-messages.ts
import {
  UnsupportedFunctionalityError
} from "@ai-sdk/provider";
import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
function convertToGoogleGenerativeAIMessages(prompt) {
  const messages = [];
  for (const { role, content } of prompt) {
    switch (role) {
      case "system": {
        messages.push({ role: "user", parts: [{ text: content }] });
        messages.push({ role: "model", parts: [{ text: "" }] });
        break;
      }
      case "user": {
        messages.push({
          role: "user",
          parts: content.map((part) => {
            var _a;
            switch (part.type) {
              case "text": {
                return { text: part.text };
              }
              case "image": {
                if (part.image instanceof URL) {
                  throw new UnsupportedFunctionalityError({
                    functionality: "URL image parts"
                  });
                } else {
                  return {
                    inlineData: {
                      mimeType: (_a = part.mimeType) != null ? _a : "image/jpeg",
                      data: convertUint8ArrayToBase64(part.image)
                    }
                  };
                }
              }
            }
          })
        });
        break;
      }
      case "assistant": {
        messages.push({
          role: "model",
          parts: content.map((part) => {
            switch (part.type) {
              case "text": {
                return part.text.length === 0 ? void 0 : { text: part.text };
              }
              case "tool-call": {
                return {
                  functionCall: {
                    name: part.toolName,
                    args: part.args
                  }
                };
              }
            }
          }).filter(
            (part) => part !== void 0
          )
        });
        break;
      }
      case "tool": {
        messages.push({
          role: "user",
          parts: content.map((part) => ({
            functionResponse: {
              name: part.toolName,
              response: part.result
            }
          }))
        });
        break;
      }
      default: {
        const _exhaustiveCheck = role;
        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
      }
    }
  }
  return messages;
}

// src/google-error.ts
import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
import { z } from "zod";
var googleErrorDataSchema = z.object({
  error: z.object({
    code: z.number().nullable(),
    message: z.string(),
    status: z.string()
  })
});
var googleFailedResponseHandler = createJsonErrorResponseHandler({
  errorSchema: googleErrorDataSchema,
  errorToMessage: (data) => data.error.message
});

// src/map-google-generative-ai-finish-reason.ts
function mapGoogleGenerativeAIFinishReason({
  finishReason,
  hasToolCalls
}) {
  switch (finishReason) {
    case "STOP":
      return hasToolCalls ? "tool-calls" : "stop";
    case "MAX_TOKENS":
      return "length";
    case "RECITATION":
    case "SAFETY":
      return "content-filter";
    case "FINISH_REASON_UNSPECIFIED":
    case "OTHER":
    default:
      return "other";
  }
}

// src/google-generative-ai-language-model.ts
var GoogleGenerativeAILanguageModel = class {
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    this.defaultObjectGenerationMode = "json";
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  getArgs({
    mode,
    prompt,
    maxTokens,
    temperature,
    topP,
    frequencyPenalty,
    presencePenalty,
    seed
  }) {
    var _a;
    const type = mode.type;
    const warnings = [];
    if (frequencyPenalty != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "frequencyPenalty"
      });
    }
    if (presencePenalty != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "presencePenalty"
      });
    }
    if (seed != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "seed"
      });
    }
    const generationConfig = {
      // model specific settings:
      topK: this.settings.topK,
      // standardized settings:
      maxOutputTokens: maxTokens,
      temperature,
      topP
    };
    const contents = convertToGoogleGenerativeAIMessages(prompt);
    switch (type) {
      case "regular": {
        const functionDeclarations = (_a = mode.tools) == null ? void 0 : _a.map((tool) => {
          var _a2;
          return {
            name: tool.name,
            description: (_a2 = tool.description) != null ? _a2 : "",
            parameters: prepareJsonSchema(tool.parameters)
          };
        });
        return {
          args: {
            generationConfig,
            contents,
            tools: functionDeclarations == null ? void 0 : { functionDeclarations }
          },
          warnings
        };
      }
      case "object-json": {
        return {
          args: {
            generationConfig: {
              ...generationConfig,
              response_mime_type: "application/json"
            },
            contents
          },
          warnings
        };
      }
      case "object-tool": {
        throw new UnsupportedFunctionalityError2({
          functionality: "object-tool mode"
        });
      }
      case "object-grammar": {
        throw new UnsupportedFunctionalityError2({
          functionality: "object-grammar mode"
        });
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  async doGenerate(options) {
    var _a, _b;
    const { args, warnings } = this.getArgs(options);
    const { responseHeaders, value: response } = await postJsonToApi({
      url: `${this.config.baseURL}/${this.modelId}:generateContent`,
      headers: this.config.headers(),
      body: args,
      failedResponseHandler: googleFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(responseSchema),
      abortSignal: options.abortSignal
    });
    const { contents: rawPrompt, ...rawSettings } = args;
    const candidate = response.candidates[0];
    const toolCalls = getToolCallsFromParts({
      parts: candidate.content.parts,
      generateId: this.config.generateId
    });
    const usageMetadata = response.usageMetadata;
    return {
      text: getTextFromParts(candidate.content.parts),
      toolCalls,
      finishReason: mapGoogleGenerativeAIFinishReason({
        finishReason: candidate.finishReason,
        hasToolCalls: toolCalls != null && toolCalls.length > 0
      }),
      usage: {
        promptTokens: (_a = usageMetadata == null ? void 0 : usageMetadata.promptTokenCount) != null ? _a : NaN,
        completionTokens: (_b = usageMetadata == null ? void 0 : usageMetadata.candidatesTokenCount) != null ? _b : NaN
      },
      rawCall: { rawPrompt, rawSettings },
      rawResponse: { headers: responseHeaders },
      warnings
    };
  }
  async doStream(options) {
    const { args, warnings } = this.getArgs(options);
    const { responseHeaders, value: response } = await postJsonToApi({
      url: `${this.config.baseURL}/${this.modelId}:streamGenerateContent?alt=sse`,
      headers: this.config.headers(),
      body: args,
      failedResponseHandler: googleFailedResponseHandler,
      successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),
      abortSignal: options.abortSignal
    });
    const { contents: rawPrompt, ...rawSettings } = args;
    let finishReason = "other";
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    const generateId2 = this.config.generateId;
    let hasToolCalls = false;
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            var _a, _b;
            if (!chunk.success) {
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            const candidate = value.candidates[0];
            if ((candidate == null ? void 0 : candidate.finishReason) != null) {
              finishReason = mapGoogleGenerativeAIFinishReason({
                finishReason: candidate.finishReason,
                hasToolCalls
              });
            }
            const usageMetadata = value.usageMetadata;
            if (usageMetadata != null) {
              usage = {
                promptTokens: (_a = usageMetadata.promptTokenCount) != null ? _a : NaN,
                completionTokens: (_b = usageMetadata.candidatesTokenCount) != null ? _b : NaN
              };
            }
            const content = candidate.content;
            if (content == null) {
              return;
            }
            const deltaText = getTextFromParts(content.parts);
            if (deltaText != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: deltaText
              });
            }
            const toolCallDeltas = getToolCallsFromParts({
              parts: content.parts,
              generateId: generateId2
            });
            if (toolCallDeltas != null) {
              for (const toolCall of toolCallDeltas) {
                controller.enqueue({
                  type: "tool-call-delta",
                  toolCallType: "function",
                  toolCallId: toolCall.toolCallId,
                  toolName: toolCall.toolName,
                  argsTextDelta: toolCall.args
                });
                controller.enqueue({
                  type: "tool-call",
                  toolCallType: "function",
                  toolCallId: toolCall.toolCallId,
                  toolName: toolCall.toolName,
                  args: toolCall.args
                });
                hasToolCalls = true;
              }
            }
          },
          flush(controller) {
            controller.enqueue({ type: "finish", finishReason, usage });
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      rawResponse: { headers: responseHeaders },
      warnings
    };
  }
};
function prepareJsonSchema(jsonSchema) {
  if (typeof jsonSchema !== "object") {
    return jsonSchema;
  }
  if (Array.isArray(jsonSchema)) {
    return jsonSchema.map(prepareJsonSchema);
  }
  const result = {};
  for (const [key, value] of Object.entries(jsonSchema)) {
    if (key === "additionalProperties" || key === "$schema") {
      continue;
    }
    result[key] = prepareJsonSchema(value);
  }
  return result;
}
function getToolCallsFromParts({
  parts,
  generateId: generateId2
}) {
  const functionCallParts = parts.filter(
    (part) => "functionCall" in part
  );
  return functionCallParts.length === 0 ? void 0 : functionCallParts.map((part) => ({
    toolCallType: "function",
    toolCallId: generateId2(),
    toolName: part.functionCall.name,
    args: JSON.stringify(part.functionCall.args)
  }));
}
function getTextFromParts(parts) {
  const textParts = parts.filter((part) => "text" in part);
  return textParts.length === 0 ? void 0 : textParts.map((part) => part.text).join("");
}
var contentSchema = z2.object({
  role: z2.string(),
  parts: z2.array(
    z2.union([
      z2.object({
        text: z2.string()
      }),
      z2.object({
        functionCall: z2.object({
          name: z2.string(),
          args: z2.unknown()
        })
      })
    ])
  )
});
var responseSchema = z2.object({
  candidates: z2.array(
    z2.object({
      content: contentSchema,
      finishReason: z2.string().optional()
    })
  ),
  usageMetadata: z2.object({
    promptTokenCount: z2.number(),
    candidatesTokenCount: z2.number(),
    totalTokenCount: z2.number()
  }).optional()
});
var chunkSchema = z2.object({
  candidates: z2.array(
    z2.object({
      content: contentSchema.optional(),
      finishReason: z2.string().optional()
    })
  ),
  usageMetadata: z2.object({
    promptTokenCount: z2.number(),
    candidatesTokenCount: z2.number(),
    totalTokenCount: z2.number()
  }).optional()
});

// src/google-facade.ts
var Google = class {
  /**
   * Creates a new Google provider instance.
   */
  constructor(options = {}) {
    var _a, _b, _c;
    this.baseURL = (_b = withoutTrailingSlash((_a = options.baseURL) != null ? _a : options.baseUrl)) != null ? _b : "https://generativelanguage.googleapis.com/v1beta";
    this.apiKey = options.apiKey;
    this.headers = options.headers;
    this.generateId = (_c = options.generateId) != null ? _c : generateId;
  }
  get baseConfig() {
    return {
      baseURL: this.baseURL,
      headers: () => ({
        "x-goog-api-key": loadApiKey({
          apiKey: this.apiKey,
          environmentVariableName: "GOOGLE_GENERATIVE_AI_API_KEY",
          description: "Google Generative AI"
        }),
        ...this.headers
      })
    };
  }
  /**
   * @deprecated Use `chat()` instead.
   */
  generativeAI(modelId, settings = {}) {
    return this.chat(modelId, settings);
  }
  chat(modelId, settings = {}) {
    return new GoogleGenerativeAILanguageModel(modelId, settings, {
      provider: "google.generative-ai",
      ...this.baseConfig,
      generateId: this.generateId
    });
  }
};

// src/google-provider.ts
function createGoogleGenerativeAI(options = {}) {
  const google2 = new Google(options);
  const provider = function(modelId, settings) {
    if (new.target) {
      throw new Error(
        "The Google Generative AI model function cannot be called with the new keyword."
      );
    }
    return google2.chat(modelId, settings);
  };
  provider.chat = google2.chat.bind(google2);
  provider.generativeAI = google2.generativeAI.bind(google2);
  return provider;
}
var google = createGoogleGenerativeAI();
export {
  Google,
  createGoogleGenerativeAI,
  google
};
//# sourceMappingURL=index.mjs.map
package/dist/index.mjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/google-facade.ts","../src/google-generative-ai-language-model.ts","../src/convert-to-google-generative-ai-messages.ts","../src/google-error.ts","../src/map-google-generative-ai-finish-reason.ts","../src/google-provider.ts"],"sourcesContent":["import {\n generateId,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { GoogleGenerativeAILanguageModel } from './google-generative-ai-language-model';\nimport {\n GoogleGenerativeAIModelId,\n GoogleGenerativeAISettings,\n} from './google-generative-ai-settings';\nimport { GoogleGenerativeAIProviderSettings } from './google-provider';\n\n/**\n * @deprecated Use `createGoogleGenerativeAI` instead.\n */\nexport class Google {\n /**\n * Base URL for the Google API calls.\n */\n readonly baseURL: string;\n\n readonly apiKey?: string;\n\n readonly headers?: Record<string, string>;\n\n private readonly generateId: () => string;\n\n /**\n * Creates a new Google provider instance.\n */\n constructor(options: GoogleGenerativeAIProviderSettings = {}) {\n this.baseURL =\n withoutTrailingSlash(options.baseURL ?? options.baseUrl) ??\n 'https://generativelanguage.googleapis.com/v1beta';\n this.apiKey = options.apiKey;\n this.headers = options.headers;\n this.generateId = options.generateId ?? generateId;\n }\n\n private get baseConfig() {\n return {\n baseURL: this.baseURL,\n headers: () => ({\n 'x-goog-api-key': loadApiKey({\n apiKey: this.apiKey,\n environmentVariableName: 'GOOGLE_GENERATIVE_AI_API_KEY',\n description: 'Google Generative AI',\n }),\n ...this.headers,\n }),\n };\n }\n\n /**\n * @deprecated Use `chat()` instead.\n */\n generativeAI(\n modelId: GoogleGenerativeAIModelId,\n settings: GoogleGenerativeAISettings = {},\n ) {\n return this.chat(modelId, settings);\n }\n\n chat(\n modelId: GoogleGenerativeAIModelId,\n settings: GoogleGenerativeAISettings = {},\n ) {\n return new GoogleGenerativeAILanguageModel(modelId, settings, {\n provider: 'google.generative-ai',\n ...this.baseConfig,\n generateId: this.generateId,\n });\n }\n}\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n ParseResult,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToGoogleGenerativeAIMessages } from './convert-to-google-generative-ai-messages';\nimport { googleFailedResponseHandler } from './google-error';\nimport { GoogleGenerativeAIContentPart } from './google-generative-ai-prompt';\nimport {\n GoogleGenerativeAIModelId,\n GoogleGenerativeAISettings,\n} from './google-generative-ai-settings';\nimport { mapGoogleGenerativeAIFinishReason } from './map-google-generative-ai-finish-reason';\n\ntype GoogleGenerativeAIConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n generateId: () => string;\n};\n\nexport class GoogleGenerativeAILanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = 'json';\n\n readonly modelId: GoogleGenerativeAIModelId;\n readonly settings: GoogleGenerativeAISettings;\n\n private readonly config: GoogleGenerativeAIConfig;\n\n constructor(\n modelId: GoogleGenerativeAIModelId,\n settings: GoogleGenerativeAISettings,\n config: GoogleGenerativeAIConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = 
config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n frequencyPenalty,\n presencePenalty,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (seed != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'seed',\n });\n }\n\n const generationConfig = {\n // model specific settings:\n topK: this.settings.topK,\n\n // standardized settings:\n maxOutputTokens: maxTokens,\n temperature,\n topP,\n };\n\n const contents = convertToGoogleGenerativeAIMessages(prompt);\n\n switch (type) {\n case 'regular': {\n const functionDeclarations = mode.tools?.map(tool => ({\n name: tool.name,\n description: tool.description ?? '',\n parameters: prepareJsonSchema(tool.parameters),\n }));\n\n return {\n args: {\n generationConfig,\n contents,\n tools:\n functionDeclarations == null\n ? undefined\n : { functionDeclarations },\n },\n warnings,\n };\n }\n\n case 'object-json': {\n return {\n args: {\n generationConfig: {\n ...generationConfig,\n response_mime_type: 'application/json',\n },\n contents,\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-tool mode',\n });\n }\n\n case 'object-grammar': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-grammar mode',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/${this.modelId}:generateContent`,\n headers: this.config.headers(),\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(responseSchema),\n abortSignal: options.abortSignal,\n });\n\n const { contents: rawPrompt, ...rawSettings } = args;\n const candidate = response.candidates[0];\n\n const toolCalls = getToolCallsFromParts({\n parts: candidate.content.parts,\n generateId: this.config.generateId,\n });\n\n const usageMetadata = response.usageMetadata;\n\n return {\n text: getTextFromParts(candidate.content.parts),\n toolCalls,\n finishReason: mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls: toolCalls != null && toolCalls.length > 0,\n }),\n usage: {\n promptTokens: usageMetadata?.promptTokenCount ?? NaN,\n completionTokens: usageMetadata?.candidatesTokenCount ?? 
NaN,\n },\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/${this.modelId}:streamGenerateContent?alt=sse`,\n headers: this.config.headers(),\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),\n abortSignal: options.abortSignal,\n });\n\n const { contents: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'other';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n\n const generateId = this.config.generateId;\n let hasToolCalls = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n const candidate = value.candidates[0];\n\n if (candidate?.finishReason != null) {\n finishReason = mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls,\n });\n }\n\n const usageMetadata = value.usageMetadata;\n\n if (usageMetadata != null) {\n usage = {\n promptTokens: usageMetadata.promptTokenCount ?? NaN,\n completionTokens: usageMetadata.candidatesTokenCount ?? NaN,\n };\n }\n\n const content = candidate.content;\n\n if (content == null) {\n return;\n }\n\n const deltaText = getTextFromParts(content.parts);\n if (deltaText != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: deltaText,\n });\n }\n\n const toolCallDeltas = getToolCallsFromParts({\n parts: content.parts,\n generateId,\n });\n\n if (toolCallDeltas != null) {\n for (const toolCall of toolCallDeltas) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n argsTextDelta: toolCall.args,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n args: toolCall.args,\n });\n\n hasToolCalls = true;\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({ type: 'finish', finishReason, usage });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n };\n }\n}\n\n// Removes all \"additionalProperty\" and \"$schema\" properties from the object (recursively)\n// (not supported by Google Generative AI)\nfunction prepareJsonSchema(jsonSchema: any): unknown {\n if (typeof jsonSchema !== 'object') {\n return jsonSchema;\n }\n\n if (Array.isArray(jsonSchema)) {\n return jsonSchema.map(prepareJsonSchema);\n }\n\n const result: Record<string, any> = {};\n\n for (const [key, value] of Object.entries(jsonSchema)) {\n if (key === 'additionalProperties' || key === '$schema') {\n continue;\n }\n\n result[key] = prepareJsonSchema(value);\n }\n\n return result;\n}\n\nfunction getToolCallsFromParts({\n parts,\n generateId,\n}: {\n parts: z.infer<typeof contentSchema>['parts'];\n generateId: () => string;\n}) {\n const functionCallParts = parts.filter(\n part => 
'functionCall' in part,\n ) as Array<\n GoogleGenerativeAIContentPart & {\n functionCall: { name: string; args: unknown };\n }\n >;\n\n return functionCallParts.length === 0\n ? undefined\n : functionCallParts.map(part => ({\n toolCallType: 'function' as const,\n toolCallId: generateId(),\n toolName: part.functionCall.name,\n args: JSON.stringify(part.functionCall.args),\n }));\n}\n\nfunction getTextFromParts(parts: z.infer<typeof contentSchema>['parts']) {\n const textParts = parts.filter(part => 'text' in part) as Array<\n GoogleGenerativeAIContentPart & { text: string }\n >;\n\n return textParts.length === 0\n ? undefined\n : textParts.map(part => part.text).join('');\n}\n\nconst contentSchema = z.object({\n role: z.string(),\n parts: z.array(\n z.union([\n z.object({\n text: z.string(),\n }),\n z.object({\n functionCall: z.object({\n name: z.string(),\n args: z.unknown(),\n }),\n }),\n ]),\n ),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst responseSchema = z.object({\n candidates: z.array(\n z.object({\n content: contentSchema,\n finishReason: z.string().optional(),\n }),\n ),\n usageMetadata: z\n .object({\n promptTokenCount: z.number(),\n candidatesTokenCount: z.number(),\n totalTokenCount: z.number(),\n })\n .optional(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst chunkSchema = z.object({\n candidates: z.array(\n z.object({\n content: contentSchema.optional(),\n finishReason: z.string().optional(),\n }),\n ),\n usageMetadata: z\n .object({\n promptTokenCount: z.number(),\n candidatesTokenCount: z.number(),\n totalTokenCount: z.number(),\n })\n .optional(),\n});\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport {\n GoogleGenerativeAIContentPart,\n GoogleGenerativeAIPrompt,\n} from './google-generative-ai-prompt';\n\nexport function convertToGoogleGenerativeAIMessages(\n prompt: LanguageModelV1Prompt,\n): GoogleGenerativeAIPrompt {\n const messages: GoogleGenerativeAIPrompt = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n // system message becomes user message:\n messages.push({ role: 'user', parts: [{ text: content }] });\n\n // required for to ensure turn-taking:\n messages.push({ role: 'model', parts: [{ text: '' }] });\n\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n parts: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { text: part.text };\n }\n case 'image': {\n if (part.image instanceof URL) {\n throw new UnsupportedFunctionalityError({\n functionality: 'URL image parts',\n });\n } else {\n return {\n inlineData: {\n mimeType: part.mimeType ?? 'image/jpeg',\n data: convertUint8ArrayToBase64(part.image),\n },\n };\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n messages.push({\n role: 'model',\n parts: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text.length === 0\n ? 
undefined\n : { text: part.text };\n }\n case 'tool-call': {\n return {\n functionCall: {\n name: part.toolName,\n args: part.args,\n },\n };\n }\n }\n })\n .filter(\n part => part !== undefined,\n ) as GoogleGenerativeAIContentPart[],\n });\n break;\n }\n\n case 'tool': {\n messages.push({\n role: 'user',\n parts: content.map(part => ({\n functionResponse: {\n name: part.toolName,\n response: part.result,\n },\n })),\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst googleErrorDataSchema = z.object({\n error: z.object({\n code: z.number().nullable(),\n message: z.string(),\n status: z.string(),\n }),\n});\n\nexport type GoogleErrorData = z.infer<typeof googleErrorDataSchema>;\n\nexport const googleFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: googleErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapGoogleGenerativeAIFinishReason({\n finishReason,\n hasToolCalls,\n}: {\n finishReason: string | null | undefined;\n hasToolCalls: boolean;\n}): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'STOP':\n return hasToolCalls ? 'tool-calls' : 'stop';\n case 'MAX_TOKENS':\n return 'length';\n case 'RECITATION':\n case 'SAFETY':\n return 'content-filter';\n case 'FINISH_REASON_UNSPECIFIED':\n case 'OTHER':\n default:\n return 'other';\n }\n}\n","import { Google } from './google-facade';\nimport { GoogleGenerativeAILanguageModel } from './google-generative-ai-language-model';\nimport {\n GoogleGenerativeAIModelId,\n GoogleGenerativeAISettings,\n} from './google-generative-ai-settings';\n\nexport interface GoogleGenerativeAIProvider {\n (\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): GoogleGenerativeAILanguageModel;\n\n chat(\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): GoogleGenerativeAILanguageModel;\n\n /**\n * @deprecated Use `chat()` instead.\n */\n generativeAI(\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ): GoogleGenerativeAILanguageModel;\n}\n\nexport interface GoogleGenerativeAIProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. 
to use proxy servers.\nThe default prefix is `https://generativelanguage.googleapis.com/v1beta`.\n */\n baseURL?: string;\n\n /**\n@deprecated Use `baseURL` instead.\n */\n baseUrl?: string;\n\n /**\nAPI key that is being send using the `x-goog-api-key` header.\nIt defaults to the `GOOGLE_GENERATIVE_AI_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n generateId?: () => string;\n}\n\n/**\nCreate a Google Generative AI provider instance.\n */\nexport function createGoogleGenerativeAI(\n options: GoogleGenerativeAIProviderSettings = {},\n): GoogleGenerativeAIProvider {\n const google = new Google(options);\n\n const provider = function (\n modelId: GoogleGenerativeAIModelId,\n settings?: GoogleGenerativeAISettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The Google Generative AI model function cannot be called with the new keyword.',\n );\n }\n\n return google.chat(modelId, settings);\n };\n\n provider.chat = google.chat.bind(google);\n provider.generativeAI = google.generativeAI.bind(google);\n\n return provider as GoogleGenerativeAIProvider;\n}\n\n/**\nDefault Google Generative AI provider instance.\n */\nexport const google = createGoogleGenerativeAI();\n"],"mappings":";AAAA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;;;ACJP;AAAA,EAKE,iCAAAA;AAAA,OACK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;;;ACblB;AAAA,EAEE;AAAA,OACK;AACP,SAAS,iCAAiC;AAMnC,SAAS,oCACd,QAC0B;AAC1B,QAAM,WAAqC,CAAC;AAE5C,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AAEb,iBAAS,KAAK,EAAE,MAAM,QAAQ,OAAO,CAAC,EAAE,MAAM,QAAQ,CAAC,EAAE,CAAC;AAG1D,iBAAS,KAAK,EAAE,MAAM,SAAS,OAAO,CAAC,EAAE,MAAM,GAAG,CAAC,EAAE,CAAC;AAEtD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QAAQ,IAAI,UAAQ;AA9BrC;AA+BY,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,KAAK,KAAK;AAAA,cAC3B;AAAA,cACA,KAAK,SAAS;AACZ,oBAAI,KAAK,iBAAiB,KAAK;AAC7B,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eAAe;AAAA,kBACjB,CAAC;AAAA,gBACH,OAAO;AACL,yBAAO;AAAA,oBACL,YAAY;AAAA,sBACV,WAAU,UAAK,aAAL,YAAiB;AAAA,sBAC3B,MAAM,0BAA0B,KAAK,KAAK;AAAA,oBAC5C;AAAA,kBACF;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QACJ,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK,KAAK,WAAW,IACxB,SACA,EAAE,MAAM,KAAK,KAAK;AAAA,cACxB;AAAA,cACA,KAAK,aAAa;AAChB,uBAAO;AAAA,kBACL,cAAc;AAAA,oBACZ,MAAM,KAAK;AAAA,oBACX,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC,EACA;AAAA,YACC,UAAQ,SAAS;AAAA,UACnB;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QAAQ,IAAI,WAAS;AAAA,YAC1B,kBAAkB;AAAA,cAChB,MAAM,KAAK;AAAA,cACX,UAAU,KAAK;AAAA,YACjB;AAAA,UACF,EAAE;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACvGA,SAAS,sCAAsC;AAC/C,SAAS,SAAS;AAElB,IAAM,wBAAwB,EAAE,OAAO;AAAA,EACrC,OAAO,EAAE,OAAO;AAAA,IACd,MAAM,EAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,SAAS,EAAE,OAAO;AAAA,IAClB,QAAQ,EAAE,OAAO;AAAA,EACnB,CAAC;AACH,CAAC;AAIM,IAAM,8BAA8B,+BAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;ACdM,SAAS,kCAAkC;AAAA,EAChD;AAAA,EACA;AACF,GAGgC;AAC9B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO,eAAe,eAAe;AAAA,IACvC,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AAAA,IACL;AACE,aAAO;AAAA,EACX;AACF;;;AHQO,IAAM,kCAAN,MAAiE;AAAA,EAStE,YACE,SACA,UACA,QACA;AAZF,SAA
S,uBAAuB;AAChC,SAAS,8BAA8B;AAYrC,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA9DnD;AA+DI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,mBAAmB;AAAA;AAAA,MAEvB,MAAM,KAAK,SAAS;AAAA;AAAA,MAGpB,iBAAiB;AAAA,MACjB;AAAA,MACA;AAAA,IACF;AAEA,UAAM,WAAW,oCAAoC,MAAM;AAE3D,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,cAAM,wBAAuB,UAAK,UAAL,mBAAY,IAAI,UAAK;AAtG1D,cAAAC;AAsG8D;AAAA,YACpD,MAAM,KAAK;AAAA,YACX,cAAaA,MAAA,KAAK,gBAAL,OAAAA,MAAoB;AAAA,YACjC,YAAY,kBAAkB,KAAK,UAAU;AAAA,UAC/C;AAAA;AAEA,eAAO;AAAA,UACL,MAAM;AAAA,YACJ;AAAA,YACA;AAAA,YACA,OACE,wBAAwB,OACpB,SACA,EAAE,qBAAqB;AAAA,UAC/B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,kBAAkB;AAAA,cAChB,GAAG;AAAA,cACH,oBAAoB;AAAA,YACtB;AAAA,YACA;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAIC,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,kBAAkB;AACrB,cAAM,IAAIA,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA3JjE;AA4JI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI,KAAK,OAAO;AAAA,MAC3C,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,2BAA2B,0BAA0B,cAAc;AAAA,MACnE,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,YAAY,SAAS,WAAW,CAAC;AAEvC,UAAM,YAAY,sBAAsB;AAAA,MACtC,OAAO,UAAU,QAAQ;AAAA,MACzB,YAAY,KAAK,OAAO;AAAA,IAC1B,CAAC;AAED,UAAM,gBAAgB,SAAS;AAE/B,WAAO;AAAA,MACL,MAAM,iBAAiB,UAAU,QAAQ,KAAK;AAAA,MAC9C;AAAA,MACA,cAAc,kCAAkC;AAAA,QAC9C,cAAc,UAAU;AAAA,QACxB,cAAc,aAAa,QAAQ,UAAU,SAAS;AAAA,MACxD,CAAC;AAAA,MACD,OAAO;AAAA,QACL,eAAc,oDAAe,qBAAf,YAAmC;AAAA,QACjD,mBAAkB,oDAAe,yBAAf,YAAuC;AAAA,MAC3D;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI,KAAK,OAAO;AAAA,MAC3C,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,2BAA2B,iCAAiC,WAAW;AAAA,MACvE,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AAEA,UAAMC,cAAa,KAAK,OAAO;AAC/B,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAjOvC;AAkOY,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,kBAAM,YAAY,MAAM,WAAW,CAAC;AAEpC,iBAAI,uCAAW,iBAAgB,MAAM;AACnC,6BAAe,kCAAkC;AAAA,gBAC/C,cAAc,UAAU;AAAA,gBACxB;AAAA,cACF,CAAC;AAAA,YACH;AAEA,kBAAM,gBAAgB,MAAM;AAE5B,gBAAI,iBAAiB,MAAM;AACzB,sBAAQ;AAAA,gBACN,eAAc,mBAAc,qBAAd,YAAkC;AAAA,gBAChD,mBAAkB,mBAAc,yBAAd,YAAsC;AAAA,cAC1D;AAAA,YACF;AAEA,kBAAM,UAAU,UAAU;AAE1B,gBAAI,WAAW,MAAM;AACnB;AAAA,YACF;AAEA,kBAAM,YAAY,iBAAiB,QAAQ,KAAK;AAChD,gBAAI,aAAa,MAAM;AACrB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW;AAAA,cACb,CAAC;AAAA,YACH;AAEA,kBAAM,iBAAiB,sBAAsB;AAAA,cAC3C,OAAO,QAAQ;AAAA,cACf,YAAAA;AAAA,YACF,CAAC;AAED,
gBAAI,kBAAkB,MAAM;AAC1B,yBAAW,YAAY,gBAAgB;AACrC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS;AAAA,kBACnB,eAAe,SAAS;AAAA,gBAC1B,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS;AAAA,kBACnB,MAAM,SAAS;AAAA,gBACjB,CAAC;AAED,+BAAe;AAAA,cACjB;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,UAAU,cAAc,MAAM,CAAC;AAAA,UAC5D;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,IACF;AAAA,EACF;AACF;AAIA,SAAS,kBAAkB,YAA0B;AACnD,MAAI,OAAO,eAAe,UAAU;AAClC,WAAO;AAAA,EACT;AAEA,MAAI,MAAM,QAAQ,UAAU,GAAG;AAC7B,WAAO,WAAW,IAAI,iBAAiB;AAAA,EACzC;AAEA,QAAM,SAA8B,CAAC;AAErC,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,UAAU,GAAG;AACrD,QAAI,QAAQ,0BAA0B,QAAQ,WAAW;AACvD;AAAA,IACF;AAEA,WAAO,GAAG,IAAI,kBAAkB,KAAK;AAAA,EACvC;AAEA,SAAO;AACT;AAEA,SAAS,sBAAsB;AAAA,EAC7B;AAAA,EACA,YAAAA;AACF,GAGG;AACD,QAAM,oBAAoB,MAAM;AAAA,IAC9B,UAAQ,kBAAkB;AAAA,EAC5B;AAMA,SAAO,kBAAkB,WAAW,IAChC,SACA,kBAAkB,IAAI,WAAS;AAAA,IAC7B,cAAc;AAAA,IACd,YAAYA,YAAW;AAAA,IACvB,UAAU,KAAK,aAAa;AAAA,IAC5B,MAAM,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,EAC7C,EAAE;AACR;AAEA,SAAS,iBAAiB,OAA+C;AACvE,QAAM,YAAY,MAAM,OAAO,UAAQ,UAAU,IAAI;AAIrD,SAAO,UAAU,WAAW,IACxB,SACA,UAAU,IAAI,UAAQ,KAAK,IAAI,EAAE,KAAK,EAAE;AAC9C;AAEA,IAAM,gBAAgBC,GAAE,OAAO;AAAA,EAC7B,MAAMA,GAAE,OAAO;AAAA,EACf,OAAOA,GAAE;AAAA,IACPA,GAAE,MAAM;AAAA,MACNA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,cAAcA,GAAE,OAAO;AAAA,UACrB,MAAMA,GAAE,OAAO;AAAA,UACf,MAAMA,GAAE,QAAQ;AAAA,QAClB,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC;AAID,IAAM,iBAAiBA,GAAE,OAAO;AAAA,EAC9B,YAAYA,GAAE;AAAA,IACZA,GAAE,OAAO;AAAA,MACP,SAAS;AAAA,MACT,cAAcA,GAAE,OAAO,EAAE,SAAS;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,eAAeA,GACZ,OAAO;AAAA,IACN,kBAAkBA,GAAE,OAAO;AAAA,IAC3B,sBAAsBA,GAAE,OAAO;AAAA,IAC/B,iBAAiBA,GAAE,OAAO;AAAA,EAC5B,CAAC,EACA,SAAS;AACd,CAAC;AAID,IAAM,cAAcA,GAAE,OAAO;AAAA,EAC3B,YAAYA,GAAE;AAAA,IACZA,GAAE,OAAO;AAAA,MACP,SAAS,cAAc,SAAS;AAAA,MAChC,cAAcA,GAAE,OAAO,EAAE,SAAS;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,eAAeA,GACZ,OAAO;AAAA,IACN,kBAAkBA,GAAE,OAAO;AAAA,IAC3B,sBAAsBA,GAAE,OAAO;AAAA,IAC/B,iBAAiBA,GAAE,OAAO;AAAA,EAC5B,CAAC,EACA,SAAS;AACd,CAAC;;;ADhZM,IAAM,SAAN,MAAa;AAAA;AAAA;AAAA;AAAA,EAelB,YAAY,UAA8C,CAAC,GAAG;AA9BhE;AA+BI,SAAK,WACH,2BAAqB,aAAQ,YAAR,YAAmB,QAAQ,OAAO,MAAvD,YACA;AACF,SAAK,SAAS,QAAQ;AACtB,SAAK,UAAU,QAAQ;AACvB,SAAK,cAAa,aAAQ,eAAR,YAAsB;AAAA,EAC1C;AAAA,EAEA,IAAY,aAAa;AACvB,WAAO;AAAA,MACL,SAAS,KAAK;AAAA,MACd,SAAS,OAAO;AAAA,QACd,kBAAkB,WAAW;AAAA,UAC3B,QAAQ,KAAK;AAAA,UACb,yBAAyB;AAAA,UACzB,aAAa;AAAA,QACf,CAAC;AAAA,QACD,GAAG,KAAK;AAAA,MACV;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aACE,SACA,WAAuC,CAAC,GACxC;AACA,WAAO,KAAK,KAAK,SAAS,QAAQ;AAAA,EACpC;AAAA,EAEA,KACE,SACA,WAAuC,CAAC,GACxC;AACA,WAAO,IAAI,gCAAgC,SAAS,UAAU;AAAA,MAC5D,UAAU;AAAA,MACV,GAAG,KAAK;AAAA,MACR,YAAY,KAAK;AAAA,IACnB,CAAC;AAAA,EACH;AACF;;;AKjBO,SAAS,yBACd,UAA8C,CAAC,GACnB;AAC5B,QAAMC,UAAS,IAAI,OAAO,OAAO;AAEjC,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAOA,QAAO,KAAK,SAAS,QAAQ;AAAA,EACtC;AAEA,WAAS,OAAOA,QAAO,KAAK,KAAKA,OAAM;AACvC,WAAS,eAAeA,QAAO,aAAa,KAAKA,OAAM;AAEvD,SAAO;AACT;AAKO,IAAM,SAAS,yBAAyB;","names":["UnsupportedFunctionalityError","z","_a","UnsupportedFunctionalityError","generateId","z","google"]}
package/package.json
ADDED
@@ -0,0 +1,67 @@
{
  "name": "@ai-sdk/google",
  "version": "0.0.0-85f9a635-20240518005312",
  "license": "Apache-2.0",
  "sideEffects": false,
  "main": "./dist/index.js",
  "module": "./dist/index.mjs",
  "types": "./dist/index.d.ts",
  "files": [
    "dist/**/*"
  ],
  "exports": {
    "./package.json": "./package.json",
    ".": {
      "types": "./dist/index.d.ts",
      "import": "./dist/index.mjs",
      "require": "./dist/index.js"
    }
  },
  "dependencies": {
    "@ai-sdk/provider": "0.0.0-85f9a635-20240518005312",
    "@ai-sdk/provider-utils": "0.0.0-85f9a635-20240518005312"
  },
  "devDependencies": {
    "@types/node": "^18",
    "tsup": "^8",
    "typescript": "5.1.3",
    "zod": "3.22.4",
    "@vercel/ai-tsconfig": "0.0.0"
  },
  "peerDependencies": {
    "zod": "^3.0.0"
  },
  "peerDependenciesMeta": {
    "zod": {
      "optional": true
    }
  },
  "engines": {
    "node": ">=18"
  },
  "publishConfig": {
    "access": "public"
  },
  "homepage": "https://sdk.vercel.ai/docs",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/vercel/ai.git"
  },
  "bugs": {
    "url": "https://github.com/vercel/ai/issues"
  },
  "keywords": [
    "ai"
  ],
  "scripts": {
    "build": "tsup",
    "clean": "rm -rf dist",
    "dev": "tsup --watch",
    "lint": "eslint \"./**/*.ts*\"",
    "type-check": "tsc --noEmit",
    "prettier-check": "prettier --check \"./**/*.ts*\"",
    "test": "pnpm test:node && pnpm test:edge",
    "test:edge": "vitest --config vitest.edge.config.js --run",
    "test:node": "vitest --config vitest.node.config.js --run"
  }
}
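For context, a minimal usage sketch of the provider exported by dist/index.mjs above. The model ID and the topK value are illustrative, not taken from this package.

import { createGoogleGenerativeAI } from '@ai-sdk/google';

// apiKey is optional; loadApiKey falls back to the
// GOOGLE_GENERATIVE_AI_API_KEY environment variable (see baseConfig above).
const google = createGoogleGenerativeAI({
  apiKey: process.env.GOOGLE_GENERATIVE_AI_API_KEY,
});

// chat() returns a GoogleGenerativeAILanguageModel (specificationVersion "v1")
// that posts to `${baseURL}/${modelId}:generateContent` or, when streaming,
// `${baseURL}/${modelId}:streamGenerateContent?alt=sse`.
const model = google.chat('models/gemini-pro', { topK: 40 });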