ai 5.0.0-canary.2 → 5.0.0-canary.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/dist/index.d.mts +4 -4
- package/dist/index.d.ts +4 -4
- package/dist/index.js +46 -46
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +46 -46
- package/dist/index.mjs.map +1 -1
- package/package.json +5 -5
- package/rsc/dist/rsc-server.mjs +21 -24
- package/rsc/dist/rsc-server.mjs.map +1 -1
package/dist/index.mjs
CHANGED
@@ -1247,7 +1247,7 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
       return {
         role: "system",
         content: message.content,
-
+        providerOptions: (_a17 = message.providerOptions) != null ? _a17 : message.experimental_providerMetadata
       };
     }
     case "user": {
@@ -1255,13 +1255,13 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
         return {
           role: "user",
           content: [{ type: "text", text: message.content }],
-
+          providerOptions: (_b = message.providerOptions) != null ? _b : message.experimental_providerMetadata
         };
       }
       return {
         role: "user",
         content: message.content.map((part) => convertPartToLanguageModelPart(part, downloadedAssets)).filter((part) => part.type !== "text" || part.text !== ""),
-
+        providerOptions: (_c = message.providerOptions) != null ? _c : message.experimental_providerMetadata
       };
     }
     case "assistant": {
@@ -1269,7 +1269,7 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
         return {
           role: "assistant",
           content: [{ type: "text", text: message.content }],
-
+          providerOptions: (_d = message.providerOptions) != null ? _d : message.experimental_providerMetadata
         };
       }
       return {
@@ -1287,7 +1287,7 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
                 data: part.data instanceof URL ? part.data : convertDataContentToBase64String(part.data),
                 filename: part.filename,
                 mimeType: part.mimeType,
-
+                providerOptions
               };
             }
             case "reasoning": {
@@ -1295,21 +1295,21 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
                 type: "reasoning",
                 text: part.text,
                 signature: part.signature,
-
+                providerOptions
               };
             }
             case "redacted-reasoning": {
               return {
                 type: "redacted-reasoning",
                 data: part.data,
-
+                providerOptions
               };
             }
             case "text": {
               return {
                 type: "text",
                 text: part.text,
-
+                providerOptions
               };
             }
             case "tool-call": {
@@ -1318,12 +1318,12 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
                 toolCallId: part.toolCallId,
                 toolName: part.toolName,
                 args: part.args,
-
+                providerOptions
               };
             }
           }
         }),
-
+        providerOptions: (_e = message.providerOptions) != null ? _e : message.experimental_providerMetadata
       };
     }
     case "tool": {
@@ -1338,10 +1338,10 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
             result: part.result,
             content: part.experimental_content,
             isError: part.isError,
-
+            providerOptions: (_a18 = part.providerOptions) != null ? _a18 : part.experimental_providerMetadata
           };
         }),
-
+        providerOptions: (_f = message.providerOptions) != null ? _f : message.experimental_providerMetadata
       };
     }
     default: {
@@ -1379,7 +1379,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
     return {
       type: "text",
       text: part.text,
-
+      providerOptions: (_a17 = part.providerOptions) != null ? _a17 : part.experimental_providerMetadata
     };
   }
   let mimeType = part.mimeType;
@@ -1433,7 +1433,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
         type: "image",
         image: normalizedData,
         mimeType,
-
+        providerOptions: (_c = part.providerOptions) != null ? _c : part.experimental_providerMetadata
       };
     }
     case "file": {
@@ -1445,7 +1445,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
         data: normalizedData instanceof Uint8Array ? convertDataContentToBase64String(normalizedData) : normalizedData,
         filename: part.filename,
         mimeType,
-
+        providerOptions: (_d = part.providerOptions) != null ? _d : part.experimental_providerMetadata
       };
     }
   }
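Every hunk above applies the same rename: converted messages and content parts now carry a `providerOptions` field, with the legacy `experimental_providerMetadata` kept as a fallback. A minimal TypeScript sketch of that fallback pattern follows; the `SystemMessageInput` type and the example option values are illustrative stand-ins, not the package's actual types.

```ts
// Illustrative sketch only: the real message types in the ai package are richer.
type ProviderOptions = Record<string, Record<string, unknown>>;

interface SystemMessageInput {
  role: "system";
  content: string;
  providerOptions?: ProviderOptions;
  // Legacy field name, still honored as a fallback.
  experimental_providerMetadata?: ProviderOptions;
}

// Mirrors the compiled expression
//   providerOptions: (_a17 = message.providerOptions) != null ? _a17 : message.experimental_providerMetadata
function toLanguageModelSystemMessage(message: SystemMessageInput) {
  return {
    role: "system" as const,
    content: message.content,
    providerOptions: message.providerOptions ?? message.experimental_providerMetadata,
  };
}

// A message still using the legacy field resolves to the same providerOptions value.
const converted = toLanguageModelSystemMessage({
  role: "system",
  content: "You are a helpful assistant.",
  // Placeholder provider key and option, for illustration only.
  experimental_providerMetadata: { exampleProvider: { cacheResults: true } },
});
console.log(converted.providerOptions); // { exampleProvider: { cacheResults: true } }
```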
@@ -2689,8 +2689,8 @@ async function generateObject({
       fn: async (span2) => {
         var _a18, _b2, _c2, _d2, _e, _f;
         const result2 = await model.doGenerate({
-
-            type: "
+          responseFormat: {
+            type: "json",
             schema: outputStrategy.jsonSchema,
             name: schemaName,
             description: schemaDescription
@@ -2698,7 +2698,7 @@ async function generateObject({
           ...prepareCallSettings(settings),
           inputFormat: standardizedPrompt.type,
           prompt: promptMessages,
-
+          providerOptions,
           abortSignal,
           headers
         });
@@ -2795,19 +2795,19 @@ async function generateObject({
       fn: async (span2) => {
         var _a18, _b2, _c2, _d2, _e, _f, _g, _h;
         const result2 = await model.doGenerate({
-
-
-            tool: {
+          tools: [
+            {
              type: "function",
              name: schemaName != null ? schemaName : "json",
              description: schemaDescription != null ? schemaDescription : "Respond with a JSON object.",
              parameters: outputStrategy.jsonSchema
            }
-
+          ],
+          toolChoice: { type: "required" },
           ...prepareCallSettings(settings),
           inputFormat,
           prompt: promptMessages,
-
+          providerOptions,
           abortSignal,
           headers
         });
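In both `generateObject` code paths, the call options passed to `model.doGenerate` now carry the output constraints at the top level: a `responseFormat` block for JSON output, or a single function tool plus a required tool choice, replacing the nested shape on the removed lines. A standalone sketch of the two shapes, using placeholder schema values rather than anything exported by the package:

```ts
// Placeholder values standing in for what generateObject derives from its arguments.
const jsonSchema = {
  type: "object",
  properties: { answer: { type: "string" } },
  required: ["answer"],
};
const schemaName: string | undefined = "answer";
const schemaDescription: string | undefined = undefined;

// JSON output path: constraints live in a top-level responseFormat block.
const jsonCallOptions = {
  responseFormat: {
    type: "json" as const,
    schema: jsonSchema,
    name: schemaName,
    description: schemaDescription,
  },
};

// Tool output path: a single function tool plus a required tool choice.
const toolCallOptions = {
  tools: [
    {
      type: "function" as const,
      name: schemaName ?? "json",
      description: schemaDescription ?? "Respond with a JSON object.",
      parameters: jsonSchema,
    },
  ],
  toolChoice: { type: "required" as const },
};

console.log(jsonCallOptions, toolCallOptions);
```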
@@ -3273,8 +3273,8 @@ var DefaultStreamObjectResult = class {
           tools: void 0
         });
         callOptions = {
-
-            type: "
+          responseFormat: {
+            type: "json",
             schema: outputStrategy.jsonSchema,
             name: schemaName,
             description: schemaDescription
@@ -3287,7 +3287,7 @@ var DefaultStreamObjectResult = class {
             modelSupportsUrl: (_a17 = model.supportsUrl) == null ? void 0 : _a17.bind(model)
             // support 'this' context
           }),
-
+          providerOptions,
           abortSignal,
           headers
         };
@@ -3313,15 +3313,15 @@ var DefaultStreamObjectResult = class {
           tools: void 0
         });
         callOptions = {
-
-
-            tool: {
+          tools: [
+            {
              type: "function",
              name: schemaName != null ? schemaName : "json",
              description: schemaDescription != null ? schemaDescription : "Respond with a JSON object.",
              parameters: outputStrategy.jsonSchema
            }
-
+          ],
+          toolChoice: { type: "required" },
           ...prepareCallSettings(settings),
           inputFormat: standardizedPrompt.type,
           prompt: await convertToLanguageModelPrompt({
@@ -3330,7 +3330,7 @@ var DefaultStreamObjectResult = class {
             modelSupportsUrl: (_b = model.supportsUrl) == null ? void 0 : _b.bind(model)
             // support 'this' context,
           }),
-
+          providerOptions,
           abortSignal,
           headers
         };
@@ -4087,8 +4087,7 @@ async function generateText({
     tracer,
     fn: async (span) => {
       var _a18, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
-      const
-        type: "regular",
+      const toolsAndToolChoice = {
         ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
       };
       const callSettings = prepareCallSettings(settings);
@@ -4142,11 +4141,11 @@ async function generateText({
             // convert the language model level tools:
             input: () => {
               var _a19;
-              return (_a19 =
+              return (_a19 = toolsAndToolChoice.tools) == null ? void 0 : _a19.map((tool2) => JSON.stringify(tool2));
             }
           },
           "ai.prompt.toolChoice": {
-            input: () =>
+            input: () => toolsAndToolChoice.toolChoice != null ? JSON.stringify(toolsAndToolChoice.toolChoice) : void 0
           },
           // standardized gen-ai llm span attributes:
           "gen_ai.system": model.provider,
@@ -4164,12 +4163,12 @@ async function generateText({
         fn: async (span2) => {
           var _a19, _b2, _c2, _d2, _e2, _f2;
           const result = await model.doGenerate({
-            mode,
            ...callSettings,
+            ...toolsAndToolChoice,
            inputFormat: promptFormat,
            responseFormat: output == null ? void 0 : output.responseFormat({ model }),
            prompt: promptMessages,
-
+            providerOptions,
            abortSignal,
            headers
          });
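In `generateText`, the result of `prepareToolsAndToolChoice` is now held in a local `toolsAndToolChoice` object that is spread directly into the `doGenerate` call options (the former `mode` entry is removed), and the telemetry attributes read `tools` and `toolChoice` from that same object. A small sketch of the pattern; `prepareToolsAndToolChoiceStub` and the type aliases are illustrative stand-ins, not the package's internals:

```ts
// Hypothetical shapes, modeled loosely on what the hunks above spread around.
type FunctionTool = {
  type: "function";
  name: string;
  description?: string;
  parameters: unknown;
};
type ToolChoice =
  | { type: "auto" | "none" | "required" }
  | { type: "tool"; toolName: string };

// Stub that only imitates the { tools, toolChoice } output shape.
function prepareToolsAndToolChoiceStub(args: {
  tools?: FunctionTool[];
  toolChoice?: ToolChoice;
}): { tools?: FunctionTool[]; toolChoice?: ToolChoice } {
  return args.tools == null ? {} : { tools: args.tools, toolChoice: args.toolChoice };
}

const toolsAndToolChoice = {
  ...prepareToolsAndToolChoiceStub({
    tools: [{ type: "function", name: "getWeather", parameters: { type: "object" } }],
    toolChoice: { type: "required" },
  }),
};

// Spread into the call options instead of being wrapped in a `mode` object.
const callOptions = {
  ...toolsAndToolChoice,
  prompt: [],
  providerOptions: undefined,
};

// Telemetry derives its attributes from the same object.
const serializedTools = toolsAndToolChoice.tools?.map((tool) => JSON.stringify(tool));
const serializedToolChoice =
  toolsAndToolChoice.toolChoice != null
    ? JSON.stringify(toolsAndToolChoice.toolChoice)
    : undefined;

console.log(callOptions, serializedTools, serializedToolChoice);
```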
@@ -5474,8 +5473,7 @@ var DefaultStreamTextResult = class {
         modelSupportsUrl: (_a18 = model.supportsUrl) == null ? void 0 : _a18.bind(model)
         // support 'this' context
       });
-      const
-        type: "regular",
+      const toolsAndToolChoice = {
         ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
       };
       const {
@@ -5503,11 +5501,13 @@ var DefaultStreamTextResult = class {
             // convert the language model level tools:
             input: () => {
               var _a19;
-              return (_a19 =
+              return (_a19 = toolsAndToolChoice.tools) == null ? void 0 : _a19.map(
+                (tool2) => JSON.stringify(tool2)
+              );
             }
           },
           "ai.prompt.toolChoice": {
-            input: () =>
+            input: () => toolsAndToolChoice.toolChoice != null ? JSON.stringify(toolsAndToolChoice.toolChoice) : void 0
           },
           // standardized gen-ai llm span attributes:
           "gen_ai.system": model.provider,
@@ -5528,12 +5528,12 @@ var DefaultStreamTextResult = class {
             // get before the call
             doStreamSpan: doStreamSpan2,
             result: await model.doStream({
-              mode,
              ...prepareCallSettings(settings),
+              ...toolsAndToolChoice,
              inputFormat: promptFormat,
              responseFormat: output == null ? void 0 : output.responseFormat({ model }),
              prompt: promptMessages,
-
+              providerOptions,
              abortSignal,
              headers
            })
@@ -6283,9 +6283,9 @@ function defaultSettingsMiddleware({
       return {
         ...settings,
         ...params,
-
-          settings.
-          params.
+        providerOptions: mergeObjects(
+          settings.providerOptions,
+          params.providerOptions
         ),
         // special case for temperature 0
         // TODO remove when temperature defaults to undefined
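The final hunk has `defaultSettingsMiddleware` merge `providerOptions` from the default settings and the incoming params via `mergeObjects`. A sketch of the resulting behaviour; `mergeObjectsStub` is a hypothetical shallow stand-in, since the real `mergeObjects` helper is not shown in this diff:

```ts
type ProviderOptions = Record<string, Record<string, unknown>> | undefined;

// Hypothetical shallow merge; the package's own mergeObjects may merge more deeply.
function mergeObjectsStub(base: ProviderOptions, overrides: ProviderOptions): ProviderOptions {
  if (base == null && overrides == null) {
    return undefined;
  }
  return { ...base, ...overrides };
}

// Placeholder provider keys and values, chosen for illustration only.
const settings = { providerOptions: { defaultProvider: { traceRequests: true } } };
const params = { providerOptions: { callProvider: { timeoutMs: 5000 } } };

const merged = {
  ...settings,
  ...params,
  providerOptions: mergeObjectsStub(settings.providerOptions, params.providerOptions),
};

// merged.providerOptions now contains both the default and the per-call entries.
console.log(merged.providerOptions);
```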