ai 6.0.0-beta.36 → 6.0.0-beta.38
This diff shows the changes between publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/dist/index.d.mts +2 -2
- package/dist/index.d.ts +2 -2
- package/dist/index.js +133 -112
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +141 -119
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +2 -2
- package/dist/internal/index.d.ts +2 -2
- package/dist/internal/index.js +32 -28
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +32 -28
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +4 -4
package/dist/index.mjs
CHANGED
@@ -720,7 +720,7 @@ import {
 } from "@ai-sdk/provider-utils";

 // src/version.ts
-var VERSION = true ? "6.0.0-beta.
+var VERSION = true ? "6.0.0-beta.38" : "0.0.0-test";

 // src/util/download/download.ts
 var download = async ({ url }) => {
@@ -1222,7 +1222,7 @@ function isNonEmptyObject(object7) {
 }

 // src/prompt/prepare-tools-and-tool-choice.ts
-function prepareToolsAndToolChoice({
+async function prepareToolsAndToolChoice({
 tools,
 toolChoice,
 activeTools
@@ -1236,33 +1236,37 @@ function prepareToolsAndToolChoice({
 const filteredTools = activeTools != null ? Object.entries(tools).filter(
 ([name17]) => activeTools.includes(name17)
 ) : Object.entries(tools);
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+const languageModelTools = [];
+for (const [name17, tool3] of filteredTools) {
+const toolType = tool3.type;
+switch (toolType) {
+case void 0:
+case "dynamic":
+case "function":
+languageModelTools.push({
+type: "function",
+name: name17,
+description: tool3.description,
+inputSchema: await asSchema(tool3.inputSchema).jsonSchema,
+providerOptions: tool3.providerOptions
+});
+break;
+case "provider-defined":
+languageModelTools.push({
+type: "provider-defined",
+name: name17,
+id: tool3.id,
+args: tool3.args
+});
+break;
+default: {
+const exhaustiveCheck = toolType;
+throw new Error(`Unsupported tool type: ${exhaustiveCheck}`);
 }
-}
+}
+}
+return {
+tools: languageModelTools,
 toolChoice: toolChoice == null ? { type: "auto" } : typeof toolChoice === "string" ? { type: toolChoice } : { type: "tool", toolName: toolChoice.toolName }
 };
 }
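The hunk above makes `prepareToolsAndToolChoice` async so that each tool's JSON schema can be awaited before it is handed to the language model. A minimal stand-alone sketch of that pattern, with illustrative types rather than the SDK's internal ones:

```ts
// Sketch: map user-defined tools to model-facing function tools while
// awaiting schemas that may resolve lazily. Types here are illustrative.
type JSONSchema = Record<string, unknown>;

interface SketchTool {
  description?: string;
  // May be a plain value or a promise, mirroring the awaited access above.
  jsonSchema: JSONSchema | Promise<JSONSchema>;
}

interface ModelFunctionTool {
  type: "function";
  name: string;
  description?: string;
  inputSchema: JSONSchema;
}

async function toModelTools(
  tools: Record<string, SketchTool>,
  activeTools?: string[],
): Promise<ModelFunctionTool[]> {
  const entries = Object.entries(tools).filter(
    ([name]) => activeTools == null || activeTools.includes(name),
  );
  const result: ModelFunctionTool[] = [];
  for (const [name, tool] of entries) {
    result.push({
      type: "function",
      name,
      description: tool.description,
      // The essential change in the hunk: the schema is awaited per tool.
      inputSchema: await tool.jsonSchema,
    });
  }
  return result;
}
```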
@@ -1632,29 +1636,33 @@ function getTracer({

 // src/telemetry/record-span.ts
 import { SpanStatusCode } from "@opentelemetry/api";
-function recordSpan({
+async function recordSpan({
 name: name17,
 tracer,
 attributes,
 fn,
 endWhenDone = true
 }) {
-return tracer.startActiveSpan(
-
-
-
-span.end();
-}
-return result;
-} catch (error) {
+return tracer.startActiveSpan(
+name17,
+{ attributes: await attributes },
+async (span) => {
 try {
-
-
-
+const result = await fn(span);
+if (endWhenDone) {
+span.end();
+}
+return result;
+} catch (error) {
+try {
+recordErrorOnSpan(span, error);
+} finally {
+span.end();
+}
+throw error;
 }
-throw error;
 }
-
+);
 }
 function recordErrorOnSpan(span, error) {
 if (error instanceof Error) {
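The rewritten `recordSpan` becomes async so the span attributes can be awaited before the span starts, and it always ends the span on failure. A hedged sketch of the same wrapper against the public `@opentelemetry/api` surface; error recording here uses `recordException`/`setStatus` instead of the SDK's internal `recordErrorOnSpan` helper:

```ts
import {
  type Attributes,
  type Span,
  SpanStatusCode,
  type Tracer,
} from "@opentelemetry/api";

// Sketch: run `fn` inside an active span, awaiting attributes that may be
// a promise, and end the span on both the success and the error path.
async function recordSpanSketch<T>({
  name,
  tracer,
  attributes,
  fn,
  endWhenDone = true,
}: {
  name: string;
  tracer: Tracer;
  attributes: Attributes | Promise<Attributes>;
  fn: (span: Span) => Promise<T>;
  endWhenDone?: boolean;
}): Promise<T> {
  return tracer.startActiveSpan(
    name,
    { attributes: await attributes },
    async (span) => {
      try {
        const result = await fn(span);
        if (endWhenDone) {
          span.end();
        }
        return result;
      } catch (error) {
        try {
          span.recordException(error as Error);
          span.setStatus({ code: SpanStatusCode.ERROR });
        } finally {
          span.end();
        }
        throw error;
      }
    },
  );
}
```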
@@ -1673,33 +1681,41 @@ function recordErrorOnSpan(span, error) {
 }

 // src/telemetry/select-telemetry-attributes.ts
-function selectTelemetryAttributes({
+async function selectTelemetryAttributes({
 telemetry,
 attributes
 }) {
 if ((telemetry == null ? void 0 : telemetry.isEnabled) !== true) {
 return {};
 }
-
+const resultAttributes = {};
+for (const [key, value] of Object.entries(attributes)) {
 if (value == null) {
-
+continue;
 }
 if (typeof value === "object" && "input" in value && typeof value.input === "function") {
 if ((telemetry == null ? void 0 : telemetry.recordInputs) === false) {
-
+continue;
+}
+const result = await value.input();
+if (result != null) {
+resultAttributes[key] = result;
 }
-
-return result == null ? attributes2 : { ...attributes2, [key]: result };
+continue;
 }
 if (typeof value === "object" && "output" in value && typeof value.output === "function") {
 if ((telemetry == null ? void 0 : telemetry.recordOutputs) === false) {
-
+continue;
+}
+const result = await value.output();
+if (result != null) {
+resultAttributes[key] = result;
 }
-
-return result == null ? attributes2 : { ...attributes2, [key]: result };
+continue;
 }
-
-}
+resultAttributes[key] = value;
+}
+return resultAttributes;
 }

 // src/telemetry/stringify-for-telemetry.ts
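`selectTelemetryAttributes` now awaits lazy `input()`/`output()` providers instead of assembling the attribute object synchronously. A small self-contained sketch of that resolution loop (types are illustrative, not the SDK's):

```ts
type AttributeValue = string | number | boolean;

// An attribute is either a plain value or a lazy provider that may be async.
type LazyAttribute =
  | AttributeValue
  | { input: () => AttributeValue | undefined | Promise<AttributeValue | undefined> }
  | { output: () => AttributeValue | undefined | Promise<AttributeValue | undefined> }
  | undefined;

async function resolveTelemetryAttributes(
  attributes: Record<string, LazyAttribute>,
  opts: { recordInputs?: boolean; recordOutputs?: boolean } = {},
): Promise<Record<string, AttributeValue>> {
  const result: Record<string, AttributeValue> = {};
  for (const [key, value] of Object.entries(attributes)) {
    if (value == null) {
      continue;
    }
    if (typeof value === "object" && "input" in value) {
      if (opts.recordInputs === false) {
        continue;
      }
      const resolved = await value.input();
      if (resolved != null) {
        result[key] = resolved;
      }
      continue;
    }
    if (typeof value === "object" && "output" in value) {
      if (opts.recordOutputs === false) {
        continue;
      }
      const resolved = await value.output();
      if (resolved != null) {
        result[key] = resolved;
      }
      continue;
    }
    // Plain values are copied through unchanged.
    result[key] = value as AttributeValue;
  }
  return result;
}
```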
@@ -2006,7 +2022,7 @@ async function executeToolCall({
 }
 try {
 span.setAttributes(
-selectTelemetryAttributes({
+await selectTelemetryAttributes({
 telemetry,
 attributes: {
 "ai.toolCall.result": {
@@ -2125,9 +2141,9 @@ async function parseToolCall({
 repairedToolCall = await repairToolCall({
 toolCall,
 tools,
-inputSchema: ({ toolName }) => {
+inputSchema: async ({ toolName }) => {
 const { inputSchema } = tools[toolName];
-return asSchema2(inputSchema).jsonSchema;
+return await asSchema2(inputSchema).jsonSchema;
 },
 system,
 messages,
@@ -2520,7 +2536,7 @@ async function generateText({
 supportedUrls: await stepModel.supportedUrls,
 download: download2
 });
-const { toolChoice: stepToolChoice, tools: stepTools } = prepareToolsAndToolChoice({
+const { toolChoice: stepToolChoice, tools: stepTools } = await prepareToolsAndToolChoice({
 tools,
 toolChoice: (_d = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _d : toolChoice,
 activeTools: (_e = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _e : activeTools
@@ -2571,7 +2587,7 @@ async function generateText({
 ...callSettings2,
 tools: stepTools,
 toolChoice: stepToolChoice,
-responseFormat: output == null ? void 0 : output.responseFormat,
+responseFormat: await (output == null ? void 0 : output.responseFormat),
 prompt: promptMessages,
 providerOptions,
 abortSignal,
@@ -2585,7 +2601,7 @@ async function generateText({
 body: (_h = result.response) == null ? void 0 : _h.body
 };
 span2.setAttributes(
-selectTelemetryAttributes({
+await selectTelemetryAttributes({
 telemetry,
 attributes: {
 "ai.response.finishReason": result.finishReason,
@@ -2729,7 +2745,7 @@ async function generateText({
 !await isStopConditionMet({ stopConditions, steps })
 );
 span.setAttributes(
-selectTelemetryAttributes({
+await selectTelemetryAttributes({
 telemetry,
 attributes: {
 "ai.response.finishReason": currentModelResponse.finishReason,
@@ -4317,15 +4333,15 @@ async function consumeStream({

 // src/util/create-resolvable-promise.ts
 function createResolvablePromise() {
-let
+let resolve3;
 let reject;
 const promise = new Promise((res, rej) => {
-
+resolve3 = res;
 reject = rej;
 });
 return {
 promise,
-resolve:
+resolve: resolve3,
 reject
 };
 }
@@ -4422,13 +4438,13 @@ var DelayedPromise = class {
 if (this._promise) {
 return this._promise;
 }
-this._promise = new Promise((
+this._promise = new Promise((resolve3, reject) => {
 if (this.status.type === "resolved") {
-
+resolve3(this.status.value);
 } else if (this.status.type === "rejected") {
 reject(this.status.error);
 }
-this._resolve =
+this._resolve = resolve3;
 this._reject = reject;
 });
 return this._promise;
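These two hunks only rename the captured resolver (`resolve3`) in the bundled output; the underlying pattern is a promise whose settle functions are handed back to the caller. A compact sketch of that helper:

```ts
// A promise whose resolve/reject handles are exposed to the caller,
// so it can be settled later from unrelated code.
function createResolvablePromise<T>(): {
  promise: Promise<T>;
  resolve: (value: T) => void;
  reject: (error: unknown) => void;
} {
  let resolve!: (value: T) => void;
  let reject!: (error: unknown) => void;
  const promise = new Promise<T>((res, rej) => {
    resolve = res;
    reject = rej;
  });
  return { promise, resolve, reject };
}

// Usage: hand out `promise`, settle it elsewhere.
const { promise, resolve } = createResolvablePromise<string>();
setTimeout(() => resolve("done"), 10);
promise.then(console.log);
```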
@@ -5031,7 +5047,7 @@ var DefaultStreamTextResult = class {
 steps: recordedSteps
 }));
 rootSpan.setAttributes(
-selectTelemetryAttributes({
+await selectTelemetryAttributes({
 telemetry,
 attributes: {
 "ai.response.finishReason": finishReason,
@@ -5240,7 +5256,7 @@ var DefaultStreamTextResult = class {
 supportedUrls: await stepModel.supportedUrls,
 download: download2
 });
-const { toolChoice: stepToolChoice, tools: stepTools } = prepareToolsAndToolChoice({
+const { toolChoice: stepToolChoice, tools: stepTools } = await prepareToolsAndToolChoice({
 tools,
 toolChoice: (_d = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _d : toolChoice,
 activeTools: (_e = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _e : activeTools
@@ -5296,7 +5312,7 @@ var DefaultStreamTextResult = class {
 ...callSettings,
 tools: stepTools,
 toolChoice: stepToolChoice,
-responseFormat: output == null ? void 0 : output.responseFormat,
+responseFormat: await (output == null ? void 0 : output.responseFormat),
 prompt: promptMessages,
 providerOptions,
 abortSignal,
@@ -5499,7 +5515,7 @@ var DefaultStreamTextResult = class {
 const stepToolCallsJson = stepToolCalls.length > 0 ? JSON.stringify(stepToolCalls) : void 0;
 try {
 doStreamSpan.setAttributes(
-selectTelemetryAttributes({
+await selectTelemetryAttributes({
 telemetry,
 attributes: {
 "ai.response.finishReason": stepFinishReason,
@@ -6408,7 +6424,7 @@ async function embed({
 const embedding2 = modelResponse.embeddings[0];
 const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
 doEmbedSpan.setAttributes(
-selectTelemetryAttributes({
+await selectTelemetryAttributes({
 telemetry,
 attributes: {
 "ai.embeddings": {
@@ -6431,7 +6447,7 @@ async function embed({
 )
 );
 span.setAttributes(
-selectTelemetryAttributes({
+await selectTelemetryAttributes({
 telemetry,
 attributes: {
 "ai.embedding": { output: () => JSON.stringify(embedding) },
@@ -6552,7 +6568,7 @@ async function embedMany({
 const embeddings3 = modelResponse.embeddings;
 const usage2 = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
 doEmbedSpan.setAttributes(
-selectTelemetryAttributes({
+await selectTelemetryAttributes({
 telemetry,
 attributes: {
 "ai.embeddings": {
@@ -6575,7 +6591,7 @@ async function embedMany({
 }
 );
 span.setAttributes(
-selectTelemetryAttributes({
+await selectTelemetryAttributes({
 telemetry,
 attributes: {
 "ai.embeddings": {
@@ -6634,7 +6650,7 @@ async function embedMany({
 const embeddings2 = modelResponse.embeddings;
 const usage = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
 doEmbedSpan.setAttributes(
-selectTelemetryAttributes({
+await selectTelemetryAttributes({
 telemetry,
 attributes: {
 "ai.embeddings": {
@@ -6678,7 +6694,7 @@ async function embedMany({
 }
 }
 span.setAttributes(
-selectTelemetryAttributes({
+await selectTelemetryAttributes({
 telemetry,
 attributes: {
 "ai.embeddings": {
@@ -6853,7 +6869,7 @@ import {
 } from "@ai-sdk/provider-utils";
 var noSchemaOutputStrategy = {
 type: "no-schema",
-jsonSchema: void 0,
+jsonSchema: async () => void 0,
 async validatePartialResult({ value, textDelta }) {
 return { success: true, value: { partial: value, textDelta } };
 },
@@ -6877,7 +6893,7 @@ var noSchemaOutputStrategy = {
 };
 var objectOutputStrategy = (schema) => ({
 type: "object",
-jsonSchema: schema.jsonSchema,
+jsonSchema: async () => await schema.jsonSchema,
 async validatePartialResult({ value, textDelta }) {
 return {
 success: true,
@@ -6898,20 +6914,22 @@ var objectOutputStrategy = (schema) => ({
 }
 });
 var arrayOutputStrategy = (schema) => {
-const { $schema, ...itemSchema } = schema.jsonSchema;
 return {
 type: "enum",
 // wrap in object that contains array of elements, since most LLMs will not
 // be able to generate an array directly:
 // possible future optimization: use arrays directly when model supports grammar-guided generation
-jsonSchema: {
-$schema
-
-
-
-
-
+jsonSchema: async () => {
+const { $schema, ...itemSchema } = await schema.jsonSchema;
+return {
+$schema: "http://json-schema.org/draft-07/schema#",
+type: "object",
+properties: {
+elements: { type: "array", items: itemSchema }
+},
+required: ["elements"],
+additionalProperties: false
+};
 },
 async validatePartialResult({
 value,
@@ -7019,7 +7037,7 @@ var enumOutputStrategy = (enumValues) => {
 // wrap in object that contains result, since most LLMs will not
 // be able to generate an enum value directly:
 // possible future optimization: use enums directly when model supports top-level enums
-jsonSchema: {
+jsonSchema: async () => ({
 $schema: "http://json-schema.org/draft-07/schema#",
 type: "object",
 properties: {
@@ -7027,7 +7045,7 @@ var enumOutputStrategy = (enumValues) => {
 },
 required: ["result"],
 additionalProperties: false
-},
+}),
 async validateFinalResult(value) {
 if (!isJSONObject(value) || typeof value.result !== "string") {
 return {
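Across these hunks every output strategy's `jsonSchema` turns from a plain value into an async factory, so the underlying schema can itself be awaited. A hedged sketch of the array case, which strips a nested `$schema` and wraps the awaited item schema in an object with an `elements` array (the type alias is illustrative):

```ts
type JsonSchemaObject = Record<string, unknown>;

// Sketch: build an async jsonSchema() factory that wraps a possibly-async
// item schema in an object schema with a required `elements` array.
function arrayJsonSchemaFactory(
  itemSchemaSource: JsonSchemaObject | Promise<JsonSchemaObject>,
): () => Promise<JsonSchemaObject> {
  return async () => {
    // Drop the item's own $schema before embedding it.
    const { $schema: _dropped, ...itemSchema } = await itemSchemaSource;
    return {
      $schema: "http://json-schema.org/draft-07/schema#",
      type: "object",
      properties: {
        elements: { type: "array", items: itemSchema },
      },
      required: ["elements"],
      additionalProperties: false,
    };
  };
}

// Usage: the schema is only computed (and awaited) when requested.
arrayJsonSchemaFactory(Promise.resolve({ type: "string" }))().then(
  (schema) => console.log(JSON.stringify(schema, null, 2)),
);
```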
@@ -7338,6 +7356,7 @@ async function generateObject(options) {
 settings: { ...callSettings, maxRetries }
 });
 const tracer = getTracer(telemetry);
+const jsonSchema3 = await outputStrategy.jsonSchema();
 try {
 return await recordSpan({
 name: "ai.generateObject",
@@ -7353,7 +7372,7 @@ async function generateObject(options) {
 "ai.prompt": {
 input: () => JSON.stringify({ system, prompt, messages })
 },
-"ai.schema":
+"ai.schema": jsonSchema3 != null ? { input: () => JSON.stringify(jsonSchema3) } : void 0,
 "ai.schema.name": schemaName,
 "ai.schema.description": schemaDescription,
 "ai.settings.output": outputStrategy.type
@@ -7411,7 +7430,7 @@ async function generateObject(options) {
 const result2 = await model.doGenerate({
 responseFormat: {
 type: "json",
-schema:
+schema: jsonSchema3,
 name: schemaName,
 description: schemaDescription
 },
@@ -7439,7 +7458,7 @@ async function generateObject(options) {
 });
 }
 span2.setAttributes(
-selectTelemetryAttributes({
+await selectTelemetryAttributes({
 telemetry,
 attributes: {
 "ai.response.finishReason": result2.finishReason,
@@ -7491,7 +7510,7 @@ async function generateObject(options) {
 }
 );
 span.setAttributes(
-selectTelemetryAttributes({
+await selectTelemetryAttributes({
 telemetry,
 attributes: {
 "ai.response.finishReason": finishReason,
@@ -7643,11 +7662,11 @@ var SerialJobExecutor = class {
 this.isProcessing = false;
 }
 async run(job) {
-return new Promise((
+return new Promise((resolve3, reject) => {
 this.queue.push(async () => {
 try {
 await job();
-
+resolve3();
 } catch (error) {
 reject(error);
 }
@@ -7816,7 +7835,9 @@ var DefaultStreamObjectResult = class {
 "ai.prompt": {
 input: () => JSON.stringify({ system, prompt, messages })
 },
-"ai.schema":
+"ai.schema": {
+input: async () => JSON.stringify(await outputStrategy.jsonSchema())
+},
 "ai.schema.name": schemaName,
 "ai.schema.description": schemaDescription,
 "ai.settings.output": outputStrategy.type
@@ -7833,7 +7854,7 @@ var DefaultStreamObjectResult = class {
 const callOptions = {
 responseFormat: {
 type: "json",
-schema: outputStrategy.jsonSchema,
+schema: await outputStrategy.jsonSchema(),
 name: schemaName,
 description: schemaDescription
 },
@@ -8036,7 +8057,7 @@ var DefaultStreamObjectResult = class {
 totalTokens: NaN
 };
 doStreamSpan.setAttributes(
-selectTelemetryAttributes({
+await selectTelemetryAttributes({
 telemetry,
 attributes: {
 "ai.response.finishReason": finishReason,
@@ -8063,7 +8084,7 @@ var DefaultStreamObjectResult = class {
 );
 doStreamSpan.end();
 rootSpan.setAttributes(
-selectTelemetryAttributes({
+await selectTelemetryAttributes({
 telemetry,
 attributes: {
 "ai.usage.inputTokens": finalUsage.inputTokens,
@@ -8307,12 +8328,13 @@ __export(output_exports, {
 });
 import {
 asSchema as asSchema4,
+resolve,
 safeParseJSON as safeParseJSON4,
 safeValidateTypes as safeValidateTypes4
 } from "@ai-sdk/provider-utils";
 var text = () => ({
 type: "text",
-responseFormat: { type: "text" },
+responseFormat: Promise.resolve({ type: "text" }),
 async parsePartial({ text: text2 }) {
 return { partial: text2 };
 },
@@ -8326,10 +8348,10 @@ var object3 = ({
 const schema = asSchema4(inputSchema);
 return {
 type: "object",
-responseFormat: {
+responseFormat: resolve(schema.jsonSchema).then((jsonSchema3) => ({
 type: "json",
-schema:
-},
+schema: jsonSchema3
+})),
 async parsePartial({ text: text2 }) {
 const result = await parsePartialJson(text2);
 switch (result.state) {
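These hunks switch `responseFormat` from a plain object to a promise and pull in `resolve` from `@ai-sdk/provider-utils` to normalize the schema source. As an assumption about what such a helper does (not the library's exact signature), here is a stand-alone sketch that normalizes a value, a promise, or a thunk into a promise:

```ts
// Assumed shape: a value, a promise of a value, or a function producing either.
type Resolvable<T> = T | Promise<T> | (() => T | Promise<T>);

// Normalize any Resolvable<T> into a Promise<T>.
async function resolveValue<T>(value: Resolvable<T>): Promise<T> {
  return typeof value === "function"
    ? (value as () => T | Promise<T>)()
    : value;
}

// Usage: the caller no longer cares which form was supplied.
resolveValue({ type: "text" }).then(console.log);
resolveValue(Promise.resolve({ type: "json" })).then(console.log);
resolveValue(() => ({ type: "json", schema: {} })).then(console.log);
```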
@@ -9154,9 +9176,9 @@ var SseMCPTransport = class {
 this.headers = headers;
 }
 async start() {
-return new Promise((
+return new Promise((resolve3, reject) => {
 if (this.connected) {
-return
+return resolve3();
 }
 this.abortController = new AbortController();
 const establishConnection = async () => {
@@ -9206,7 +9228,7 @@ var SseMCPTransport = class {
 });
 }
 this.connected = true;
-
+resolve3();
 } else if (event === "message") {
 try {
 const message = JSONRPCMessageSchema.parse(
@@ -9409,7 +9431,7 @@ var DefaultMCPClient = class {
 resultSchema,
 options
 }) {
-return new Promise((
+return new Promise((resolve3, reject) => {
 if (this.isClosed) {
 return reject(
 new MCPClientError({
@@ -9443,7 +9465,7 @@ var DefaultMCPClient = class {
 }
 try {
 const result = resultSchema.parse(response.result);
-
+resolve3(result);
 } catch (error) {
 const parseError = new MCPClientError({
 message: "Failed to parse server response",
@@ -9812,11 +9834,11 @@ async function convertFileListToFileUIParts(files) {
 return Promise.all(
 Array.from(files).map(async (file) => {
 const { name: name17, type } = file;
-const dataUrl = await new Promise((
+const dataUrl = await new Promise((resolve3, reject) => {
 const reader = new FileReader();
 reader.onload = (readerEvent) => {
 var _a17;
-
+resolve3((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
 };
 reader.onerror = (error) => reject(error);
 reader.readAsDataURL(file);
@@ -9836,7 +9858,7 @@ import { parseJsonEventStream as parseJsonEventStream2 } from "@ai-sdk/provider-

 // src/ui/http-chat-transport.ts
 import {
-resolve,
+resolve as resolve2,
 withUserAgentSuffix as withUserAgentSuffix11,
 getRuntimeEnvironmentUserAgent as getRuntimeEnvironmentUserAgent4
 } from "@ai-sdk/provider-utils";
@@ -9863,9 +9885,9 @@ var HttpChatTransport = class {
 ...options
 }) {
 var _a17, _b, _c, _d, _e;
-const resolvedBody = await
-const resolvedHeaders = await
-const resolvedCredentials = await
+const resolvedBody = await resolve2(this.body);
+const resolvedHeaders = await resolve2(this.headers);
+const resolvedCredentials = await resolve2(this.credentials);
 const preparedRequest = await ((_a17 = this.prepareSendMessagesRequest) == null ? void 0 : _a17.call(this, {
 api: this.api,
 id: options.chatId,
@@ -9915,9 +9937,9 @@ var HttpChatTransport = class {
 }
 async reconnectToStream(options) {
 var _a17, _b, _c, _d, _e;
-const resolvedBody = await
-const resolvedHeaders = await
-const resolvedCredentials = await
+const resolvedBody = await resolve2(this.body);
+const resolvedHeaders = await resolve2(this.headers);
+const resolvedCredentials = await resolve2(this.credentials);
 const preparedRequest = await ((_a17 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a17.call(this, {
 api: this.api,
 id: options.chatId,
@@ -10790,11 +10812,11 @@ function createUIMessageStream({
 errorText: onError(error)
 });
 }
-const waitForStreams = new Promise(async (
+const waitForStreams = new Promise(async (resolve3) => {
 while (ongoingStreamPromises.length > 0) {
 await ongoingStreamPromises.shift();
 }
-
+resolve3();
 });
 waitForStreams.finally(() => {
 try {