ai 3.3.2 → 3.3.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +40 -4
- package/dist/index.d.ts +40 -4
- package/dist/index.js +28 -9
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +28 -9
- package/dist/index.mjs.map +1 -1
- package/package.json +8 -8
- package/rsc/dist/rsc-server.mjs +1 -0
- package/rsc/dist/rsc-server.mjs.map +1 -1
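
In dist/index.mjs (mirrored in the index.js, index.d.ts, and index.d.mts builds), generateObject and streamObject gain optional schemaName and schemaDescription settings. They are recorded as the telemetry attributes "ai.schema.name" and "ai.schema.description", forwarded to the provider in object-json mode, and used as the tool name and description in object-tool mode (falling back to "json" and "Respond with a JSON object."). In json mode the JSON schema is no longer injected into the system prompt when the model reports supportsStructuredOutputs. The StreamData "hanging stream" warning delay is extracted into a STREAM_DATA_WARNING_TIME_MS constant set to 15 seconds.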
package/dist/index.mjs
CHANGED
@@ -1277,6 +1277,8 @@ _a6 = symbol6;
 async function generateObject({
   model,
   schema: inputSchema,
+  schemaName,
+  schemaDescription,
   mode,
   system,
   prompt,
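
The two new parameters are optional call settings, so existing calls keep working. A minimal usage sketch; the zod schema, the prompt, and the @ai-sdk/openai provider are illustrative assumptions, not part of this diff:

```ts
import { generateObject } from "ai";
import { openai } from "@ai-sdk/openai"; // assumed provider package
import { z } from "zod";

const { object } = await generateObject({
  model: openai("gpt-4o-mini"), // any LanguageModelV1 implementation
  schema: z.object({
    name: z.string(),
    ingredients: z.array(z.string()),
  }),
  // new in 3.3.4: forwarded to telemetry and to the provider call
  schemaName: "recipe",
  schemaDescription: "A recipe with a name and a list of ingredients.",
  prompt: "Generate a lasagna recipe.",
});
```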
@@ -1313,6 +1315,8 @@ async function generateObject({
         "ai.schema": {
           input: () => JSON.stringify(schema.jsonSchema)
         },
+        "ai.schema.name": schemaName,
+        "ai.schema.description": schemaDescription,
         "ai.settings.mode": mode
       }
     }),
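
The two added span attributes are only recorded when telemetry is enabled for the call. A sketch, reusing the imports from the previous example and assuming the experimental_telemetry flag that generateObject already exposes in the 3.3 line:

```ts
const { object } = await generateObject({
  model: openai("gpt-4o-mini"),
  schema: z.object({ name: z.string() }),
  schemaName: "recipe",
  schemaDescription: "A recipe name.",
  prompt: "Name a vegetarian lasagna variant.",
  // the generateObject span now also carries the
  // "ai.schema.name" and "ai.schema.description" attributes
  experimental_telemetry: { isEnabled: true },
});
```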
@@ -1331,7 +1335,7 @@ async function generateObject({
     switch (mode) {
       case "json": {
         const validatedPrompt = getValidatedPrompt({
-          system: injectJsonSchemaIntoSystem({
+          system: model.supportsStructuredOutputs ? system : injectJsonSchemaIntoSystem({
             system,
             schema: schema.jsonSchema
           }),
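
The effect of this branch: a model whose LanguageModelV1 implementation reports supportsStructuredOutputs now receives the caller's system prompt unchanged, and the JSON schema reaches the provider through the object-json mode object of the doGenerate call (next hunk) rather than being appended to the system prompt by injectJsonSchemaIntoSystem. Models without that flag keep the previous prompt-injection behavior.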
@@ -1372,7 +1376,12 @@ async function generateObject({
           tracer,
           fn: async (span2) => {
             const result2 = await model.doGenerate({
-              mode: { type: "object-json" },
+              mode: {
+                type: "object-json",
+                schema: schema.jsonSchema,
+                name: schemaName,
+                description: schemaDescription
+              },
               ...prepareCallSettings(settings),
               inputFormat,
               prompt: promptMessages,
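
A sketch of the object-json mode value that generateObject now hands to model.doGenerate(), written as a standalone helper for illustration; the helper name and the loose types are assumptions, only the field names come from the hunk above:

```ts
// Not the SDK's own code: a restatement of the mode object built above.
function buildObjectJsonMode(
  jsonSchema: unknown,
  schemaName?: string,
  schemaDescription?: string,
) {
  return {
    type: "object-json" as const,
    schema: jsonSchema,             // previously the schema only reached the prompt
    name: schemaName,               // new optional setting
    description: schemaDescription, // new optional setting
  };
}
```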
@@ -1454,8 +1463,8 @@ async function generateObject({
               type: "object-tool",
               tool: {
                 type: "function",
-                name: "json",
-                description: "Respond with a JSON object.",
+                name: schemaName != null ? schemaName : "json",
+                description: schemaDescription != null ? schemaDescription : "Respond with a JSON object.",
                 parameters: schema.jsonSchema
               }
             },
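
In "tool" mode the same settings rename the synthetic tool: schemaName replaces the hard-coded "json" and schemaDescription replaces "Respond with a JSON object." when provided. A sketch of the fallback (the helper name is illustrative):

```ts
function toolNameAndDescription(schemaName?: string, schemaDescription?: string) {
  return {
    // the dist code uses `x != null ? x : fallback`, which is what `??` compiles to
    name: schemaName ?? "json",
    description: schemaDescription ?? "Respond with a JSON object.",
  };
}
```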
@@ -1636,6 +1645,8 @@ function createAsyncIterableStream(source, transformer) {
 async function streamObject({
   model,
   schema: inputSchema,
+  schemaName,
+  schemaDescription,
   mode,
   system,
   prompt,
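
streamObject accepts the same two settings. A usage sketch; provider, schema, and prompt are again illustrative:

```ts
import { streamObject } from "ai";
import { openai } from "@ai-sdk/openai"; // assumed provider package
import { z } from "zod";

const result = await streamObject({
  model: openai("gpt-4o-mini"),
  schema: z.object({ headline: z.string(), body: z.string() }),
  schemaName: "article",
  schemaDescription: "A short news article with a headline and a body.",
  prompt: "Write a short article about structured outputs.",
});

for await (const partialObject of result.partialObjectStream) {
  console.log(partialObject); // progressively more complete objects
}
```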
@@ -1672,6 +1683,8 @@ async function streamObject({
           input: () => JSON.stringify({ system, prompt, messages })
         },
         "ai.schema": { input: () => JSON.stringify(schema.jsonSchema) },
+        "ai.schema.name": schemaName,
+        "ai.schema.description": schemaDescription,
         "ai.settings.mode": mode
       }
     }),
@@ -1686,7 +1699,7 @@ async function streamObject({
     switch (mode) {
       case "json": {
         const validatedPrompt = getValidatedPrompt({
-          system: injectJsonSchemaIntoSystem({
+          system: model.supportsStructuredOutputs ? system : injectJsonSchemaIntoSystem({
             system,
             schema: schema.jsonSchema
           }),
@@ -1694,7 +1707,12 @@ async function streamObject({
           messages
         });
         callOptions = {
-          mode: { type: "object-json" },
+          mode: {
+            type: "object-json",
+            schema: schema.jsonSchema,
+            name: schemaName,
+            description: schemaDescription
+          },
           ...prepareCallSettings(settings),
           inputFormat: validatedPrompt.type,
           prompt: await convertToLanguageModelPrompt({
@@ -1730,8 +1748,8 @@ async function streamObject({
             type: "object-tool",
             tool: {
               type: "function",
-              name: "json",
-              description: "Respond with a JSON object.",
+              name: schemaName != null ? schemaName : "json",
+              description: schemaDescription != null ? schemaDescription : "Respond with a JSON object.",
               parameters: schema.jsonSchema
             }
           },
@@ -3640,6 +3658,7 @@ function readableFromAsyncIterable(iterable) {
 
 // streams/stream-data.ts
 import { formatStreamPart as formatStreamPart2 } from "@ai-sdk/ui-utils";
+var STREAM_DATA_WARNING_TIME_MS = 15 * 1e3;
 var StreamData2 = class {
   constructor() {
     this.encoder = new TextEncoder();
@@ -3655,7 +3674,7 @@ var StreamData2 = class {
             console.warn(
               "The data stream is hanging. Did you forget to close it with `data.close()`?"
             );
-          },
+          }, STREAM_DATA_WARNING_TIME_MS);
         }
       },
       pull: (controller) => {
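
With this change the delay before the "hanging stream" warning is the named constant STREAM_DATA_WARNING_TIME_MS (15 * 1e3 ms, i.e. 15 seconds). The remedy on the caller's side is unchanged: close the StreamData when streaming finishes. A sketch, assuming the StreamData and streamText APIs of the 3.3 line and the @ai-sdk/openai provider:

```ts
import { streamText, StreamData } from "ai";
import { openai } from "@ai-sdk/openai"; // assumed provider package

const data = new StreamData();
data.append({ status: "started" }); // arbitrary JSON values for the data side-channel

const result = await streamText({
  model: openai("gpt-4o-mini"),
  prompt: "Say hello.",
  onFinish() {
    data.close(); // closing the stream prevents the hanging-stream warning
  },
});

// e.g. in a route handler: return result.toDataStreamResponse({ data });
```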