ai 4.1.29 → 4.1.31
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.d.mts +32 -5
- package/dist/index.d.ts +32 -5
- package/dist/index.js +23 -18
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +23 -18
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,17 @@
 # ai
 
+## 4.1.31
+
+### Patch Changes
+
+- b30b1cc: feat (ai/core): add onError callback to streamObject
+
+## 4.1.30
+
+### Patch Changes
+
+- 4ee5b6f: fix (core): remove invalid providerOptions from streamObject onFinish callback
+
 ## 4.1.29
 
 ### Patch Changes
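For orientation, the new `onError` option from 4.1.31 is used roughly as follows. This is a minimal sketch, not part of the published diff; the OpenAI provider, model id, and schema are illustrative assumptions.

```ts
import { streamObject } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package
import { z } from 'zod';

async function main() {
  const result = streamObject({
    model: openai('gpt-4o-mini'), // assumed model id
    schema: z.object({ name: z.string(), age: z.number() }),
    prompt: 'Generate a fictional person.',
    // New in 4.1.31: streaming errors are reported here instead of
    // rejecting the partial object stream.
    onError({ error }) {
      console.error(error);
    },
  });

  for await (const partialObject of result.partialObjectStream) {
    console.log(partialObject);
  }
}

main().catch(console.error);
```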
package/dist/index.d.mts
CHANGED
@@ -1226,6 +1226,19 @@ type ObjectStreamPart<PARTIAL> = {
     providerMetadata?: ProviderMetadata;
 };
 
+/**
+Callback that is set using the `onError` option.
+
+@param event - The event that is passed to the callback.
+ */
+type StreamObjectOnErrorCallback = (event: {
+    error: unknown;
+}) => Promise<void> | void;
+/**
+Callback that is set using the `onFinish` option.
+
+@param event - The event that is passed to the callback.
+ */
 type StreamObjectOnFinishCallback<RESULT> = (event: {
     /**
     The token usage of the generated response.
@@ -1248,13 +1261,9 @@ type StreamObjectOnFinishCallback<RESULT> = (event: {
      */
     warnings?: CallWarning[];
     /**
-    Additional provider-specific
+    Additional provider-specific metadata. They are passed through
     to the provider from the AI SDK and enable provider-specific
     functionality that can be fully encapsulated in the provider.
-     */
-    providerOptions?: ProviderOptions;
-    /**
-    @deprecated Use `providerOptions` instead.
      */
     experimental_providerMetadata?: ProviderMetadata;
 }) => Promise<void> | void;
@@ -1317,6 +1326,12 @@ functionality that can be fully encapsulated in the provider.
      */
     experimental_providerMetadata?: ProviderMetadata;
     /**
+    Callback that is invoked when an error occurs during streaming.
+    You can use it to log errors.
+    The stream processing will pause until the callback promise is resolved.
+     */
+    onError?: StreamObjectOnErrorCallback;
+    /**
     Callback that is called when the LLM response and the final object validation are finished.
      */
     onFinish?: StreamObjectOnFinishCallback<OBJECT>;
@@ -1388,6 +1403,12 @@ functionality that can be fully encapsulated in the provider.
      */
     experimental_providerMetadata?: ProviderMetadata;
     /**
+    Callback that is invoked when an error occurs during streaming.
+    You can use it to log errors.
+    The stream processing will pause until the callback promise is resolved.
+     */
+    onError?: StreamObjectOnErrorCallback;
+    /**
     Callback that is called when the LLM response and the final object validation are finished.
      */
     onFinish?: StreamObjectOnFinishCallback<Array<ELEMENT>>;
@@ -1433,6 +1454,12 @@ functionality that can be fully encapsulated in the provider.
      */
     experimental_providerMetadata?: ProviderMetadata;
     /**
+    Callback that is invoked when an error occurs during streaming.
+    You can use it to log errors.
+    The stream processing will pause until the callback promise is resolved.
+     */
+    onError?: StreamObjectOnErrorCallback;
+    /**
     Callback that is called when the LLM response and the final object validation are finished.
      */
     onFinish?: StreamObjectOnFinishCallback<JSONValue>;
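Taken together, the declaration changes add `onError` to all three `streamObject` call signatures and drop `providerOptions` from the `onFinish` event. A sketch of how the callbacks line up with the new types (provider, model id, and schema are again placeholders, not part of this diff; stream consumption is omitted, see the sketch after the changelog):

```ts
import { streamObject } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package
import { z } from 'zod';

streamObject({
  model: openai('gpt-4o-mini'), // assumed model id
  schema: z.object({ title: z.string() }),
  prompt: 'Suggest a book title.',
  // Matches StreamObjectOnErrorCallback: (event: { error: unknown }) => Promise<void> | void.
  async onError({ error }) {
    // `error` is typed as `unknown`; narrow it before use. Stream processing
    // pauses until this promise resolves.
    console.error(error instanceof Error ? error.message : error);
  },
  onFinish(event) {
    // Since 4.1.30 the event no longer declares `providerOptions`;
    // provider-specific data arrives via `experimental_providerMetadata`.
    console.log(event.usage, event.warnings, event.experimental_providerMetadata);
  },
});
```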
package/dist/index.d.ts
CHANGED
@@ -1226,6 +1226,19 @@ type ObjectStreamPart<PARTIAL> = {
     providerMetadata?: ProviderMetadata;
 };
 
+/**
+Callback that is set using the `onError` option.
+
+@param event - The event that is passed to the callback.
+ */
+type StreamObjectOnErrorCallback = (event: {
+    error: unknown;
+}) => Promise<void> | void;
+/**
+Callback that is set using the `onFinish` option.
+
+@param event - The event that is passed to the callback.
+ */
 type StreamObjectOnFinishCallback<RESULT> = (event: {
     /**
     The token usage of the generated response.
@@ -1248,13 +1261,9 @@ type StreamObjectOnFinishCallback<RESULT> = (event: {
      */
     warnings?: CallWarning[];
     /**
-    Additional provider-specific
+    Additional provider-specific metadata. They are passed through
     to the provider from the AI SDK and enable provider-specific
     functionality that can be fully encapsulated in the provider.
-     */
-    providerOptions?: ProviderOptions;
-    /**
-    @deprecated Use `providerOptions` instead.
      */
     experimental_providerMetadata?: ProviderMetadata;
 }) => Promise<void> | void;
@@ -1317,6 +1326,12 @@ functionality that can be fully encapsulated in the provider.
      */
     experimental_providerMetadata?: ProviderMetadata;
     /**
+    Callback that is invoked when an error occurs during streaming.
+    You can use it to log errors.
+    The stream processing will pause until the callback promise is resolved.
+     */
+    onError?: StreamObjectOnErrorCallback;
+    /**
     Callback that is called when the LLM response and the final object validation are finished.
      */
     onFinish?: StreamObjectOnFinishCallback<OBJECT>;
@@ -1388,6 +1403,12 @@ functionality that can be fully encapsulated in the provider.
      */
     experimental_providerMetadata?: ProviderMetadata;
     /**
+    Callback that is invoked when an error occurs during streaming.
+    You can use it to log errors.
+    The stream processing will pause until the callback promise is resolved.
+     */
+    onError?: StreamObjectOnErrorCallback;
+    /**
     Callback that is called when the LLM response and the final object validation are finished.
      */
     onFinish?: StreamObjectOnFinishCallback<Array<ELEMENT>>;
@@ -1433,6 +1454,12 @@ functionality that can be fully encapsulated in the provider.
      */
     experimental_providerMetadata?: ProviderMetadata;
     /**
+    Callback that is invoked when an error occurs during streaming.
+    You can use it to log errors.
+    The stream processing will pause until the callback promise is resolved.
+     */
+    onError?: StreamObjectOnErrorCallback;
+    /**
     Callback that is called when the LLM response and the final object validation are finished.
      */
     onFinish?: StreamObjectOnFinishCallback<JSONValue>;
package/dist/index.js
CHANGED
@@ -2211,9 +2211,7 @@ var arrayOutputStrategy = (schema) => {
         }
         case "text-delta":
         case "finish":
-          break;
         case "error":
-          controller.error(chunk.error);
           break;
         default: {
           const _exhaustiveCheck = chunk;
@@ -2980,6 +2978,7 @@ function streamObject({
   experimental_telemetry: telemetry,
   experimental_providerMetadata,
   providerOptions = experimental_providerMetadata,
+  onError,
   onFinish,
   _internal: {
     generateId: generateId3 = originalGenerateId2,
@@ -3014,6 +3013,7 @@ function streamObject({
     schemaDescription,
     providerOptions,
     mode,
+    onError,
     onFinish,
     generateId: generateId3,
     currentDate,
@@ -3036,6 +3036,7 @@ var DefaultStreamObjectResult = class {
     schemaDescription,
     providerOptions,
     mode,
+    onError,
     onFinish,
     generateId: generateId3,
     currentDate,
@@ -3047,7 +3048,6 @@ var DefaultStreamObjectResult = class {
     this.warningsPromise = new DelayedPromise();
     this.requestPromise = new DelayedPromise();
     this.responsePromise = new DelayedPromise();
-    this.stitchableStream = createStitchableStream();
     const { maxRetries, retry } = prepareRetries({
       maxRetries: maxRetriesArg
     });
@@ -3059,6 +3059,16 @@ var DefaultStreamObjectResult = class {
     });
     const tracer = getTracer(telemetry);
     const self = this;
+    const stitchableStream = createStitchableStream();
+    const eventProcessor = new TransformStream({
+      transform(chunk, controller) {
+        controller.enqueue(chunk);
+        if (chunk.type === "error") {
+          onError == null ? void 0 : onError({ error: chunk.error });
+        }
+      }
+    });
+    this.baseStream = stitchableStream.stream.pipeThrough(eventProcessor);
     recordSpan({
       name: "ai.streamObject",
       attributes: selectTelemetryAttributes({
@@ -3402,25 +3412,26 @@ var DefaultStreamObjectResult = class {
                 experimental_providerMetadata: providerMetadata
               }));
             } catch (error2) {
-              controller.error(error2);
+              controller.enqueue({ type: "error", error: error2 });
             } finally {
               rootSpan.end();
             }
           }
         })
       );
-      self.stitchableStream.addStream(transformedStream);
+      stitchableStream.addStream(transformedStream);
     }
   }).catch((error) => {
-    self.stitchableStream.addStream(
+    stitchableStream.addStream(
       new ReadableStream({
         start(controller) {
-          controller.error(error);
+          controller.enqueue({ type: "error", error });
+          controller.close();
         }
       })
     );
   }).finally(() => {
-    self.stitchableStream.close();
+    stitchableStream.close();
   });
   this.outputStrategy = outputStrategy;
 }
@@ -3444,7 +3455,7 @@ var DefaultStreamObjectResult = class {
   }
   get partialObjectStream() {
     return createAsyncIterableStream(
-      this.stitchableStream.stream.pipeThrough(
+      this.baseStream.pipeThrough(
         new TransformStream({
           transform(chunk, controller) {
             switch (chunk.type) {
@@ -3453,9 +3464,7 @@ var DefaultStreamObjectResult = class {
               break;
             case "text-delta":
             case "finish":
-              break;
             case "error":
-              controller.error(chunk.error);
               break;
             default: {
               const _exhaustiveCheck = chunk;
@@ -3468,13 +3477,11 @@ var DefaultStreamObjectResult = class {
     );
   }
   get elementStream() {
-    return this.outputStrategy.createElementStream(
-      this.stitchableStream.stream
-    );
+    return this.outputStrategy.createElementStream(this.baseStream);
   }
   get textStream() {
     return createAsyncIterableStream(
-      this.stitchableStream.stream.pipeThrough(
+      this.baseStream.pipeThrough(
         new TransformStream({
           transform(chunk, controller) {
             switch (chunk.type) {
@@ -3483,9 +3490,7 @@ var DefaultStreamObjectResult = class {
               break;
             case "object":
             case "finish":
-              break;
             case "error":
-              controller.error(chunk.error);
               break;
             default: {
               const _exhaustiveCheck = chunk;
@@ -3498,7 +3503,7 @@ var DefaultStreamObjectResult = class {
     );
   }
   get fullStream() {
-    return createAsyncIterableStream(this.stitchableStream.stream);
+    return createAsyncIterableStream(this.baseStream);
   }
   pipeTextStreamToResponse(response, init) {
     writeToServerResponse({
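The runtime change above is the substance of the 4.1.31 entry: error chunks are no longer turned into stream errors via `controller.error(...)`; they are enqueued as `{ type: "error", error }` parts, forwarded to `onError`, and filtered out of `partialObjectStream`, `textStream`, and `elementStream`. Consumers of `fullStream` therefore see error parts instead of a rejected iteration. A minimal consumption sketch under that assumption (provider and model id are illustrative, not part of this diff):

```ts
import { streamObject } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package
import { z } from 'zod';

async function main() {
  const result = streamObject({
    model: openai('gpt-4o-mini'), // assumed model id
    schema: z.object({ colors: z.array(z.string()) }),
    prompt: 'List three colors.',
    onError({ error }) {
      // As of 4.1.31, streaming errors surface here by default.
      console.error('stream error:', error);
    },
  });

  for await (const part of result.fullStream) {
    if (part.type === 'error') {
      // Error parts now flow through the stream instead of aborting it.
      console.error('error part:', part.error);
    } else if (part.type === 'object') {
      console.log('partial object:', part.object);
    }
  }
}

main().catch(console.error);
```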