ai 4.0.0-canary.3 → 4.0.0-canary.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +39 -0
- package/dist/index.d.mts +45 -374
- package/dist/index.d.ts +45 -374
- package/dist/index.js +87 -338
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +61 -303
- package/dist/index.mjs.map +1 -1
- package/package.json +6 -9
- package/react/dist/index.d.ts +0 -17
- package/rsc/dist/rsc-server.mjs +0 -6
- package/rsc/dist/rsc-server.mjs.map +1 -1
- package/react/dist/index.server.d.mts +0 -17
- package/react/dist/index.server.d.ts +0 -17
- package/react/dist/index.server.js +0 -50
- package/react/dist/index.server.js.map +0 -1
- package/react/dist/index.server.mjs +0 -23
- package/react/dist/index.server.mjs.map +0 -1
package/dist/index.js
CHANGED
@@ -21,7 +21,6 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  var streams_exports = {};
  __export(streams_exports, {
  AISDKError: () => import_provider13.AISDKError,
- AIStream: () => AIStream,
  APICallError: () => import_provider13.APICallError,
  AssistantResponse: () => AssistantResponse,
  DownloadError: () => DownloadError,
@@ -43,14 +42,11 @@ __export(streams_exports, {
  NoSuchProviderError: () => NoSuchProviderError,
  NoSuchToolError: () => NoSuchToolError,
  RetryError: () => RetryError,
- StreamData: () =>
- StreamingTextResponse: () => StreamingTextResponse,
+ StreamData: () => StreamData,
  TypeValidationError: () => import_provider13.TypeValidationError,
  UnsupportedFunctionalityError: () => import_provider13.UnsupportedFunctionalityError,
  convertToCoreMessages: () => convertToCoreMessages,
  cosineSimilarity: () => cosineSimilarity,
- createCallbacksTransformer: () => createCallbacksTransformer,
- createEventStreamTransformer: () => createEventStreamTransformer,
  createStreamDataTransformer: () => createStreamDataTransformer,
  embed: () => embed,
  embedMany: () => embedMany,
@@ -59,32 +55,25 @@ __export(streams_exports, {
  experimental_createModelRegistry: () => experimental_createModelRegistry,
  experimental_createProviderRegistry: () => experimental_createProviderRegistry,
  experimental_customProvider: () => experimental_customProvider,
- experimental_generateObject: () => experimental_generateObject,
- experimental_generateText: () => experimental_generateText,
- experimental_streamObject: () => experimental_streamObject,
- experimental_streamText: () => experimental_streamText,
  experimental_wrapLanguageModel: () => experimental_wrapLanguageModel,
- formatStreamPart: () =>
- generateId: () =>
+ formatStreamPart: () => import_ui_utils10.formatStreamPart,
+ generateId: () => import_provider_utils11.generateId,
  generateObject: () => generateObject,
  generateText: () => generateText,
- jsonSchema: () =>
- parseStreamPart: () =>
- processDataProtocolResponse: () =>
- readDataStream: () =>
- readableFromAsyncIterable: () => readableFromAsyncIterable,
+ jsonSchema: () => import_ui_utils7.jsonSchema,
+ parseStreamPart: () => import_ui_utils10.parseStreamPart,
+ processDataProtocolResponse: () => import_ui_utils10.processDataProtocolResponse,
+ readDataStream: () => import_ui_utils10.readDataStream,
  streamObject: () => streamObject,
  streamText: () => streamText,
-
- tool: () => tool,
- trimStartOfStreamHelper: () => trimStartOfStreamHelper
+ tool: () => tool
  });
  module.exports = __toCommonJS(streams_exports);
- var
- var
+ var import_ui_utils10 = require("@ai-sdk/ui-utils");
+ var import_provider_utils11 = require("@ai-sdk/provider-utils");

  // core/index.ts
- var
+ var import_ui_utils7 = require("@ai-sdk/ui-utils");

  // util/retry-with-exponential-backoff.ts
  var import_provider2 = require("@ai-sdk/provider");
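The export-map hunks above drop the legacy streaming exports (`AIStream`, `StreamingTextResponse`, `createCallbacksTransformer`, `createEventStreamTransformer`, `readableFromAsyncIterable`, `trimStartOfStreamHelper`) and the `experimental_*` aliases of `generateText`, `streamText`, `generateObject`, and `streamObject`, while `formatStreamPart`, `parseStreamPart`, `processDataProtocolResponse`, `readDataStream`, and `jsonSchema` become thin re-exports of `@ai-sdk/ui-utils`. A minimal consumer-side sketch of the resulting import surface; this is an illustration, not part of the diff:

```js
// These named exports no longer exist in ai@4.0.0-canary.5 and will fail to import:
//   AIStream, StreamingTextResponse, createCallbacksTransformer,
//   createEventStreamTransformer, readableFromAsyncIterable,
//   trimStartOfStreamHelper, experimental_generateText, experimental_streamText,
//   experimental_generateObject, experimental_streamObject

// The stable names remain available (formatStreamPart and friends now come
// from @ai-sdk/ui-utils under the hood):
import {
  generateText,
  streamText,
  generateObject,
  streamObject,
  StreamData,
  formatStreamPart,
  parseStreamPart,
} from "ai";
```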
@@ -2147,9 +2136,6 @@ async function generateObject({
  "ai.response.timestamp": responseData.timestamp.toISOString(),
  "ai.usage.promptTokens": result2.usage.promptTokens,
  "ai.usage.completionTokens": result2.usage.completionTokens,
- // deprecated:
- "ai.finishReason": result2.finishReason,
- "ai.result.object": { output: () => result2.text },
  // standardized gen-ai llm span attributes:
  "gen_ai.response.finish_reasons": [result2.finishReason],
  "gen_ai.response.id": responseData.id,
@@ -2254,9 +2240,6 @@ async function generateObject({
  "ai.response.timestamp": responseData.timestamp.toISOString(),
  "ai.usage.promptTokens": result2.usage.promptTokens,
  "ai.usage.completionTokens": result2.usage.completionTokens,
- // deprecated:
- "ai.finishReason": result2.finishReason,
- "ai.result.object": { output: () => objectText },
  // standardized gen-ai llm span attributes:
  "gen_ai.response.finish_reasons": [result2.finishReason],
  "gen_ai.response.id": responseData.id,
@@ -2310,12 +2293,7 @@ async function generateObject({
  output: () => JSON.stringify(validationResult.value)
  },
  "ai.usage.promptTokens": usage.promptTokens,
- "ai.usage.completionTokens": usage.completionTokens
- // deprecated:
- "ai.finishReason": finishReason,
- "ai.result.object": {
- output: () => JSON.stringify(validationResult.value)
- }
+ "ai.usage.completionTokens": usage.completionTokens
  }
  })
  );
@@ -2344,9 +2322,6 @@ var DefaultGenerateObjectResult = class {
  this.experimental_providerMetadata = options.providerMetadata;
  this.response = options.response;
  this.request = options.request;
- this.rawResponse = {
- headers: options.response.headers
- };
  this.logprobs = options.logprobs;
  }
  toJsonResponse(init) {
@@ -2359,7 +2334,6 @@ var DefaultGenerateObjectResult = class {
  });
  }
  };
- var experimental_generateObject = generateObject;

  // core/generate-object/stream-object.ts
  var import_provider_utils6 = require("@ai-sdk/provider-utils");
@@ -2722,7 +2696,6 @@ var DefaultStreamObjectResult = class {
  generateId: generateId3
  }) {
  this.warnings = warnings;
- this.rawResponse = rawResponse;
  this.outputStrategy = outputStrategy;
  this.request = Promise.resolve(request);
  this.objectPromise = new DelayedPromise();
@@ -2855,9 +2828,6 @@ var DefaultStreamObjectResult = class {
  "ai.response.timestamp": response.timestamp.toISOString(),
  "ai.usage.promptTokens": finalUsage.promptTokens,
  "ai.usage.completionTokens": finalUsage.completionTokens,
- // deprecated
- "ai.finishReason": finishReason,
- "ai.result.object": { output: () => JSON.stringify(object) },
  // standardized gen-ai llm span attributes:
  "gen_ai.response.finish_reasons": [finishReason],
  "gen_ai.response.id": response.id,
@@ -2876,9 +2846,7 @@ var DefaultStreamObjectResult = class {
  "ai.usage.completionTokens": finalUsage.completionTokens,
  "ai.response.object": {
  output: () => JSON.stringify(object)
- }
- // deprecated
- "ai.result.object": { output: () => JSON.stringify(object) }
+ }
  }
  })
  );
@@ -2886,7 +2854,6 @@ var DefaultStreamObjectResult = class {
  usage: finalUsage,
  object,
  error,
- rawResponse,
  response: {
  ...response,
  headers: rawResponse == null ? void 0 : rawResponse.headers
@@ -2979,7 +2946,6 @@ var DefaultStreamObjectResult = class {
  });
  }
  };
- var experimental_streamObject = streamObject;

  // core/generate-text/generate-text.ts
  var import_provider_utils8 = require("@ai-sdk/provider-utils");
@@ -3010,12 +2976,6 @@ var InvalidToolArgumentsError = class extends import_provider11.AISDKError {
  static isInstance(error) {
  return import_provider11.AISDKError.hasMarker(error, marker8);
  }
- /**
- * @deprecated use `isInstance` instead
- */
- static isInvalidToolArgumentsError(error) {
- return error instanceof Error && error.name === name8 && typeof error.toolName === "string" && typeof error.toolArgs === "string";
- }
  };
  _a8 = symbol8;

@@ -3191,9 +3151,7 @@ async function generateText({
  maxRetries,
  abortSignal,
  headers,
-
- maxToolRoundtrips = maxAutomaticRoundtrips,
- maxSteps = maxToolRoundtrips != null ? maxToolRoundtrips + 1 : 1,
+ maxSteps = 1,
  experimental_continuationSteps,
  experimental_continueSteps: continueSteps = experimental_continuationSteps != null ? experimental_continuationSteps : false,
  experimental_telemetry: telemetry,
@@ -3347,14 +3305,6 @@ async function generateText({
  "ai.response.timestamp": responseData.timestamp.toISOString(),
  "ai.usage.promptTokens": result.usage.promptTokens,
  "ai.usage.completionTokens": result.usage.completionTokens,
- // deprecated:
- "ai.finishReason": result.finishReason,
- "ai.result.text": {
- output: () => result.text
- },
- "ai.result.toolCalls": {
- output: () => JSON.stringify(result.toolCalls)
- },
  // standardized gen-ai llm span attributes:
  "gen_ai.response.finish_reasons": [result.finishReason],
  "gen_ai.response.id": responseData.id,
@@ -3457,15 +3407,7 @@ async function generateText({
  output: () => JSON.stringify(currentModelResponse.toolCalls)
  },
  "ai.usage.promptTokens": currentModelResponse.usage.promptTokens,
- "ai.usage.completionTokens": currentModelResponse.usage.completionTokens
- // deprecated:
- "ai.finishReason": currentModelResponse.finishReason,
- "ai.result.text": {
- output: () => currentModelResponse.text
- },
- "ai.result.toolCalls": {
- output: () => JSON.stringify(currentModelResponse.toolCalls)
- }
+ "ai.usage.completionTokens": currentModelResponse.usage.completionTokens
  }
  })
  );
@@ -3561,19 +3503,15 @@ var DefaultGenerateTextResult = class {
  this.request = options.request;
  this.response = options.response;
  this.responseMessages = options.responseMessages;
- this.roundtrips = options.steps;
  this.steps = options.steps;
  this.experimental_providerMetadata = options.providerMetadata;
- this.rawResponse = {
- headers: options.response.headers
- };
  this.logprobs = options.logprobs;
  }
  };
- var experimental_generateText = generateText;

  // core/generate-text/stream-text.ts
  var import_provider_utils9 = require("@ai-sdk/provider-utils");
+ var import_ui_utils6 = require("@ai-sdk/ui-utils");

  // core/util/create-stitchable-stream.ts
  function createStitchableStream() {
@@ -3930,8 +3868,7 @@ async function streamText({
  maxRetries,
  abortSignal,
  headers,
-
- maxSteps = maxToolRoundtrips != null ? maxToolRoundtrips + 1 : 1,
+ maxSteps = 1,
  experimental_continueSteps: continueSteps = false,
  experimental_telemetry: telemetry,
  experimental_providerMetadata: providerMetadata,
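In both `generateText` and `streamText` the default is now a flat `maxSteps = 1`; the removed lines derived it from the deprecated `maxToolRoundtrips` (and `maxAutomaticRoundtrips`) options as `maxToolRoundtrips + 1`. A hedged sketch of passing the surviving option; the `@ai-sdk/openai` provider and the zod-based tool shape are assumptions carried over from the AI SDK 3.x documentation, not from this diff:

```js
import { generateText, tool } from "ai";
import { openai } from "@ai-sdk/openai"; // assumed provider package
import { z } from "zod";                 // assumed schema library

const { text, steps } = await generateText({
  model: openai("gpt-4o-mini"),
  // maxToolRoundtrips: 2,  // removed option; the old default mapped this to 3 steps
  maxSteps: 3,
  tools: {
    weather: tool({
      description: "Get the weather for a city",
      parameters: z.object({ city: z.string() }),
      execute: async ({ city }) => ({ city, tempC: 21 }),
    }),
  },
  prompt: "What is the weather in Berlin?",
});
```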
@@ -4224,14 +4161,10 @@ var DefaultStreamTextResult = class {
  const msToFirstChunk = now2() - startTimestamp;
  stepFirstChunk = false;
  doStreamSpan2.addEvent("ai.stream.firstChunk", {
- "ai.response.msToFirstChunk": msToFirstChunk
- // deprecated:
- "ai.stream.msToFirstChunk": msToFirstChunk
+ "ai.response.msToFirstChunk": msToFirstChunk
  });
  doStreamSpan2.setAttributes({
- "ai.response.msToFirstChunk": msToFirstChunk
- // deprecated:
- "ai.stream.msToFirstChunk": msToFirstChunk
+ "ai.response.msToFirstChunk": msToFirstChunk
  });
  }
  if (chunk.type === "text-delta" && chunk.textDelta.length === 0) {
@@ -4356,12 +4289,6 @@ var DefaultStreamTextResult = class {
  "ai.response.timestamp": stepResponse.timestamp.toISOString(),
  "ai.usage.promptTokens": stepUsage.promptTokens,
  "ai.usage.completionTokens": stepUsage.completionTokens,
- // deprecated
- "ai.finishReason": stepFinishReason,
- "ai.result.text": { output: () => stepText },
- "ai.result.toolCalls": {
- output: () => stepToolCallsJson
- },
  // standardized gen-ai llm span attributes:
  "gen_ai.response.finish_reasons": [stepFinishReason],
  "gen_ai.response.id": stepResponse.id,
@@ -4416,7 +4343,6 @@ var DefaultStreamTextResult = class {
  warnings: self.warnings,
  logprobs: stepLogProbs,
  request: stepRequest,
- rawResponse: self.rawResponse,
  response: {
  ...stepResponse,
  headers: (_a11 = self.rawResponse) == null ? void 0 : _a11.headers,
@@ -4477,13 +4403,7 @@ var DefaultStreamTextResult = class {
  output: () => stepToolCallsJson
  },
  "ai.usage.promptTokens": combinedUsage.promptTokens,
- "ai.usage.completionTokens": combinedUsage.completionTokens
- // deprecated
- "ai.finishReason": stepFinishReason,
- "ai.result.text": { output: () => fullStepText },
- "ai.result.toolCalls": {
- output: () => stepToolCallsJson
- }
+ "ai.usage.completionTokens": combinedUsage.completionTokens
  }
  })
  );
@@ -4513,7 +4433,6 @@ var DefaultStreamTextResult = class {
  // The type exposed to the users will be correctly inferred.
  toolResults: stepToolResults,
  request: stepRequest,
- rawResponse,
  response: {
  ...stepResponse,
  headers: rawResponse == null ? void 0 : rawResponse.headers,
@@ -4577,37 +4496,18 @@ var DefaultStreamTextResult = class {
  }
  });
  }
- toAIStream(callbacks = {}) {
- return this.toDataStreamInternal({ callbacks });
- }
  toDataStreamInternal({
- callbacks = {},
  getErrorMessage: getErrorMessage3 = () => "",
  // mask error messages for safety by default
  sendUsage = true
  } = {}) {
  let aggregatedResponse = "";
  const callbackTransformer = new TransformStream({
- async start() {
- if (callbacks.onStart)
- await callbacks.onStart();
- },
  async transform(chunk, controller) {
  controller.enqueue(chunk);
  if (chunk.type === "text-delta") {
-
- aggregatedResponse += textDelta;
- if (callbacks.onToken)
- await callbacks.onToken(textDelta);
- if (callbacks.onText)
- await callbacks.onText(textDelta);
+ aggregatedResponse += chunk.textDelta;
  }
- },
- async flush() {
- if (callbacks.onCompletion)
- await callbacks.onCompletion(aggregatedResponse);
- if (callbacks.onFinal)
- await callbacks.onFinal(aggregatedResponse);
  }
  });
  const streamPartsTransformer = new TransformStream({
@@ -4615,12 +4515,12 @@ var DefaultStreamTextResult = class {
  const chunkType = chunk.type;
  switch (chunkType) {
  case "text-delta": {
- controller.enqueue((0,
+ controller.enqueue((0, import_ui_utils6.formatStreamPart)("text", chunk.textDelta));
  break;
  }
  case "tool-call-streaming-start": {
  controller.enqueue(
- (0,
+ (0, import_ui_utils6.formatStreamPart)("tool_call_streaming_start", {
  toolCallId: chunk.toolCallId,
  toolName: chunk.toolName
  })
@@ -4629,7 +4529,7 @@ var DefaultStreamTextResult = class {
  }
  case "tool-call-delta": {
  controller.enqueue(
- (0,
+ (0, import_ui_utils6.formatStreamPart)("tool_call_delta", {
  toolCallId: chunk.toolCallId,
  argsTextDelta: chunk.argsTextDelta
  })
@@ -4638,7 +4538,7 @@ var DefaultStreamTextResult = class {
  }
  case "tool-call": {
  controller.enqueue(
- (0,
+ (0, import_ui_utils6.formatStreamPart)("tool_call", {
  toolCallId: chunk.toolCallId,
  toolName: chunk.toolName,
  args: chunk.args
@@ -4648,7 +4548,7 @@ var DefaultStreamTextResult = class {
  }
  case "tool-result": {
  controller.enqueue(
- (0,
+ (0, import_ui_utils6.formatStreamPart)("tool_result", {
  toolCallId: chunk.toolCallId,
  result: chunk.result
  })
@@ -4657,13 +4557,13 @@ var DefaultStreamTextResult = class {
  }
  case "error": {
  controller.enqueue(
- (0,
+ (0, import_ui_utils6.formatStreamPart)("error", getErrorMessage3(chunk.error))
  );
  break;
  }
  case "step-finish": {
  controller.enqueue(
- (0,
+ (0, import_ui_utils6.formatStreamPart)("finish_step", {
  finishReason: chunk.finishReason,
  usage: sendUsage ? {
  promptTokens: chunk.usage.promptTokens,
@@ -4676,7 +4576,7 @@ var DefaultStreamTextResult = class {
  }
  case "finish": {
  controller.enqueue(
- (0,
+ (0, import_ui_utils6.formatStreamPart)("finish_message", {
  finishReason: chunk.finishReason,
  usage: sendUsage ? {
  promptTokens: chunk.usage.promptTokens,
@@ -4695,9 +4595,6 @@ var DefaultStreamTextResult = class {
  });
  return this.fullStream.pipeThrough(callbackTransformer).pipeThrough(streamPartsTransformer).pipeThrough(new TextEncoderStream());
  }
- pipeAIStreamToResponse(response, init) {
- return this.pipeDataStreamToResponse(response, init);
- }
  pipeDataStreamToResponse(response, options) {
  const init = options == null ? void 0 : "init" in options ? options.init : {
  headers: "headers" in options ? options.headers : void 0,
@@ -4729,9 +4626,6 @@ var DefaultStreamTextResult = class {
  stream: this.textStream.pipeThrough(new TextEncoderStream())
  });
  }
- toAIStreamResponse(options) {
- return this.toDataStreamResponse(options);
- }
  toDataStream(options) {
  const stream = this.toDataStreamInternal({
  getErrorMessage: options == null ? void 0 : options.getErrorMessage,
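The `toAIStream`, `pipeAIStreamToResponse`, and `toAIStreamResponse` wrappers are removed from the `streamText` result; the `toDataStream`, `pipeDataStreamToResponse`, and `toDataStreamResponse` methods they delegated to (visible in the surrounding context) remain, and the per-stream callbacks that `toAIStream` accepted are gone along with the `callbacks` option of `toDataStreamInternal`. A small illustrative sketch, where `result` stands for any `streamText` result:

```js
// Method mapping after this diff:
//   result.toAIStream(callbacks)       -> result.toDataStream()              (callbacks no longer supported)
//   result.toAIStreamResponse(options) -> result.toDataStreamResponse(options)
//   result.pipeAIStreamToResponse(res) -> result.pipeDataStreamToResponse(res)

export function toResponse(result) {
  // was: return result.toAIStreamResponse();
  return result.toDataStreamResponse();
}
```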
@@ -4771,7 +4665,6 @@ var DefaultStreamTextResult = class {
  });
  }
  };
- var experimental_streamText = streamText;

  // core/middleware/wrap-language-model.ts
  var experimental_wrapLanguageModel = ({
@@ -4958,123 +4851,8 @@ function magnitude(vector) {
  return Math.sqrt(dotProduct(vector, vector));
  }

- // streams/ai-stream.ts
- var import_eventsource_parser = require("eventsource-parser");
- function createEventStreamTransformer(customParser) {
- const textDecoder = new TextDecoder();
- let eventSourceParser;
- return new TransformStream({
- async start(controller) {
- eventSourceParser = (0, import_eventsource_parser.createParser)(
- (event) => {
- if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
- // @see https://replicate.com/docs/streaming
- event.event === "done") {
- controller.terminate();
- return;
- }
- if ("data" in event) {
- const parsedMessage = customParser ? customParser(event.data, {
- event: event.event
- }) : event.data;
- if (parsedMessage)
- controller.enqueue(parsedMessage);
- }
- }
- );
- },
- transform(chunk) {
- eventSourceParser.feed(textDecoder.decode(chunk));
- }
- });
- }
- function createCallbacksTransformer(cb) {
- const textEncoder = new TextEncoder();
- let aggregatedResponse = "";
- const callbacks = cb || {};
- return new TransformStream({
- async start() {
- if (callbacks.onStart)
- await callbacks.onStart();
- },
- async transform(message, controller) {
- const content = typeof message === "string" ? message : message.content;
- controller.enqueue(textEncoder.encode(content));
- aggregatedResponse += content;
- if (callbacks.onToken)
- await callbacks.onToken(content);
- if (callbacks.onText && typeof message === "string") {
- await callbacks.onText(message);
- }
- },
- async flush() {
- if (callbacks.onCompletion) {
- await callbacks.onCompletion(aggregatedResponse);
- }
- }
- });
- }
- function trimStartOfStreamHelper() {
- let isStreamStart = true;
- return (text) => {
- if (isStreamStart) {
- text = text.trimStart();
- if (text)
- isStreamStart = false;
- }
- return text;
- };
- }
- function AIStream(response, customParser, callbacks) {
- if (!response.ok) {
- if (response.body) {
- const reader = response.body.getReader();
- return new ReadableStream({
- async start(controller) {
- const { done, value } = await reader.read();
- if (!done) {
- const errorText = new TextDecoder().decode(value);
- controller.error(new Error(`Response error: ${errorText}`));
- }
- }
- });
- } else {
- return new ReadableStream({
- start(controller) {
- controller.error(new Error("Response error: No response body"));
- }
- });
- }
- }
- const responseBodyStream = response.body || createEmptyReadableStream();
- return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
- }
- function createEmptyReadableStream() {
- return new ReadableStream({
- start(controller) {
- controller.close();
- }
- });
- }
- function readableFromAsyncIterable(iterable) {
- let it = iterable[Symbol.asyncIterator]();
- return new ReadableStream({
- async pull(controller) {
- const { done, value } = await it.next();
- if (done)
- controller.close();
- else
- controller.enqueue(value);
- },
- async cancel(reason) {
- var _a11;
- await ((_a11 = it.return) == null ? void 0 : _a11.call(it, reason));
- }
- });
- }
-
  // streams/assistant-response.ts
- var
+ var import_ui_utils8 = require("@ai-sdk/ui-utils");
  function AssistantResponse({ threadId, messageId }, process2) {
  const stream = new ReadableStream({
  async start(controller) {
@@ -5082,17 +4860,17 @@ function AssistantResponse({ threadId, messageId }, process2) {
  const textEncoder = new TextEncoder();
  const sendMessage = (message) => {
  controller.enqueue(
- textEncoder.encode((0,
+ textEncoder.encode((0, import_ui_utils8.formatStreamPart)("assistant_message", message))
  );
  };
  const sendDataMessage = (message) => {
  controller.enqueue(
- textEncoder.encode((0,
+ textEncoder.encode((0, import_ui_utils8.formatStreamPart)("data_message", message))
  );
  };
  const sendError = (errorMessage) => {
  controller.enqueue(
- textEncoder.encode((0,
+ textEncoder.encode((0, import_ui_utils8.formatStreamPart)("error", errorMessage))
  );
  };
  const forwardStream = async (stream2) => {
@@ -5103,7 +4881,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  case "thread.message.created": {
  controller.enqueue(
  textEncoder.encode(
- (0,
+ (0, import_ui_utils8.formatStreamPart)("assistant_message", {
  id: value.data.id,
  role: "assistant",
  content: [{ type: "text", text: { value: "" } }]
@@ -5117,7 +4895,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
  controller.enqueue(
  textEncoder.encode(
- (0,
+ (0, import_ui_utils8.formatStreamPart)("text", content.text.value)
  )
  );
  }
@@ -5134,7 +4912,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  };
  controller.enqueue(
  textEncoder.encode(
- (0,
+ (0, import_ui_utils8.formatStreamPart)("assistant_control_data", {
  threadId,
  messageId
  })
@@ -5176,14 +4954,40 @@ __export(langchain_adapter_exports, {
  toDataStreamResponse: () => toDataStreamResponse
  });

+ // streams/stream-callbacks.ts
+ function createCallbacksTransformer(callbacks = {}) {
+ const textEncoder = new TextEncoder();
+ let aggregatedResponse = "";
+ return new TransformStream({
+ async start() {
+ if (callbacks.onStart)
+ await callbacks.onStart();
+ },
+ async transform(message, controller) {
+ controller.enqueue(textEncoder.encode(message));
+ aggregatedResponse += message;
+ if (callbacks.onToken)
+ await callbacks.onToken(message);
+ if (callbacks.onText && typeof message === "string") {
+ await callbacks.onText(message);
+ }
+ },
+ async flush() {
+ if (callbacks.onCompletion) {
+ await callbacks.onCompletion(aggregatedResponse);
+ }
+ }
+ });
+ }
+
  // streams/stream-data.ts
- var
+ var import_ui_utils9 = require("@ai-sdk/ui-utils");

  // util/constants.ts
  var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;

  // streams/stream-data.ts
- var
+ var StreamData = class {
  constructor() {
  this.encoder = new TextEncoder();
  this.controller = null;
@@ -5229,7 +5033,7 @@ var StreamData2 = class {
  throw new Error("Stream controller is not initialized.");
  }
  this.controller.enqueue(
- this.encoder.encode((0,
+ this.encoder.encode((0, import_ui_utils9.formatStreamPart)("data", [value]))
  );
  }
  appendMessageAnnotation(value) {
@@ -5240,7 +5044,7 @@ var StreamData2 = class {
  throw new Error("Stream controller is not initialized.");
  }
  this.controller.enqueue(
- this.encoder.encode((0,
+ this.encoder.encode((0, import_ui_utils9.formatStreamPart)("message_annotations", [value]))
  );
  }
  };
@@ -5250,11 +5054,11 @@ function createStreamDataTransformer() {
  return new TransformStream({
  transform: async (chunk, controller) => {
  const message = decoder.decode(chunk);
- controller.enqueue(encoder.encode((0,
+ controller.enqueue(encoder.encode((0, import_ui_utils9.formatStreamPart)("text", message)));
  }
  });
  }
- var experimental_StreamData = class extends
+ var experimental_StreamData = class extends StreamData {
  };

  // streams/langchain-adapter.ts
@@ -5318,8 +5122,16 @@ __export(llamaindex_adapter_exports, {
  toDataStream: () => toDataStream2,
  toDataStreamResponse: () => toDataStreamResponse2
  });
+ var import_provider_utils10 = require("@ai-sdk/provider-utils");
  function toDataStream2(stream, callbacks) {
-
+ const trimStart = trimStartOfStream();
+ return (0, import_provider_utils10.convertAsyncIteratorToReadableStream)(stream[Symbol.asyncIterator]()).pipeThrough(
+ new TransformStream({
+ async transform(message, controller) {
+ controller.enqueue(trimStart(message.delta));
+ }
+ })
+ ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
  }
  function toDataStreamResponse2(stream, options = {}) {
  var _a11;
@@ -5335,73 +5147,20 @@ function toDataStreamResponse2(stream, options = {}) {
  })
  });
  }
- function
-
-
-
-
-
-
- if (done) {
- controller.close();
- return;
- }
- const text = trimStartOfStream((_a11 = value.delta) != null ? _a11 : "");
- if (text) {
- controller.enqueue(text);
- }
+ function trimStartOfStream() {
+ let isStreamStart = true;
+ return (text) => {
+ if (isStreamStart) {
+ text = text.trimStart();
+ if (text)
+ isStreamStart = false;
  }
-
- }
-
- // streams/stream-to-response.ts
- function streamToResponse(res, response, init, data) {
- var _a11;
- response.writeHead((_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200, {
- "Content-Type": "text/plain; charset=utf-8",
- ...init == null ? void 0 : init.headers
- });
- let processedStream = res;
- if (data) {
- processedStream = mergeStreams(data.stream, res);
- }
- const reader = processedStream.getReader();
- function read() {
- reader.read().then(({ done, value }) => {
- if (done) {
- response.end();
- return;
- }
- response.write(value);
- read();
- });
- }
- read();
+ return text;
+ };
  }
-
- // streams/streaming-text-response.ts
- var StreamingTextResponse = class extends Response {
- constructor(res, init, data) {
- let processedStream = res;
- if (data) {
- processedStream = mergeStreams(data.stream, res);
- }
- super(processedStream, {
- ...init,
- status: 200,
- headers: prepareResponseHeaders(init, {
- contentType: "text/plain; charset=utf-8"
- })
- });
- }
- };
-
- // streams/index.ts
- var generateId2 = import_provider_utils10.generateId;
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  AISDKError,
- AIStream,
  APICallError,
  AssistantResponse,
  DownloadError,
@@ -5424,13 +5183,10 @@ var generateId2 = import_provider_utils10.generateId;
  NoSuchToolError,
  RetryError,
  StreamData,
- StreamingTextResponse,
  TypeValidationError,
  UnsupportedFunctionalityError,
  convertToCoreMessages,
  cosineSimilarity,
- createCallbacksTransformer,
- createEventStreamTransformer,
  createStreamDataTransformer,
  embed,
  embedMany,
@@ -5439,10 +5195,6 @@ var generateId2 = import_provider_utils10.generateId;
  experimental_createModelRegistry,
  experimental_createProviderRegistry,
  experimental_customProvider,
- experimental_generateObject,
- experimental_generateText,
- experimental_streamObject,
- experimental_streamText,
  experimental_wrapLanguageModel,
  formatStreamPart,
  generateId,
@@ -5452,11 +5204,8 @@ var generateId2 = import_provider_utils10.generateId;
  parseStreamPart,
  processDataProtocolResponse,
  readDataStream,
- readableFromAsyncIterable,
  streamObject,
  streamText,
-
- tool,
- trimStartOfStreamHelper
+ tool
  });
  //# sourceMappingURL=index.js.map