ai 4.0.8 → 4.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.d.mts +66 -16
- package/dist/index.d.ts +66 -16
- package/dist/index.js +334 -191
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +302 -161
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -48,32 +48,200 @@ __export(streams_exports, {
   UnsupportedFunctionalityError: () => import_provider13.UnsupportedFunctionalityError,
   convertToCoreMessages: () => convertToCoreMessages,
   cosineSimilarity: () => cosineSimilarity,
-
+  createDataStream: () => createDataStream,
+  createDataStreamResponse: () => createDataStreamResponse,
   embed: () => embed,
   embedMany: () => embedMany,
   experimental_createProviderRegistry: () => experimental_createProviderRegistry,
   experimental_customProvider: () => experimental_customProvider,
   experimental_wrapLanguageModel: () => experimental_wrapLanguageModel,
-  formatAssistantStreamPart: () =>
-  formatDataStreamPart: () =>
+  formatAssistantStreamPart: () => import_ui_utils14.formatAssistantStreamPart,
+  formatDataStreamPart: () => import_ui_utils14.formatDataStreamPart,
   generateId: () => import_provider_utils12.generateId,
   generateObject: () => generateObject,
   generateText: () => generateText,
-  jsonSchema: () =>
-  parseAssistantStreamPart: () =>
-  parseDataStreamPart: () =>
-
-
+  jsonSchema: () => import_ui_utils9.jsonSchema,
+  parseAssistantStreamPart: () => import_ui_utils14.parseAssistantStreamPart,
+  parseDataStreamPart: () => import_ui_utils14.parseDataStreamPart,
+  pipeDataStreamToResponse: () => pipeDataStreamToResponse,
+  processDataStream: () => import_ui_utils14.processDataStream,
+  processTextStream: () => import_ui_utils14.processTextStream,
   streamObject: () => streamObject,
   streamText: () => streamText,
   tool: () => tool
 });
 module.exports = __toCommonJS(streams_exports);
-var
+var import_ui_utils14 = require("@ai-sdk/ui-utils");
 var import_provider_utils12 = require("@ai-sdk/provider-utils");

 // core/index.ts
-var
+var import_ui_utils9 = require("@ai-sdk/ui-utils");
+
+// core/data-stream/create-data-stream.ts
+var import_ui_utils = require("@ai-sdk/ui-utils");
+function createDataStream({
+  execute,
+  onError = () => "An error occurred."
+  // mask error messages for safety by default
+}) {
+  let controller;
+  const ongoingStreamPromises = [];
+  const stream = new ReadableStream({
+    start(controllerArg) {
+      controller = controllerArg;
+    }
+  });
+  try {
+    const result = execute({
+      writeData(data) {
+        controller.enqueue((0, import_ui_utils.formatDataStreamPart)("data", [data]));
+      },
+      writeMessageAnnotation(annotation) {
+        controller.enqueue(
+          (0, import_ui_utils.formatDataStreamPart)("message_annotations", [annotation])
+        );
+      },
+      merge(streamArg) {
+        ongoingStreamPromises.push(
+          (async () => {
+            const reader = streamArg.getReader();
+            while (true) {
+              const { done, value } = await reader.read();
+              if (done)
+                break;
+              controller.enqueue(value);
+            }
+          })().catch((error) => {
+            controller.enqueue((0, import_ui_utils.formatDataStreamPart)("error", onError(error)));
+          })
+        );
+      },
+      onError
+    });
+    if (result) {
+      result.catch((error) => {
+        controller.enqueue((0, import_ui_utils.formatDataStreamPart)("error", onError(error)));
+      });
+    }
+  } catch (error) {
+    controller.enqueue((0, import_ui_utils.formatDataStreamPart)("error", onError(error)));
+  }
+  const waitForStreams = new Promise(async (resolve) => {
+    while (ongoingStreamPromises.length > 0) {
+      await ongoingStreamPromises.shift();
+    }
+    resolve();
+  });
+  waitForStreams.finally(() => {
+    controller.close();
+  });
+  return stream;
+}
+
+// core/util/prepare-response-headers.ts
+function prepareResponseHeaders(headers, {
+  contentType,
+  dataStreamVersion
+}) {
+  const responseHeaders = new Headers(headers != null ? headers : {});
+  if (!responseHeaders.has("Content-Type")) {
+    responseHeaders.set("Content-Type", contentType);
+  }
+  if (dataStreamVersion !== void 0) {
+    responseHeaders.set("X-Vercel-AI-Data-Stream", dataStreamVersion);
+  }
+  return responseHeaders;
+}
+
+// core/data-stream/create-data-stream-response.ts
+function createDataStreamResponse({
+  status,
+  statusText,
+  headers,
+  execute,
+  onError
+}) {
+  return new Response(
+    createDataStream({ execute, onError }).pipeThrough(new TextEncoderStream()),
+    {
+      status,
+      statusText,
+      headers: prepareResponseHeaders(headers, {
+        contentType: "text/plain; charset=utf-8",
+        dataStreamVersion: "v1"
+      })
+    }
+  );
+}
+
+// core/util/prepare-outgoing-http-headers.ts
+function prepareOutgoingHttpHeaders(headers, {
+  contentType,
+  dataStreamVersion
+}) {
+  const outgoingHeaders = {};
+  if (headers != null) {
+    for (const [key, value] of Object.entries(headers)) {
+      outgoingHeaders[key] = value;
+    }
+  }
+  if (outgoingHeaders["Content-Type"] == null) {
+    outgoingHeaders["Content-Type"] = contentType;
+  }
+  if (dataStreamVersion !== void 0) {
+    outgoingHeaders["X-Vercel-AI-Data-Stream"] = dataStreamVersion;
+  }
+  return outgoingHeaders;
+}
+
+// core/util/write-to-server-response.ts
+function writeToServerResponse({
+  response,
+  status,
+  statusText,
+  headers,
+  stream
+}) {
+  response.writeHead(status != null ? status : 200, statusText, headers);
+  const reader = stream.getReader();
+  const read = async () => {
+    try {
+      while (true) {
+        const { done, value } = await reader.read();
+        if (done)
+          break;
+        response.write(value);
+      }
+    } catch (error) {
+      throw error;
+    } finally {
+      response.end();
+    }
+  };
+  read();
+}
+
+// core/data-stream/pipe-data-stream-to-response.ts
+function pipeDataStreamToResponse(response, {
+  status,
+  statusText,
+  headers,
+  execute,
+  onError
+}) {
+  writeToServerResponse({
+    response,
+    status,
+    statusText,
+    headers: prepareOutgoingHttpHeaders(headers, {
+      contentType: "text/plain; charset=utf-8",
+      dataStreamVersion: "v1"
+    }),
+    stream: createDataStream({ execute, onError }).pipeThrough(
+      new TextEncoderStream()
+    )
+  });
+}

 // errors/invalid-argument-error.ts
 var import_provider = require("@ai-sdk/provider");
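The hunk above carries the bulk of the new surface in 4.0.9: `createDataStream`, `createDataStreamResponse`, and `pipeDataStreamToResponse`. A minimal usage sketch based only on the call shapes visible in this hunk — the route-handler framing, status value, and payloads are assumptions, not part of the package:

```ts
// Sketch only: writeData / writeMessageAnnotation / merge are the writer
// methods that createDataStream passes to `execute` in the hunk above.
import { createDataStreamResponse } from "ai";

export async function POST(req: Request): Promise<Response> {
  return createDataStreamResponse({
    status: 200, // assumption: ordinary Response init values
    execute: async (writer) => {
      writer.writeData({ stage: "started" });            // emitted as a "data" part
      writer.writeMessageAnnotation({ source: "demo" }); // emitted as "message_annotations"
      // writer.merge(otherDataStream);                  // forward another data stream
    },
    // without this, errors are masked as "An error occurred." (the default above)
    onError: (error) => (error instanceof Error ? error.message : String(error)),
  });
}
```

`pipeDataStreamToResponse` takes the same `execute`/`onError` pair but writes the encoded stream to a Node.js server response (`writeHead`/`write`/`end`) instead of returning a `Response`.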
@@ -1566,21 +1734,6 @@ function calculateLanguageModelUsage({
   };
 }

-// core/util/prepare-response-headers.ts
-function prepareResponseHeaders(headers, {
-  contentType,
-  dataStreamVersion
-}) {
-  const responseHeaders = new Headers(headers != null ? headers : {});
-  if (!responseHeaders.has("Content-Type")) {
-    responseHeaders.set("Content-Type", contentType);
-  }
-  if (dataStreamVersion !== void 0) {
-    responseHeaders.set("X-Vercel-AI-Data-Stream", dataStreamVersion);
-  }
-  return responseHeaders;
-}
-
 // core/generate-object/inject-json-instruction.ts
 var DEFAULT_SCHEMA_PREFIX = "JSON schema:";
 var DEFAULT_SCHEMA_SUFFIX = "You MUST answer with a JSON object that matches the JSON schema above.";
@@ -1622,7 +1775,7 @@ _a7 = symbol7;
 // core/generate-object/output-strategy.ts
 var import_provider10 = require("@ai-sdk/provider");
 var import_provider_utils4 = require("@ai-sdk/provider-utils");
-var
+var import_ui_utils2 = require("@ai-sdk/ui-utils");

 // core/util/async-iterable-stream.ts
 function createAsyncIterableStream(source, transformer) {
@@ -1839,9 +1992,9 @@ function getOutputStrategy({
 }) {
   switch (output) {
     case "object":
-      return objectOutputStrategy((0,
+      return objectOutputStrategy((0, import_ui_utils2.asSchema)(schema));
     case "array":
-      return arrayOutputStrategy((0,
+      return arrayOutputStrategy((0, import_ui_utils2.asSchema)(schema));
     case "enum":
       return enumOutputStrategy(enumValues);
     case "no-schema":
@@ -2348,7 +2501,7 @@ var DefaultGenerateObjectResult = class {

 // core/generate-object/stream-object.ts
 var import_provider_utils6 = require("@ai-sdk/provider-utils");
-var
+var import_ui_utils3 = require("@ai-sdk/ui-utils");

 // util/delayed-promise.ts
 var DelayedPromise = class {
@@ -2476,53 +2629,6 @@ function now() {
   return (_b = (_a11 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a11.now()) != null ? _b : Date.now();
 }

-// core/util/prepare-outgoing-http-headers.ts
-function prepareOutgoingHttpHeaders(headers, {
-  contentType,
-  dataStreamVersion
-}) {
-  const outgoingHeaders = {};
-  if (headers != null) {
-    for (const [key, value] of Object.entries(headers)) {
-      outgoingHeaders[key] = value;
-    }
-  }
-  if (outgoingHeaders["Content-Type"] == null) {
-    outgoingHeaders["Content-Type"] = contentType;
-  }
-  if (dataStreamVersion !== void 0) {
-    outgoingHeaders["X-Vercel-AI-Data-Stream"] = dataStreamVersion;
-  }
-  return outgoingHeaders;
-}
-
-// core/util/write-to-server-response.ts
-function writeToServerResponse({
-  response,
-  status,
-  statusText,
-  headers,
-  stream
-}) {
-  response.writeHead(status != null ? status : 200, statusText, headers);
-  const reader = stream.getReader();
-  const read = async () => {
-    try {
-      while (true) {
-        const { done, value } = await reader.read();
-        if (done)
-          break;
-        response.write(value);
-      }
-    } catch (error) {
-      throw error;
-    } finally {
-      response.end();
-    }
-  };
-  read();
-}
-
 // core/generate-object/stream-object.ts
 var originalGenerateId2 = (0, import_provider_utils6.createIdGenerator)({ prefix: "aiobj", size: 24 });
 function streamObject({
@@ -2823,8 +2929,8 @@ var DefaultStreamObjectResult = class {
       if (typeof chunk === "string") {
         accumulatedText += chunk;
         textDelta += chunk;
-        const { value: currentObjectJson, state: parseState } = (0,
-        if (currentObjectJson !== void 0 && !(0,
+        const { value: currentObjectJson, state: parseState } = (0, import_ui_utils3.parsePartialJson)(accumulatedText);
+        if (currentObjectJson !== void 0 && !(0, import_ui_utils3.isDeepEqualData)(latestObjectJson, currentObjectJson)) {
           const validationResult = outputStrategy.validatePartialResult({
             value: currentObjectJson,
             textDelta,
@@ -2832,7 +2938,7 @@ var DefaultStreamObjectResult = class {
             isFirstDelta,
             isFinalDelta: parseState === "successful-parse"
           });
-          if (validationResult.success && !(0,
+          if (validationResult.success && !(0, import_ui_utils3.isDeepEqualData)(
             latestObject,
             validationResult.value.partial
           )) {
@@ -3118,7 +3224,7 @@ var NoSuchToolError = class extends import_provider12.AISDKError {
 _a9 = symbol9;

 // core/prompt/prepare-tools-and-tool-choice.ts
-var
+var import_ui_utils4 = require("@ai-sdk/ui-utils");

 // core/util/is-non-empty-object.ts
 function isNonEmptyObject(object2) {
@@ -3150,7 +3256,7 @@ function prepareToolsAndToolChoice({
         type: "function",
         name: name11,
         description: tool2.description,
-        parameters: (0,
+        parameters: (0, import_ui_utils4.asSchema)(tool2.parameters).jsonSchema
       };
     case "provider-defined":
       return {
@@ -3184,7 +3290,7 @@ function removeTextAfterLastWhitespace(text2) {

 // core/generate-text/parse-tool-call.ts
 var import_provider_utils7 = require("@ai-sdk/provider-utils");
-var
+var import_ui_utils5 = require("@ai-sdk/ui-utils");
 function parseToolCall({
   toolCall,
   tools
@@ -3200,7 +3306,7 @@ function parseToolCall({
       availableTools: Object.keys(tools)
     });
   }
-  const schema = (0,
+  const schema = (0, import_ui_utils5.asSchema)(tool2.parameters);
   const parseResult = toolCall.args.trim() === "" ? (0, import_provider_utils7.safeValidateTypes)({ value: {}, schema }) : (0, import_provider_utils7.safeParseJSON)({ text: toolCall.args, schema });
   if (parseResult.success === false) {
     throw new InvalidToolArgumentsError({
@@ -3639,7 +3745,7 @@ var DefaultGenerateTextResult = class {

 // core/generate-text/stream-text.ts
 var import_provider_utils9 = require("@ai-sdk/provider-utils");
-var
+var import_ui_utils7 = require("@ai-sdk/ui-utils");

 // core/util/merge-streams.ts
 function mergeStreams(stream1, stream2) {
@@ -3730,7 +3836,7 @@ function mergeStreams(stream1, stream2) {
 }

 // core/generate-text/run-tools-transformation.ts
-var
+var import_ui_utils6 = require("@ai-sdk/ui-utils");
 function runToolsTransformation({
   tools,
   generatorStream,
@@ -3814,7 +3920,7 @@ function runToolsTransformation({
         });
         controller.enqueue(toolCall);
         if (tool2.execute != null) {
-          const toolExecutionId = (0,
+          const toolExecutionId = (0, import_ui_utils6.generateId)();
           outstandingToolResults.add(toolExecutionId);
           recordSpan({
             name: "ai.toolCall",
@@ -4578,12 +4684,12 @@ var DefaultStreamTextResult = class {
         const chunkType = chunk.type;
         switch (chunkType) {
           case "text-delta": {
-            controller.enqueue((0,
+            controller.enqueue((0, import_ui_utils7.formatDataStreamPart)("text", chunk.textDelta));
             break;
           }
           case "tool-call-streaming-start": {
             controller.enqueue(
-              (0,
+              (0, import_ui_utils7.formatDataStreamPart)("tool_call_streaming_start", {
                 toolCallId: chunk.toolCallId,
                 toolName: chunk.toolName
               })
@@ -4592,7 +4698,7 @@ var DefaultStreamTextResult = class {
           }
           case "tool-call-delta": {
             controller.enqueue(
-              (0,
+              (0, import_ui_utils7.formatDataStreamPart)("tool_call_delta", {
                 toolCallId: chunk.toolCallId,
                 argsTextDelta: chunk.argsTextDelta
               })
@@ -4601,7 +4707,7 @@ var DefaultStreamTextResult = class {
           }
           case "tool-call": {
             controller.enqueue(
-              (0,
+              (0, import_ui_utils7.formatDataStreamPart)("tool_call", {
                 toolCallId: chunk.toolCallId,
                 toolName: chunk.toolName,
                 args: chunk.args
@@ -4611,7 +4717,7 @@ var DefaultStreamTextResult = class {
           }
           case "tool-result": {
             controller.enqueue(
-              (0,
+              (0, import_ui_utils7.formatDataStreamPart)("tool_result", {
                 toolCallId: chunk.toolCallId,
                 result: chunk.result
               })
@@ -4620,13 +4726,13 @@ var DefaultStreamTextResult = class {
           }
           case "error": {
             controller.enqueue(
-              (0,
+              (0, import_ui_utils7.formatDataStreamPart)("error", getErrorMessage3(chunk.error))
             );
             break;
           }
           case "step-finish": {
             controller.enqueue(
-              (0,
+              (0, import_ui_utils7.formatDataStreamPart)("finish_step", {
                 finishReason: chunk.finishReason,
                 usage: sendUsage ? {
                   promptTokens: chunk.usage.promptTokens,
@@ -4639,7 +4745,7 @@ var DefaultStreamTextResult = class {
           }
           case "finish": {
             controller.enqueue(
-              (0,
+              (0, import_ui_utils7.formatDataStreamPart)("finish_message", {
                 finishReason: chunk.finishReason,
                 usage: sendUsage ? {
                   promptTokens: chunk.usage.promptTokens,
@@ -4656,7 +4762,7 @@ var DefaultStreamTextResult = class {
           }
         }
       });
-    return this.fullStream.pipeThrough(callbackTransformer).pipeThrough(streamPartsTransformer)
+    return this.fullStream.pipeThrough(callbackTransformer).pipeThrough(streamPartsTransformer);
   }
   pipeDataStreamToResponse(response, {
     status,
@@ -4688,13 +4794,21 @@ var DefaultStreamTextResult = class {
       stream: this.textStream.pipeThrough(new TextEncoderStream())
     });
   }
+  // TODO breaking change 5.0: remove pipeThrough(new TextEncoderStream())
   toDataStream(options) {
     const stream = this.toDataStreamInternal({
       getErrorMessage: options == null ? void 0 : options.getErrorMessage,
       sendUsage: options == null ? void 0 : options.sendUsage
-    });
+    }).pipeThrough(new TextEncoderStream());
     return (options == null ? void 0 : options.data) ? mergeStreams(options == null ? void 0 : options.data.stream, stream) : stream;
   }
+  mergeIntoDataStream(writer) {
+    writer.merge(
+      this.toDataStreamInternal({
+        getErrorMessage: writer.onError
+      })
+    );
+  }
   toDataStreamResponse({
     headers,
     status,
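The `mergeIntoDataStream(writer)` method added above is the counterpart to the `createDataStream` writer from the first hunk: it pushes a `streamText` result into a data stream that is already being written. A hedged sketch of how the two fit together — the provider import, model id, and prompt are assumptions, only the two new APIs come from this diff:

```ts
import { createDataStreamResponse, streamText } from "ai";
import { openai } from "@ai-sdk/openai"; // assumed provider; any supported model works

export function POST(req: Request): Response {
  return createDataStreamResponse({
    execute: (writer) => {
      writer.writeData({ stage: "calling-model" }); // custom "data" part
      const result = streamText({
        model: openai("gpt-4o-mini"),
        prompt: "Write a one-line greeting.",
      });
      // new in 4.0.9: forward the text / tool-call / finish parts of the
      // streamText result into the same data stream response
      result.mergeIntoDataStream(writer);
    },
  });
}
```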
@@ -4733,7 +4847,7 @@ __export(output_exports, {
   text: () => text
 });
 var import_provider_utils10 = require("@ai-sdk/provider-utils");
-var
+var import_ui_utils8 = require("@ai-sdk/ui-utils");
 var text = () => ({
   type: "text",
   responseFormat: () => ({ type: "text" }),
@@ -4747,7 +4861,7 @@ var text = () => ({
 var object = ({
   schema: inputSchema
 }) => {
-  const schema = (0,
+  const schema = (0, import_ui_utils8.asSchema)(inputSchema);
   return {
     type: "object",
     responseFormat: ({ model }) => ({
@@ -4948,7 +5062,7 @@ function magnitude(vector) {
 }

 // streams/assistant-response.ts
-var
+var import_ui_utils10 = require("@ai-sdk/ui-utils");
 function AssistantResponse({ threadId, messageId }, process2) {
   const stream = new ReadableStream({
     async start(controller) {
@@ -4957,20 +5071,20 @@ function AssistantResponse({ threadId, messageId }, process2) {
       const sendMessage = (message) => {
         controller.enqueue(
           textEncoder.encode(
-            (0,
+            (0, import_ui_utils10.formatAssistantStreamPart)("assistant_message", message)
           )
         );
       };
       const sendDataMessage = (message) => {
         controller.enqueue(
           textEncoder.encode(
-            (0,
+            (0, import_ui_utils10.formatAssistantStreamPart)("data_message", message)
           )
         );
       };
       const sendError = (errorMessage) => {
         controller.enqueue(
-          textEncoder.encode((0,
+          textEncoder.encode((0, import_ui_utils10.formatAssistantStreamPart)("error", errorMessage))
         );
       };
       const forwardStream = async (stream2) => {
@@ -4981,7 +5095,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
             case "thread.message.created": {
               controller.enqueue(
                 textEncoder.encode(
-                  (0,
+                  (0, import_ui_utils10.formatAssistantStreamPart)("assistant_message", {
                     id: value.data.id,
                     role: "assistant",
                     content: [{ type: "text", text: { value: "" } }]
@@ -4995,7 +5109,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
               if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
                 controller.enqueue(
                   textEncoder.encode(
-                    (0,
+                    (0, import_ui_utils10.formatAssistantStreamPart)("text", content.text.value)
                   )
                 );
               }
@@ -5012,7 +5126,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
       };
       controller.enqueue(
         textEncoder.encode(
-          (0,
+          (0, import_ui_utils10.formatAssistantStreamPart)("assistant_control_data", {
             threadId,
             messageId
           })
@@ -5046,9 +5160,11 @@ function AssistantResponse({ threadId, messageId }, process2) {
 // streams/langchain-adapter.ts
 var langchain_adapter_exports = {};
 __export(langchain_adapter_exports, {
+  mergeIntoDataStream: () => mergeIntoDataStream,
   toDataStream: () => toDataStream,
   toDataStreamResponse: () => toDataStreamResponse
 });
+var import_ui_utils11 = require("@ai-sdk/ui-utils");

 // streams/stream-callbacks.ts
 function createCallbacksTransformer(callbacks = {}) {
@@ -5079,87 +5195,8 @@ function createCallbacksTransformer(callbacks = {}) {
   });
 }

-// streams/stream-data.ts
-var import_ui_utils10 = require("@ai-sdk/ui-utils");
-
-// util/constants.ts
-var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
-
-// streams/stream-data.ts
-var StreamData = class {
-  constructor() {
-    this.encoder = new TextEncoder();
-    this.controller = null;
-    this.isClosed = false;
-    this.warningTimeout = null;
-    const self = this;
-    this.stream = new ReadableStream({
-      start: async (controller) => {
-        self.controller = controller;
-        if (process.env.NODE_ENV === "development") {
-          self.warningTimeout = setTimeout(() => {
-            console.warn(
-              "The data stream is hanging. Did you forget to close it with `data.close()`?"
-            );
-          }, HANGING_STREAM_WARNING_TIME_MS);
-        }
-      },
-      pull: (controller) => {
-      },
-      cancel: (reason) => {
-        this.isClosed = true;
-      }
-    });
-  }
-  async close() {
-    if (this.isClosed) {
-      throw new Error("Data Stream has already been closed.");
-    }
-    if (!this.controller) {
-      throw new Error("Stream controller is not initialized.");
-    }
-    this.controller.close();
-    this.isClosed = true;
-    if (this.warningTimeout) {
-      clearTimeout(this.warningTimeout);
-    }
-  }
-  append(value) {
-    if (this.isClosed) {
-      throw new Error("Data Stream has already been closed.");
-    }
-    if (!this.controller) {
-      throw new Error("Stream controller is not initialized.");
-    }
-    this.controller.enqueue(
-      this.encoder.encode((0, import_ui_utils10.formatDataStreamPart)("data", [value]))
-    );
-  }
-  appendMessageAnnotation(value) {
-    if (this.isClosed) {
-      throw new Error("Data Stream has already been closed.");
-    }
-    if (!this.controller) {
-      throw new Error("Stream controller is not initialized.");
-    }
-    this.controller.enqueue(
-      this.encoder.encode((0, import_ui_utils10.formatDataStreamPart)("message_annotations", [value]))
-    );
-  }
-};
-function createStreamDataTransformer() {
-  const encoder = new TextEncoder();
-  const decoder = new TextDecoder();
-  return new TransformStream({
-    transform: async (chunk, controller) => {
-      const message = decoder.decode(chunk);
-      controller.enqueue(encoder.encode((0, import_ui_utils10.formatDataStreamPart)("text", message)));
-    }
-  });
-}
-
 // streams/langchain-adapter.ts
-function
+function toDataStreamInternal(stream, callbacks) {
   return stream.pipeThrough(
     new TransformStream({
       transform: async (value, controller) => {
@@ -5180,11 +5217,25 @@ function toDataStream(stream, callbacks) {
         forwardAIMessageChunk(value, controller);
       }
     })
-  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(new TextDecoderStream()).pipeThrough(
+    new TransformStream({
+      transform: async (chunk, controller) => {
+        controller.enqueue((0, import_ui_utils11.formatDataStreamPart)("text", chunk));
+      }
+    })
+  );
+}
+function toDataStream(stream, callbacks) {
+  return toDataStreamInternal(stream, callbacks).pipeThrough(
+    new TextEncoderStream()
+  );
 }
 function toDataStreamResponse(stream, options) {
   var _a11;
-  const dataStream =
+  const dataStream = toDataStreamInternal(
+    stream,
+    options == null ? void 0 : options.callbacks
+  ).pipeThrough(new TextEncoderStream());
   const data = options == null ? void 0 : options.data;
   const init = options == null ? void 0 : options.init;
   const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
@@ -5197,6 +5248,9 @@ function toDataStreamResponse(stream, options) {
     })
   });
 }
+function mergeIntoDataStream(stream, options) {
+  options.dataStream.merge(toDataStreamInternal(stream, options.callbacks));
+}
 function forwardAIMessageChunk(chunk, controller) {
   if (typeof chunk.content === "string") {
     controller.enqueue(chunk.content);
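The `mergeIntoDataStream(stream, options)` helper added to the LangChain adapter above takes the incoming stream plus an options object whose `dataStream` is a `createDataStream` writer. A hedged sketch — the `LangChainAdapter` namespace name and the shape of the incoming LangChain stream are assumptions, only the helper's signature comes from this diff:

```ts
import { createDataStreamResponse, LangChainAdapter } from "ai";

// `langchainStream` would typically come from a LangChain .stream() call;
// a plain ReadableStream of text chunks is assumed here.
export function toResponse(langchainStream: ReadableStream<string>): Response {
  return createDataStreamResponse({
    execute: (writer) => {
      writer.writeData({ source: "langchain" });
      // forwards the LangChain chunks into the data stream as "text" parts
      LangChainAdapter.mergeIntoDataStream(langchainStream, { dataStream: writer });
    },
  });
}
```

The LlamaIndex adapter in the next hunk gains the same `mergeIntoDataStream` shape.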
@@ -5213,11 +5267,13 @@ function forwardAIMessageChunk(chunk, controller) {
 // streams/llamaindex-adapter.ts
 var llamaindex_adapter_exports = {};
 __export(llamaindex_adapter_exports, {
+  mergeIntoDataStream: () => mergeIntoDataStream2,
   toDataStream: () => toDataStream2,
   toDataStreamResponse: () => toDataStreamResponse2
 });
 var import_provider_utils11 = require("@ai-sdk/provider-utils");
-
+var import_ui_utils12 = require("@ai-sdk/ui-utils");
+function toDataStreamInternal2(stream, callbacks) {
   const trimStart = trimStartOfStream();
   return (0, import_provider_utils11.convertAsyncIteratorToReadableStream)(stream[Symbol.asyncIterator]()).pipeThrough(
     new TransformStream({
@@ -5225,12 +5281,25 @@ function toDataStream2(stream, callbacks) {
         controller.enqueue(trimStart(message.delta));
       }
     })
-  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(new TextDecoderStream()).pipeThrough(
+    new TransformStream({
+      transform: async (chunk, controller) => {
+        controller.enqueue((0, import_ui_utils12.formatDataStreamPart)("text", chunk));
+      }
+    })
+  );
+}
+function toDataStream2(stream, callbacks) {
+  return toDataStreamInternal2(stream, callbacks).pipeThrough(
+    new TextEncoderStream()
+  );
 }
 function toDataStreamResponse2(stream, options = {}) {
   var _a11;
   const { init, data, callbacks } = options;
-  const dataStream =
+  const dataStream = toDataStreamInternal2(stream, callbacks).pipeThrough(
+    new TextEncoderStream()
+  );
   const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
   return new Response(responseStream, {
     status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
@@ -5241,6 +5310,9 @@ function toDataStreamResponse2(stream, options = {}) {
     })
   });
 }
+function mergeIntoDataStream2(stream, options) {
+  options.dataStream.merge(toDataStreamInternal2(stream, options.callbacks));
+}
 function trimStartOfStream() {
   let isStreamStart = true;
   return (text2) => {
@@ -5252,6 +5324,75 @@ function trimStartOfStream() {
     return text2;
   };
 }
+
+// streams/stream-data.ts
+var import_ui_utils13 = require("@ai-sdk/ui-utils");
+
+// util/constants.ts
+var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
+
+// streams/stream-data.ts
+var StreamData = class {
+  constructor() {
+    this.encoder = new TextEncoder();
+    this.controller = null;
+    this.isClosed = false;
+    this.warningTimeout = null;
+    const self = this;
+    this.stream = new ReadableStream({
+      start: async (controller) => {
+        self.controller = controller;
+        if (process.env.NODE_ENV === "development") {
+          self.warningTimeout = setTimeout(() => {
+            console.warn(
+              "The data stream is hanging. Did you forget to close it with `data.close()`?"
+            );
+          }, HANGING_STREAM_WARNING_TIME_MS);
+        }
+      },
+      pull: (controller) => {
+      },
+      cancel: (reason) => {
+        this.isClosed = true;
+      }
+    });
+  }
+  async close() {
+    if (this.isClosed) {
+      throw new Error("Data Stream has already been closed.");
+    }
+    if (!this.controller) {
+      throw new Error("Stream controller is not initialized.");
+    }
+    this.controller.close();
+    this.isClosed = true;
+    if (this.warningTimeout) {
+      clearTimeout(this.warningTimeout);
+    }
+  }
+  append(value) {
+    if (this.isClosed) {
+      throw new Error("Data Stream has already been closed.");
+    }
+    if (!this.controller) {
+      throw new Error("Stream controller is not initialized.");
+    }
+    this.controller.enqueue(
+      this.encoder.encode((0, import_ui_utils13.formatDataStreamPart)("data", [value]))
+    );
+  }
+  appendMessageAnnotation(value) {
+    if (this.isClosed) {
+      throw new Error("Data Stream has already been closed.");
+    }
+    if (!this.controller) {
+      throw new Error("Stream controller is not initialized.");
+    }
+    this.controller.enqueue(
+      this.encoder.encode((0, import_ui_utils13.formatDataStreamPart)("message_annotations", [value]))
+    );
+  }
+};
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   AISDKError,
@@ -5282,7 +5423,8 @@ function trimStartOfStream() {
   UnsupportedFunctionalityError,
   convertToCoreMessages,
   cosineSimilarity,
-
+  createDataStream,
+  createDataStreamResponse,
   embed,
   embedMany,
   experimental_createProviderRegistry,
@@ -5296,6 +5438,7 @@ function trimStartOfStream() {
   jsonSchema,
   parseAssistantStreamPart,
   parseDataStreamPart,
+  pipeDataStreamToResponse,
   processDataStream,
   processTextStream,
   streamObject,