ai 4.0.0-canary.4 → 4.0.0-canary.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +28 -0
- package/dist/index.d.mts +15 -102
- package/dist/index.d.ts +15 -102
- package/dist/index.js +50 -158
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +50 -156
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -10
package/dist/index.js
CHANGED
@@ -43,7 +43,6 @@ __export(streams_exports, {
   NoSuchToolError: () => NoSuchToolError,
   RetryError: () => RetryError,
   StreamData: () => StreamData,
-  StreamingTextResponse: () => StreamingTextResponse,
   TypeValidationError: () => import_provider13.TypeValidationError,
   UnsupportedFunctionalityError: () => import_provider13.UnsupportedFunctionalityError,
   convertToCoreMessages: () => convertToCoreMessages,
@@ -67,7 +66,6 @@ __export(streams_exports, {
   readDataStream: () => import_ui_utils10.readDataStream,
   streamObject: () => streamObject,
   streamText: () => streamText,
-  streamToResponse: () => streamToResponse,
   tool: () => tool
 });
 module.exports = __toCommonJS(streams_exports);
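
Both export-map hunks above track the removal of StreamingTextResponse and streamToResponse; the implementations themselves are deleted near the end of this file (see the final hunks below). For callers, the replacement lives on the streamText result. A minimal migration sketch in TypeScript, assuming the 4.0-canary streamText API and an @ai-sdk/openai provider; the model choice and route-handler shape are illustrative, not taken from this diff:

import { streamText } from "ai";
import { openai } from "@ai-sdk/openai";

export async function POST(req: Request): Promise<Response> {
  const { prompt } = await req.json();
  const result = await streamText({
    model: openai("gpt-4o-mini"),
    prompt,
  });
  // Before: return new StreamingTextResponse(stream);
  // After: the stream result constructs the Response itself.
  return result.toDataStreamResponse();
}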
@@ -1557,19 +1555,18 @@ function calculateLanguageModelUsage(usage) {
 }
 
 // core/util/prepare-response-headers.ts
-function prepareResponseHeaders(init, {
+function prepareResponseHeaders(headers, {
   contentType,
   dataStreamVersion
 }) {
-  var _a11;
-  const headers = new Headers((_a11 = init == null ? void 0 : init.headers) != null ? _a11 : {});
-  if (!headers.has("Content-Type")) {
-    headers.set("Content-Type", contentType);
+  const responseHeaders = new Headers(headers != null ? headers : {});
+  if (!responseHeaders.has("Content-Type")) {
+    responseHeaders.set("Content-Type", contentType);
   }
   if (dataStreamVersion !== void 0) {
-    headers.set("X-Vercel-AI-Data-Stream", dataStreamVersion);
+    responseHeaders.set("X-Vercel-AI-Data-Stream", dataStreamVersion);
   }
-  return headers;
+  return responseHeaders;
 }
 
 // core/generate-object/inject-json-instruction.ts
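
The signature change above is the pattern behind most of the remaining hunks: prepareResponseHeaders now receives the headers value itself instead of the whole ResponseInit, so every call site below switches from passing init to passing init == null ? void 0 : init.headers. Restated in TypeScript directly from this hunk (the helper is internal to the bundle, not an export of "ai"):

function prepareResponseHeaders(
  headers: HeadersInit | undefined,
  { contentType, dataStreamVersion }: { contentType: string; dataStreamVersion?: "v1" }
): Headers {
  const responseHeaders = new Headers(headers ?? {});
  // A caller-supplied Content-Type wins; the default only fills the gap.
  if (!responseHeaders.has("Content-Type")) {
    responseHeaders.set("Content-Type", contentType);
  }
  if (dataStreamVersion !== undefined) {
    responseHeaders.set("X-Vercel-AI-Data-Stream", dataStreamVersion);
  }
  return responseHeaders;
}

Taking only the headers keeps status and statusText handling at the call sites, which now forward them explicitly.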
@@ -2138,9 +2135,6 @@ async function generateObject({
             "ai.response.timestamp": responseData.timestamp.toISOString(),
             "ai.usage.promptTokens": result2.usage.promptTokens,
             "ai.usage.completionTokens": result2.usage.completionTokens,
-            // deprecated:
-            "ai.finishReason": result2.finishReason,
-            "ai.result.object": { output: () => result2.text },
             // standardized gen-ai llm span attributes:
             "gen_ai.response.finish_reasons": [result2.finishReason],
             "gen_ai.response.id": responseData.id,
@@ -2245,9 +2239,6 @@ async function generateObject({
             "ai.response.timestamp": responseData.timestamp.toISOString(),
             "ai.usage.promptTokens": result2.usage.promptTokens,
             "ai.usage.completionTokens": result2.usage.completionTokens,
-            // deprecated:
-            "ai.finishReason": result2.finishReason,
-            "ai.result.object": { output: () => objectText },
             // standardized gen-ai llm span attributes:
             "gen_ai.response.finish_reasons": [result2.finishReason],
             "gen_ai.response.id": responseData.id,
@@ -2301,12 +2292,7 @@ async function generateObject({
             output: () => JSON.stringify(validationResult.value)
           },
           "ai.usage.promptTokens": usage.promptTokens,
-          "ai.usage.completionTokens": usage.completionTokens,
-          // deprecated:
-          "ai.finishReason": finishReason,
-          "ai.result.object": {
-            output: () => JSON.stringify(validationResult.value)
-          }
+          "ai.usage.completionTokens": usage.completionTokens
         }
       })
     );
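
The three hunks above, together with their counterparts in streamObject, generateText, and streamText further down, drop the deprecated telemetry attributes ai.finishReason, ai.result.text, ai.result.object, and ai.result.toolCalls; only the ai.response.*, ai.usage.*, and standardized gen_ai.* attributes survive. A hedged sketch of a consumer reading the surviving names with OpenTelemetry; the processor wiring is an assumption about your tracing setup, and only the attribute keys come from this diff:

import type { Context } from "@opentelemetry/api";
import type { ReadableSpan, Span, SpanProcessor } from "@opentelemetry/sdk-trace-base";

class AiTelemetryLogger implements SpanProcessor {
  onStart(_span: Span, _context: Context): void {}
  onEnd(span: ReadableSpan): void {
    const attrs = span.attributes;
    // "ai.finishReason" and "ai.result.*" no longer exist in canary.6;
    // read the standardized replacements instead.
    const finishReasons = attrs["gen_ai.response.finish_reasons"];
    if (finishReasons !== undefined) {
      console.log({
        finishReasons,
        promptTokens: attrs["ai.usage.promptTokens"],
        completionTokens: attrs["ai.usage.completionTokens"],
      });
    }
  }
  shutdown(): Promise<void> { return Promise.resolve(); }
  forceFlush(): Promise<void> { return Promise.resolve(); }
}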
@@ -2335,16 +2321,13 @@ var DefaultGenerateObjectResult = class {
     this.experimental_providerMetadata = options.providerMetadata;
     this.response = options.response;
     this.request = options.request;
-    this.rawResponse = {
-      headers: options.response.headers
-    };
     this.logprobs = options.logprobs;
   }
   toJsonResponse(init) {
     var _a11;
     return new Response(JSON.stringify(this.object), {
       status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
-      headers: prepareResponseHeaders(init, {
+      headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
         contentType: "application/json; charset=utf-8"
       })
     });
@@ -2415,23 +2398,23 @@ function now() {
 }
 
 // core/util/prepare-outgoing-http-headers.ts
-function prepareOutgoingHttpHeaders(init, {
+function prepareOutgoingHttpHeaders(headers, {
   contentType,
   dataStreamVersion
 }) {
-  const headers = {};
-  if ((init == null ? void 0 : init.headers) != null) {
-    for (const [key, value] of Object.entries(init.headers)) {
-      headers[key] = value;
+  const outgoingHeaders = {};
+  if (headers != null) {
+    for (const [key, value] of Object.entries(headers)) {
+      outgoingHeaders[key] = value;
     }
   }
-  if (headers["Content-Type"] == null) {
-    headers["Content-Type"] = contentType;
+  if (outgoingHeaders["Content-Type"] == null) {
+    outgoingHeaders["Content-Type"] = contentType;
   }
   if (dataStreamVersion !== void 0) {
-    headers["X-Vercel-AI-Data-Stream"] = dataStreamVersion;
+    outgoingHeaders["X-Vercel-AI-Data-Stream"] = dataStreamVersion;
   }
-  return headers;
+  return outgoingHeaders;
 }
 
 // core/util/write-to-server-response.ts
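
prepareOutgoingHttpHeaders is the Node ServerResponse twin of the hunk above, producing a plain object for response.writeHead rather than a Headers instance, with the same rename from init to a bare headers argument. Restated in TypeScript from this hunk (again internal to the bundle, not exported):

import type { OutgoingHttpHeaders } from "node:http";

function prepareOutgoingHttpHeaders(
  headers: OutgoingHttpHeaders | undefined,
  { contentType, dataStreamVersion }: { contentType: string; dataStreamVersion?: "v1" }
): OutgoingHttpHeaders {
  const outgoingHeaders: OutgoingHttpHeaders = {};
  if (headers != null) {
    for (const [key, value] of Object.entries(headers)) {
      outgoingHeaders[key] = value;
    }
  }
  // As in the Headers-based variant, a caller-supplied Content-Type wins.
  if (outgoingHeaders["Content-Type"] == null) {
    outgoingHeaders["Content-Type"] = contentType;
  }
  if (dataStreamVersion !== undefined) {
    outgoingHeaders["X-Vercel-AI-Data-Stream"] = dataStreamVersion;
  }
  return outgoingHeaders;
}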
@@ -2712,7 +2695,6 @@ var DefaultStreamObjectResult = class {
     generateId: generateId3
   }) {
     this.warnings = warnings;
-    this.rawResponse = rawResponse;
     this.outputStrategy = outputStrategy;
     this.request = Promise.resolve(request);
     this.objectPromise = new DelayedPromise();
@@ -2845,9 +2827,6 @@ var DefaultStreamObjectResult = class {
             "ai.response.timestamp": response.timestamp.toISOString(),
             "ai.usage.promptTokens": finalUsage.promptTokens,
             "ai.usage.completionTokens": finalUsage.completionTokens,
-            // deprecated
-            "ai.finishReason": finishReason,
-            "ai.result.object": { output: () => JSON.stringify(object) },
             // standardized gen-ai llm span attributes:
             "gen_ai.response.finish_reasons": [finishReason],
             "gen_ai.response.id": response.id,
@@ -2866,9 +2845,7 @@ var DefaultStreamObjectResult = class {
           "ai.usage.completionTokens": finalUsage.completionTokens,
           "ai.response.object": {
             output: () => JSON.stringify(object)
-          },
-          // deprecated
-          "ai.result.object": { output: () => JSON.stringify(object) }
+          }
         }
       })
     );
@@ -2876,7 +2853,6 @@ var DefaultStreamObjectResult = class {
         usage: finalUsage,
         object,
         error,
-        rawResponse,
         response: {
           ...response,
           headers: rawResponse == null ? void 0 : rawResponse.headers
@@ -2953,7 +2929,7 @@ var DefaultStreamObjectResult = class {
       response,
       status: init == null ? void 0 : init.status,
       statusText: init == null ? void 0 : init.statusText,
-      headers: prepareOutgoingHttpHeaders(init, {
+      headers: prepareOutgoingHttpHeaders(init == null ? void 0 : init.headers, {
         contentType: "text/plain; charset=utf-8"
       }),
       stream: this.textStream.pipeThrough(new TextEncoderStream())
@@ -2963,7 +2939,7 @@ var DefaultStreamObjectResult = class {
     var _a11;
     return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
       status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
-      headers: prepareResponseHeaders(init, {
+      headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
         contentType: "text/plain; charset=utf-8"
       })
     });
@@ -3328,14 +3304,6 @@ async function generateText({
             "ai.response.timestamp": responseData.timestamp.toISOString(),
             "ai.usage.promptTokens": result.usage.promptTokens,
             "ai.usage.completionTokens": result.usage.completionTokens,
-            // deprecated:
-            "ai.finishReason": result.finishReason,
-            "ai.result.text": {
-              output: () => result.text
-            },
-            "ai.result.toolCalls": {
-              output: () => JSON.stringify(result.toolCalls)
-            },
             // standardized gen-ai llm span attributes:
             "gen_ai.response.finish_reasons": [result.finishReason],
             "gen_ai.response.id": responseData.id,
@@ -3438,15 +3406,7 @@ async function generateText({
             output: () => JSON.stringify(currentModelResponse.toolCalls)
           },
           "ai.usage.promptTokens": currentModelResponse.usage.promptTokens,
-          "ai.usage.completionTokens": currentModelResponse.usage.completionTokens,
-          // deprecated:
-          "ai.finishReason": currentModelResponse.finishReason,
-          "ai.result.text": {
-            output: () => currentModelResponse.text
-          },
-          "ai.result.toolCalls": {
-            output: () => JSON.stringify(currentModelResponse.toolCalls)
-          }
+          "ai.usage.completionTokens": currentModelResponse.usage.completionTokens
         }
       })
     );
@@ -3544,9 +3504,6 @@ var DefaultGenerateTextResult = class {
     this.responseMessages = options.responseMessages;
     this.steps = options.steps;
     this.experimental_providerMetadata = options.providerMetadata;
-    this.rawResponse = {
-      headers: options.response.headers
-    };
     this.logprobs = options.logprobs;
   }
 };
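
With this hunk and the matching generateObject, streamObject, and streamText hunks, the rawResponse mirror is gone from results and stream parts; provider response headers are now read from the response object only. A sketch of the consumer-side change, assuming the canary generateText API (model and prompt are illustrative):

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const result = await generateText({
  model: openai("gpt-4o-mini"),
  prompt: "Say hello.",
});

// Before this diff: result.rawResponse?.headers
// After: headers live only on the response object.
console.log(result.response.headers);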
@@ -4203,14 +4160,10 @@ var DefaultStreamTextResult = class {
           const msToFirstChunk = now2() - startTimestamp;
           stepFirstChunk = false;
           doStreamSpan2.addEvent("ai.stream.firstChunk", {
-            "ai.response.msToFirstChunk": msToFirstChunk,
-            // deprecated:
-            "ai.stream.msToFirstChunk": msToFirstChunk
+            "ai.response.msToFirstChunk": msToFirstChunk
           });
           doStreamSpan2.setAttributes({
-            "ai.response.msToFirstChunk": msToFirstChunk,
-            // deprecated:
-            "ai.stream.msToFirstChunk": msToFirstChunk
+            "ai.response.msToFirstChunk": msToFirstChunk
           });
         }
         if (chunk.type === "text-delta" && chunk.textDelta.length === 0) {
@@ -4335,12 +4288,6 @@ var DefaultStreamTextResult = class {
             "ai.response.timestamp": stepResponse.timestamp.toISOString(),
             "ai.usage.promptTokens": stepUsage.promptTokens,
             "ai.usage.completionTokens": stepUsage.completionTokens,
-            // deprecated
-            "ai.finishReason": stepFinishReason,
-            "ai.result.text": { output: () => stepText },
-            "ai.result.toolCalls": {
-              output: () => stepToolCallsJson
-            },
             // standardized gen-ai llm span attributes:
             "gen_ai.response.finish_reasons": [stepFinishReason],
             "gen_ai.response.id": stepResponse.id,
@@ -4395,7 +4342,6 @@ var DefaultStreamTextResult = class {
           warnings: self.warnings,
           logprobs: stepLogProbs,
           request: stepRequest,
-          rawResponse: self.rawResponse,
           response: {
             ...stepResponse,
             headers: (_a11 = self.rawResponse) == null ? void 0 : _a11.headers,
@@ -4456,13 +4402,7 @@ var DefaultStreamTextResult = class {
             output: () => stepToolCallsJson
           },
           "ai.usage.promptTokens": combinedUsage.promptTokens,
-          "ai.usage.completionTokens": combinedUsage.completionTokens,
-          // deprecated
-          "ai.finishReason": stepFinishReason,
-          "ai.result.text": { output: () => fullStepText },
-          "ai.result.toolCalls": {
-            output: () => stepToolCallsJson
-          }
+          "ai.usage.completionTokens": combinedUsage.completionTokens
         }
       })
     );
@@ -4492,7 +4432,6 @@ var DefaultStreamTextResult = class {
         // The type exposed to the users will be correctly inferred.
         toolResults: stepToolResults,
         request: stepRequest,
-        rawResponse,
         response: {
           ...stepResponse,
           headers: rawResponse == null ? void 0 : rawResponse.headers,
@@ -4655,20 +4594,19 @@ var DefaultStreamTextResult = class {
     });
     return this.fullStream.pipeThrough(callbackTransformer).pipeThrough(streamPartsTransformer).pipeThrough(new TextEncoderStream());
   }
-  pipeDataStreamToResponse(response, options) {
-    const init = options == null ? void 0 : "init" in options ? options.init : {
-      headers: "headers" in options ? options.headers : void 0,
-      status: "status" in options ? options.status : void 0,
-      statusText: "statusText" in options ? options.statusText : void 0
-    };
-    const data = options == null ? void 0 : "data" in options ? options.data : void 0;
-    const getErrorMessage3 = options == null ? void 0 : "getErrorMessage" in options ? options.getErrorMessage : void 0;
-    const sendUsage = options == null ? void 0 : "sendUsage" in options ? options.sendUsage : void 0;
+  pipeDataStreamToResponse(response, {
+    status,
+    statusText,
+    headers,
+    data,
+    getErrorMessage: getErrorMessage3,
+    sendUsage
+  } = {}) {
     writeToServerResponse({
       response,
-      status: init == null ? void 0 : init.status,
-      statusText: init == null ? void 0 : init.statusText,
-      headers: prepareOutgoingHttpHeaders(init, {
+      status,
+      statusText,
+      headers: prepareOutgoingHttpHeaders(headers, {
         contentType: "text/plain; charset=utf-8",
         dataStreamVersion: "v1"
       }),
@@ -4680,7 +4618,7 @@ var DefaultStreamTextResult = class {
       response,
       status: init == null ? void 0 : init.status,
       statusText: init == null ? void 0 : init.statusText,
-      headers: prepareOutgoingHttpHeaders(init, {
+      headers: prepareOutgoingHttpHeaders(init == null ? void 0 : init.headers, {
         contentType: "text/plain; charset=utf-8"
       }),
       stream: this.textStream.pipeThrough(new TextEncoderStream())
@@ -4693,22 +4631,20 @@ var DefaultStreamTextResult = class {
     });
     return (options == null ? void 0 : options.data) ? mergeStreams(options == null ? void 0 : options.data.stream, stream) : stream;
   }
-  toDataStreamResponse(options) {
-    var _a11;
-    const init = options == null ? void 0 : "init" in options ? options.init : {
-      headers: "headers" in options ? options.headers : void 0,
-      status: "status" in options ? options.status : void 0,
-      statusText: "statusText" in options ? options.statusText : void 0
-    };
-    const data = options == null ? void 0 : "data" in options ? options.data : void 0;
-    const getErrorMessage3 = options == null ? void 0 : "getErrorMessage" in options ? options.getErrorMessage : void 0;
-    const sendUsage = options == null ? void 0 : "sendUsage" in options ? options.sendUsage : void 0;
+  toDataStreamResponse({
+    headers,
+    status,
+    statusText,
+    data,
+    getErrorMessage: getErrorMessage3,
+    sendUsage
+  } = {}) {
     return new Response(
       this.toDataStream({ data, getErrorMessage: getErrorMessage3, sendUsage }),
       {
-        status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
-        statusText: init == null ? void 0 : init.statusText,
-        headers: prepareResponseHeaders(init, {
+        status,
+        statusText,
+        headers: prepareResponseHeaders(headers, {
           contentType: "text/plain; charset=utf-8",
           dataStreamVersion: "v1"
         })
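
Both methods above now destructure one flat, optional options object; the old shape that probed for bare options or an { init: { status, statusText, headers } } wrapper via "in" checks is gone. A sketch of the new call shape, with option names taken from the destructuring patterns in these hunks and result being a streamText result as in the earlier example:

const response = result.toDataStreamResponse({
  status: 200,
  statusText: "OK",
  headers: { "Cache-Control": "no-store" },
  sendUsage: true,
});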
@@ -4719,7 +4655,7 @@ var DefaultStreamTextResult = class {
     var _a11;
     return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
       status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
-      headers: prepareResponseHeaders(init, {
+      headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
         contentType: "text/plain; charset=utf-8"
       })
     });
@@ -5157,7 +5093,7 @@ function toDataStreamResponse(stream, options) {
   return new Response(responseStream, {
     status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
     statusText: init == null ? void 0 : init.statusText,
-    headers: prepareResponseHeaders(init, {
+    headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
       contentType: "text/plain; charset=utf-8",
       dataStreamVersion: "v1"
     })
@@ -5201,7 +5137,7 @@ function toDataStreamResponse2(stream, options = {}) {
   return new Response(responseStream, {
     status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
     statusText: init == null ? void 0 : init.statusText,
-    headers: prepareResponseHeaders(init, {
+    headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
       contentType: "text/plain; charset=utf-8",
       dataStreamVersion: "v1"
     })
@@ -5218,48 +5154,6 @@ function trimStartOfStream() {
     return text;
   };
 }
-
-// streams/stream-to-response.ts
-function streamToResponse(res, response, init, data) {
-  var _a11;
-  response.writeHead((_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200, {
-    "Content-Type": "text/plain; charset=utf-8",
-    ...init == null ? void 0 : init.headers
-  });
-  let processedStream = res;
-  if (data) {
-    processedStream = mergeStreams(data.stream, res);
-  }
-  const reader = processedStream.getReader();
-  function read() {
-    reader.read().then(({ done, value }) => {
-      if (done) {
-        response.end();
-        return;
-      }
-      response.write(value);
-      read();
-    });
-  }
-  read();
-}
-
-// streams/streaming-text-response.ts
-var StreamingTextResponse = class extends Response {
-  constructor(res, init, data) {
-    let processedStream = res;
-    if (data) {
-      processedStream = mergeStreams(data.stream, res);
-    }
-    super(processedStream, {
-      ...init,
-      status: 200,
-      headers: prepareResponseHeaders(init, {
-        contentType: "text/plain; charset=utf-8"
-      })
-    });
-  }
-};
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   AISDKError,
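
This is the headline removal: the streamToResponse helper and the StreamingTextResponse class are deleted outright. For Node servers, pipeDataStreamToResponse on the streamText result (reworked earlier in this diff) covers the same ground. A minimal sketch of the migrated handler, assuming the canary streamText API; the server setup and model are illustrative:

import { createServer } from "node:http";
import { streamText } from "ai";
import { openai } from "@ai-sdk/openai";

createServer(async (req, res) => {
  const result = await streamText({
    model: openai("gpt-4o-mini"),
    prompt: "Stream a short greeting.",
  });
  // Before: streamToResponse(stream, res, { status: 200 });
  // After:
  result.pipeDataStreamToResponse(res, { status: 200 });
}).listen(3000);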
@@ -5285,7 +5179,6 @@ var StreamingTextResponse = class extends Response {
   NoSuchToolError,
   RetryError,
   StreamData,
-  StreamingTextResponse,
   TypeValidationError,
   UnsupportedFunctionalityError,
   convertToCoreMessages,
@@ -5309,7 +5202,6 @@ var StreamingTextResponse = class extends Response {
   readDataStream,
   streamObject,
   streamText,
-  streamToResponse,
   tool
 });
 //# sourceMappingURL=index.js.map