ai 4.0.0-canary.2 → 4.0.0-canary.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +31 -0
- package/dist/index.d.mts +46 -282
- package/dist/index.d.ts +46 -282
- package/dist/index.js +82 -231
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +56 -198
- package/dist/index.mjs.map +1 -1
- package/package.json +6 -9
- package/react/dist/index.d.ts +0 -17
- package/rsc/dist/index.d.ts +1 -1
- package/rsc/dist/rsc-server.d.mts +1 -1
- package/rsc/dist/rsc-server.mjs +1 -9
- package/rsc/dist/rsc-server.mjs.map +1 -1
- package/react/dist/index.server.d.mts +0 -17
- package/react/dist/index.server.d.ts +0 -17
- package/react/dist/index.server.js +0 -50
- package/react/dist/index.server.js.map +0 -1
- package/react/dist/index.server.mjs +0 -23
- package/react/dist/index.server.mjs.map +0 -1
package/dist/index.mjs
CHANGED
```diff
@@ -6,12 +6,12 @@ var __export = (target, all) => {
 
 // streams/index.ts
 import {
-  formatStreamPart,
+  formatStreamPart as formatStreamPart4,
   parseStreamPart,
   readDataStream,
   processDataProtocolResponse
 } from "@ai-sdk/ui-utils";
-import { generateId as generateIdImpl } from "@ai-sdk/provider-utils";
+import { generateId as generateId2 } from "@ai-sdk/provider-utils";
 
 // core/index.ts
 import { jsonSchema } from "@ai-sdk/ui-utils";
```
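For consumers nothing changes here: `formatStreamPart` and `generateId` are still re-exported from the package root (see the export hunks at the end of this file); the bundle merely aliases them internally. A minimal sanity check, assuming the package is installed as `ai`:

```ts
import { formatStreamPart, generateId } from "ai";

const id: string = generateId();                         // still a plain string id
const part: string = formatStreamPart("text", "hello");  // still the `0:"..."` protocol string for text parts
```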
```diff
@@ -1402,9 +1402,7 @@ function convertToCoreMessages(messages, options) {
       });
       break;
     }
-    case "
-    case "data":
-    case "tool": {
+    case "data": {
       break;
     }
     default: {
```
```diff
@@ -2299,7 +2297,6 @@ var DefaultGenerateObjectResult = class {
     });
   }
 };
-var experimental_generateObject = generateObject;
 
 // core/generate-object/stream-object.ts
 import { createIdGenerator as createIdGenerator2 } from "@ai-sdk/provider-utils";
```
```diff
@@ -2922,7 +2919,6 @@ var DefaultStreamObjectResult = class {
     });
   }
 };
-var experimental_streamObject = streamObject;
 
 // core/generate-text/generate-text.ts
 import { createIdGenerator as createIdGenerator3 } from "@ai-sdk/provider-utils";
```
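Both deprecated aliases (`experimental_generateObject` above and `experimental_streamObject` here) are deleted rather than re-exported, so imports have to move to the stable names. A hedged sketch of the rename; the zod schema and the `LanguageModel` type import are illustrative and not part of this diff:

```ts
// Before: import { experimental_generateObject, experimental_streamObject } from "ai";
import { generateObject, streamObject, type LanguageModel } from "ai";
import { z } from "zod";

const recipeSchema = z.object({ name: z.string(), steps: z.array(z.string()) });

export async function getRecipe(model: LanguageModel) {
  const { object } = await generateObject({
    model,
    schema: recipeSchema,
    prompt: "A simple pancake recipe.",
  });
  return object; // typed as { name: string; steps: string[] }
}

export async function streamRecipe(model: LanguageModel) {
  const { partialObjectStream } = await streamObject({
    model,
    schema: recipeSchema,
    prompt: "A simple pancake recipe.",
  });
  for await (const partial of partialObjectStream) {
    console.log(partial); // progressively filled object
  }
}
```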
```diff
@@ -2965,12 +2961,6 @@ var InvalidToolArgumentsError = class extends AISDKError8 {
   static isInstance(error) {
     return AISDKError8.hasMarker(error, marker8);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isInvalidToolArgumentsError(error) {
-    return error instanceof Error && error.name === name8 && typeof error.toolName === "string" && typeof error.toolArgs === "string";
-  }
 };
 _a8 = symbol8;
 
```
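Only the deprecated static type guard goes away; `isInstance` stays. A hedged sketch of the check callers keep using (assuming `InvalidToolArgumentsError` remains exported from the package root, which this hunk does not show):

```ts
import { InvalidToolArgumentsError } from "ai";

// `run` stands in for any call that executes tools, e.g. generateText with a tools map.
export async function describeFailure(run: () => Promise<string>): Promise<string> {
  try {
    return await run();
  } catch (error) {
    if (InvalidToolArgumentsError.isInstance(error)) {
      // The removed InvalidToolArgumentsError.isInvalidToolArgumentsError(error) did the same job.
      return `tool "${error.toolName}" was called with invalid arguments: ${error.toolArgs}`;
    }
    throw error;
  }
}
```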
```diff
@@ -3146,9 +3136,7 @@ async function generateText({
   maxRetries,
   abortSignal,
   headers,
-
-  maxToolRoundtrips = maxAutomaticRoundtrips,
-  maxSteps = maxToolRoundtrips != null ? maxToolRoundtrips + 1 : 1,
+  maxSteps = 1,
   experimental_continuationSteps,
   experimental_continueSteps: continueSteps = experimental_continuationSteps != null ? experimental_continuationSteps : false,
   experimental_telemetry: telemetry,
```
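`maxToolRoundtrips` (and its `maxAutomaticRoundtrips` fallback) is dropped from `generateText`; multi-step tool calling is configured with `maxSteps`, which now defaults to `1`. Going by the removed default (`maxSteps = maxToolRoundtrips + 1`), a former `maxToolRoundtrips: 2` maps to `maxSteps: 3`. A hedged sketch; the tool and the `LanguageModel` type are illustrative:

```ts
import { generateText, tool, type LanguageModel } from "ai";
import { z } from "zod";

export async function answerWithWeather(model: LanguageModel) {
  return generateText({
    model,
    // Before: maxToolRoundtrips: 2 — roughly equivalent to maxSteps: 3 now.
    maxSteps: 3,
    tools: {
      weather: tool({
        description: "Get the weather for a city",
        parameters: z.object({ city: z.string() }),
        execute: async ({ city }) => `It is sunny in ${city}.`,
      }),
    },
    prompt: "What is the weather in Berlin?",
  });
}
```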
```diff
@@ -3516,7 +3504,6 @@ var DefaultGenerateTextResult = class {
     this.request = options.request;
     this.response = options.response;
     this.responseMessages = options.responseMessages;
-    this.roundtrips = options.steps;
     this.steps = options.steps;
     this.experimental_providerMetadata = options.providerMetadata;
     this.rawResponse = {
@@ -3525,10 +3512,10 @@ var DefaultGenerateTextResult = class {
     this.logprobs = options.logprobs;
   }
 };
-var experimental_generateText = generateText;
 
 // core/generate-text/stream-text.ts
 import { createIdGenerator as createIdGenerator4 } from "@ai-sdk/provider-utils";
+import { formatStreamPart } from "@ai-sdk/ui-utils";
 
 // core/util/create-stitchable-stream.ts
 function createStitchableStream() {
```
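The `roundtrips` alias on the `generateText` result object is gone; `steps` is the only remaining field. A sketch, under the same assumptions as above:

```ts
import { generateText, type LanguageModel } from "ai";

export async function countSteps(model: LanguageModel, prompt: string): Promise<number> {
  const result = await generateText({ model, prompt, maxSteps: 3 });
  // Before: result.roundtrips (a deprecated alias of the same array); now only result.steps exists.
  return result.steps.length;
}
```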
```diff
@@ -3885,8 +3872,7 @@ async function streamText({
   maxRetries,
   abortSignal,
   headers,
-
-  maxSteps = maxToolRoundtrips != null ? maxToolRoundtrips + 1 : 1,
+  maxSteps = 1,
   experimental_continueSteps: continueSteps = false,
   experimental_telemetry: telemetry,
   experimental_providerMetadata: providerMetadata,
```
```diff
@@ -4532,37 +4518,18 @@ var DefaultStreamTextResult = class {
       }
     });
   }
-  toAIStream(callbacks = {}) {
-    return this.toDataStreamInternal({ callbacks });
-  }
   toDataStreamInternal({
-    callbacks = {},
     getErrorMessage: getErrorMessage3 = () => "",
     // mask error messages for safety by default
     sendUsage = true
   } = {}) {
     let aggregatedResponse = "";
     const callbackTransformer = new TransformStream({
-      async start() {
-        if (callbacks.onStart)
-          await callbacks.onStart();
-      },
       async transform(chunk, controller) {
         controller.enqueue(chunk);
         if (chunk.type === "text-delta") {
-
-          aggregatedResponse += textDelta;
-          if (callbacks.onToken)
-            await callbacks.onToken(textDelta);
-          if (callbacks.onText)
-            await callbacks.onText(textDelta);
+          aggregatedResponse += chunk.textDelta;
         }
-      },
-      async flush() {
-        if (callbacks.onCompletion)
-          await callbacks.onCompletion(aggregatedResponse);
-        if (callbacks.onFinal)
-          await callbacks.onFinal(aggregatedResponse);
       }
     });
     const streamPartsTransformer = new TransformStream({
```
```diff
@@ -4650,9 +4617,6 @@ var DefaultStreamTextResult = class {
     });
     return this.fullStream.pipeThrough(callbackTransformer).pipeThrough(streamPartsTransformer).pipeThrough(new TextEncoderStream());
   }
-  pipeAIStreamToResponse(response, init) {
-    return this.pipeDataStreamToResponse(response, init);
-  }
   pipeDataStreamToResponse(response, options) {
     const init = options == null ? void 0 : "init" in options ? options.init : {
       headers: "headers" in options ? options.headers : void 0,
```
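`pipeAIStreamToResponse` was a thin alias for `pipeDataStreamToResponse` and is removed; callers targeting a Node.js `ServerResponse` call the data-stream variant directly. A hedged sketch:

```ts
import type { ServerResponse } from "node:http";
import { streamText, type LanguageModel } from "ai";

export async function pipeCompletion(model: LanguageModel, prompt: string, res: ServerResponse) {
  const result = await streamText({ model, prompt });
  // Before: result.pipeAIStreamToResponse(res); the alias simply forwarded here.
  result.pipeDataStreamToResponse(res);
}
```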
```diff
@@ -4684,9 +4648,6 @@ var DefaultStreamTextResult = class {
       stream: this.textStream.pipeThrough(new TextEncoderStream())
     });
   }
-  toAIStreamResponse(options) {
-    return this.toDataStreamResponse(options);
-  }
   toDataStream(options) {
     const stream = this.toDataStreamInternal({
       getErrorMessage: options == null ? void 0 : options.getErrorMessage,
```
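Likewise, `toAIStream` (two hunks up) and `toAIStreamResponse` (here) only delegated to their data-stream counterparts, so a typical web route handler now calls `toDataStreamResponse` directly. A hedged sketch:

```ts
import { streamText, type LanguageModel } from "ai";

export async function handleChat(model: LanguageModel, req: Request): Promise<Response> {
  const { prompt } = (await req.json()) as { prompt: string };
  const result = await streamText({ model, prompt });
  // Before: result.toAIStreamResponse(); now only the data-stream form remains.
  return result.toDataStreamResponse();
}
```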
```diff
@@ -4726,7 +4687,6 @@ var DefaultStreamTextResult = class {
     });
   }
 };
-var experimental_streamText = streamText;
 
 // core/middleware/wrap-language-model.ts
 var experimental_wrapLanguageModel = ({
```
```diff
@@ -4913,123 +4873,6 @@ function magnitude(vector) {
   return Math.sqrt(dotProduct(vector, vector));
 }
 
-// streams/ai-stream.ts
-import {
-  createParser
-} from "eventsource-parser";
-function createEventStreamTransformer(customParser) {
-  const textDecoder = new TextDecoder();
-  let eventSourceParser;
-  return new TransformStream({
-    async start(controller) {
-      eventSourceParser = createParser(
-        (event) => {
-          if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
-          // @see https://replicate.com/docs/streaming
-          event.event === "done") {
-            controller.terminate();
-            return;
-          }
-          if ("data" in event) {
-            const parsedMessage = customParser ? customParser(event.data, {
-              event: event.event
-            }) : event.data;
-            if (parsedMessage)
-              controller.enqueue(parsedMessage);
-          }
-        }
-      );
-    },
-    transform(chunk) {
-      eventSourceParser.feed(textDecoder.decode(chunk));
-    }
-  });
-}
-function createCallbacksTransformer(cb) {
-  const textEncoder = new TextEncoder();
-  let aggregatedResponse = "";
-  const callbacks = cb || {};
-  return new TransformStream({
-    async start() {
-      if (callbacks.onStart)
-        await callbacks.onStart();
-    },
-    async transform(message, controller) {
-      const content = typeof message === "string" ? message : message.content;
-      controller.enqueue(textEncoder.encode(content));
-      aggregatedResponse += content;
-      if (callbacks.onToken)
-        await callbacks.onToken(content);
-      if (callbacks.onText && typeof message === "string") {
-        await callbacks.onText(message);
-      }
-    },
-    async flush() {
-      if (callbacks.onCompletion) {
-        await callbacks.onCompletion(aggregatedResponse);
-      }
-    }
-  });
-}
-function trimStartOfStreamHelper() {
-  let isStreamStart = true;
-  return (text) => {
-    if (isStreamStart) {
-      text = text.trimStart();
-      if (text)
-        isStreamStart = false;
-    }
-    return text;
-  };
-}
-function AIStream(response, customParser, callbacks) {
-  if (!response.ok) {
-    if (response.body) {
-      const reader = response.body.getReader();
-      return new ReadableStream({
-        async start(controller) {
-          const { done, value } = await reader.read();
-          if (!done) {
-            const errorText = new TextDecoder().decode(value);
-            controller.error(new Error(`Response error: ${errorText}`));
-          }
-        }
-      });
-    } else {
-      return new ReadableStream({
-        start(controller) {
-          controller.error(new Error("Response error: No response body"));
-        }
-      });
-    }
-  }
-  const responseBodyStream = response.body || createEmptyReadableStream();
-  return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
-}
-function createEmptyReadableStream() {
-  return new ReadableStream({
-    start(controller) {
-      controller.close();
-    }
-  });
-}
-function readableFromAsyncIterable(iterable) {
-  let it = iterable[Symbol.asyncIterator]();
-  return new ReadableStream({
-    async pull(controller) {
-      const { done, value } = await it.next();
-      if (done)
-        controller.close();
-      else
-        controller.enqueue(value);
-    },
-    async cancel(reason) {
-      var _a11;
-      await ((_a11 = it.return) == null ? void 0 : _a11.call(it, reason));
-    }
-  });
-}
-
 // streams/assistant-response.ts
 import {
   formatStreamPart as formatStreamPart2
```
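The whole legacy `streams/ai-stream.ts` module is deleted, which is why `AIStream`, `createEventStreamTransformer`, `readableFromAsyncIterable`, and `trimStartOfStreamHelper` disappear from the export list further down. For code that only used `readableFromAsyncIterable`, the closest replacement in this version appears to be `convertAsyncIteratorToReadableStream` from `@ai-sdk/provider-utils`, which is what the LlamaIndex adapter below switches to; a hedged sketch:

```ts
import { convertAsyncIteratorToReadableStream } from "@ai-sdk/provider-utils";

async function* chunks(): AsyncGenerator<string> {
  yield "one";
  yield "two";
}

// Before: readableFromAsyncIterable(chunks()); that helper is no longer bundled or exported.
const stream: ReadableStream<string> =
  convertAsyncIteratorToReadableStream(chunks()[Symbol.asyncIterator]());
```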
```diff
@@ -5135,6 +4978,32 @@ __export(langchain_adapter_exports, {
   toDataStreamResponse: () => toDataStreamResponse
 });
 
+// streams/stream-callbacks.ts
+function createCallbacksTransformer(callbacks = {}) {
+  const textEncoder = new TextEncoder();
+  let aggregatedResponse = "";
+  return new TransformStream({
+    async start() {
+      if (callbacks.onStart)
+        await callbacks.onStart();
+    },
+    async transform(message, controller) {
+      controller.enqueue(textEncoder.encode(message));
+      aggregatedResponse += message;
+      if (callbacks.onToken)
+        await callbacks.onToken(message);
+      if (callbacks.onText && typeof message === "string") {
+        await callbacks.onText(message);
+      }
+    },
+    async flush() {
+      if (callbacks.onCompletion) {
+        await callbacks.onCompletion(aggregatedResponse);
+      }
+    }
+  });
+}
+
 // streams/stream-data.ts
 import { formatStreamPart as formatStreamPart3 } from "@ai-sdk/ui-utils";
 
```
```diff
@@ -5142,7 +5011,7 @@ import { formatStreamPart as formatStreamPart3 } from "@ai-sdk/ui-utils";
 var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
 
 // streams/stream-data.ts
-var
+var StreamData = class {
   constructor() {
     this.encoder = new TextEncoder();
     this.controller = null;
@@ -5213,7 +5082,7 @@ function createStreamDataTransformer() {
     }
   });
 }
-var experimental_StreamData = class extends
+var experimental_StreamData = class extends StreamData {
 };
 
 // streams/langchain-adapter.ts
```
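`StreamData` is now the primary class and `experimental_StreamData` survives only as an empty subclass for backwards compatibility (the export hunks at the end add `StreamData` to the public exports). A hedged usage sketch; `append` and `close` are pre-existing StreamData methods that this diff does not show:

```ts
import { StreamData } from "ai";

export async function annotate(): Promise<StreamData> {
  // Before this version: new experimental_StreamData(); the prefix-free class is now canonical.
  const data = new StreamData();
  data.append({ status: "started" }); // attach extra JSON alongside the data stream
  await data.close();
  return data;
}
```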
```diff
@@ -5277,8 +5146,16 @@ __export(llamaindex_adapter_exports, {
   toDataStream: () => toDataStream2,
   toDataStreamResponse: () => toDataStreamResponse2
 });
+import { convertAsyncIteratorToReadableStream } from "@ai-sdk/provider-utils";
 function toDataStream2(stream, callbacks) {
-
+  const trimStart = trimStartOfStream();
+  return convertAsyncIteratorToReadableStream(stream[Symbol.asyncIterator]()).pipeThrough(
+    new TransformStream({
+      async transform(message, controller) {
+        controller.enqueue(trimStart(message.delta));
+      }
+    })
+  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
 }
 function toDataStreamResponse2(stream, options = {}) {
   var _a11;
```
```diff
@@ -5294,23 +5171,16 @@ function toDataStreamResponse2(stream, options = {}) {
     })
   });
 }
-function
-
-
-
-
-
-
-      if (done) {
-        controller.close();
-        return;
-      }
-      const text = trimStartOfStream((_a11 = value.delta) != null ? _a11 : "");
-      if (text) {
-        controller.enqueue(text);
-      }
+function trimStartOfStream() {
+  let isStreamStart = true;
+  return (text) => {
+    if (isStreamStart) {
+      text = text.trimStart();
+      if (text)
+        isStreamStart = false;
     }
-
+    return text;
+  };
 }
 
 // streams/stream-to-response.ts
```
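The LlamaIndex adapter's `toDataStream` is rebuilt on `convertAsyncIteratorToReadableStream` plus the shared callbacks transformer, and the old inline reader loop is replaced by the small `trimStartOfStream` helper; the input is still an async iterable of `{ delta }` chunks. A hedged sketch of a caller; the `LlamaIndexAdapter` namespace export is assumed, since this hunk only shows the adapter's internals:

```ts
import { LlamaIndexAdapter } from "ai";

// Stand-in for a LlamaIndex streaming chat response: an async iterable of { delta } chunks,
// which is the shape the rewritten toDataStream consumes.
async function* engineOutput(): AsyncIterable<{ delta: string }> {
  yield { delta: "  Hello" }; // leading whitespace gets dropped by trimStartOfStream
  yield { delta: ", world." };
}

export function GET(): Response {
  return LlamaIndexAdapter.toDataStreamResponse(engineOutput());
}
```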
```diff
@@ -5354,12 +5224,8 @@ var StreamingTextResponse = class extends Response {
     });
   }
 };
-
-// streams/index.ts
-var generateId2 = generateIdImpl;
 export {
   AISDKError10 as AISDKError,
-  AIStream,
   APICallError2 as APICallError,
   AssistantResponse,
   DownloadError,
@@ -5381,14 +5247,12 @@ export {
   NoSuchProviderError,
   NoSuchToolError,
   RetryError,
-
+  StreamData,
   StreamingTextResponse,
   TypeValidationError2 as TypeValidationError,
   UnsupportedFunctionalityError2 as UnsupportedFunctionalityError,
   convertToCoreMessages,
   cosineSimilarity,
-  createCallbacksTransformer,
-  createEventStreamTransformer,
   createStreamDataTransformer,
   embed,
   embedMany,
@@ -5397,12 +5261,8 @@ export {
   experimental_createModelRegistry,
   experimental_createProviderRegistry,
   experimental_customProvider,
-  experimental_generateObject,
-  experimental_generateText,
-  experimental_streamObject,
-  experimental_streamText,
   experimental_wrapLanguageModel,
-  formatStreamPart,
+  formatStreamPart4 as formatStreamPart,
   generateId2 as generateId,
   generateObject,
   generateText,
@@ -5410,11 +5270,9 @@ export {
   parseStreamPart,
   processDataProtocolResponse,
   readDataStream,
-  readableFromAsyncIterable,
   streamObject,
   streamText,
   streamToResponse,
-  tool,
-  trimStartOfStreamHelper
+  tool
 };
 //# sourceMappingURL=index.mjs.map
```