ai 4.0.0-canary.6 → 4.0.0-canary.8
This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +26 -0
- package/dist/index.d.mts +37 -161
- package/dist/index.d.ts +37 -161
- package/dist/index.js +18 -40
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +18 -37
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
- package/rsc/dist/index.d.ts +18 -18
- package/rsc/dist/rsc-server.d.mts +18 -18
- package/rsc/dist/rsc-server.mjs +7 -4
- package/rsc/dist/rsc-server.mjs.map +1 -1
package/dist/index.mjs
CHANGED
@@ -1490,11 +1490,14 @@ function standardizePrompt({
 }

 // core/types/usage.ts
-function calculateLanguageModelUsage(
+function calculateLanguageModelUsage({
+  promptTokens,
+  completionTokens
+}) {
   return {
-    promptTokens
-    completionTokens
-    totalTokens:
+    promptTokens,
+    completionTokens,
+    totalTokens: promptTokens + completionTokens
   };
 }

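The helper now takes a single destructured object and derives `totalTokens` itself. A minimal sketch of the new call shape (the helper is internal to the package, so it is re-declared here from the diff; the numbers are illustrative):

```ts
// Shape of the helper after this change (reconstructed from the added lines):
function calculateLanguageModelUsage({
  promptTokens,
  completionTokens,
}: {
  promptTokens: number;
  completionTokens: number;
}) {
  return { promptTokens, completionTokens, totalTokens: promptTokens + completionTokens };
}

// Callers now pass one object; totalTokens is computed for them:
const usage = calculateLanguageModelUsage({ promptTokens: 10, completionTokens: 25 });
// => { promptTokens: 10, completionTokens: 25, totalTokens: 35 }
```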
@@ -3115,8 +3118,7 @@ async function generateText({
   abortSignal,
   headers,
   maxSteps = 1,
-  experimental_continuationSteps,
-  experimental_continueSteps: continueSteps = experimental_continuationSteps != null ? experimental_continuationSteps : false,
+  experimental_continueSteps: continueSteps = false,
   experimental_telemetry: telemetry,
   experimental_providerMetadata: providerMetadata,
   experimental_activeTools: activeTools,
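This removes the deprecated `experimental_continuationSteps` option and its fallback, leaving `experimental_continueSteps` with a plain `false` default. A hedged migration sketch (provider package and model id are assumptions, not part of this diff):

```ts
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai"; // assumed provider package

const { text } = await generateText({
  model: openai("gpt-4o"), // assumed model id
  prompt: "Write a long story.",
  maxSteps: 5,
  // Before: experimental_continuationSteps: true
  experimental_continueSteps: true,
});
```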
@@ -3388,7 +3390,6 @@ async function generateText({
       messages: responseMessages
     },
     logprobs: currentModelResponse.logprobs,
-    responseMessages,
     steps,
     providerMetadata: currentModelResponse.providerMetadata
   });
@@ -3465,7 +3466,6 @@ var DefaultGenerateTextResult = class {
     this.warnings = options.warnings;
     this.request = options.request;
     this.response = options.response;
-    this.responseMessages = options.responseMessages;
     this.steps = options.steps;
     this.experimental_providerMetadata = options.providerMetadata;
     this.logprobs = options.logprobs;
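Together, the two hunks above remove the deprecated top-level `responseMessages` field from the `generateText` result; the same messages stay reachable under `response.messages` (visible in the `messages: responseMessages` context line). A minimal migration sketch, with the provider setup assumed:

```ts
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai"; // assumed provider package

const result = await generateText({
  model: openai("gpt-4o"), // assumed model id
  prompt: "Hello!",
});

// Before: const messages = result.responseMessages;
const messages = result.response.messages;
```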
@@ -4030,7 +4030,7 @@ var DefaultStreamTextResult = class {
     generateId: generateId3,
     tools
   }) {
-    this.warnings = warnings;
+    this.rawWarnings = warnings;
     this.rawResponse = rawResponse;
     const { resolve: resolveUsage, promise: usagePromise } = createResolvablePromise();
     this.usage = usagePromise;
@@ -4053,11 +4053,8 @@ var DefaultStreamTextResult = class {
     this.request = requestPromise;
     const { resolve: resolveResponse, promise: responsePromise } = createResolvablePromise();
     this.response = responsePromise;
-    const {
-      resolve: resolveResponseMessages,
-      promise: responseMessagesPromise
-    } = createResolvablePromise();
-    this.responseMessages = responseMessagesPromise;
+    const { resolve: resolveWarnings, promise: warningsPromise } = createResolvablePromise();
+    this.warnings = warningsPromise;
     const {
       stream: stitchableStream,
       addStream,
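On the stream-text result, `warnings` thus switches from a plain value to a promise (settled via `resolveWarnings` when the stream finishes; see the later hunks), while the synchronous per-step value moves to `rawWarnings`, and the separate `responseMessages` promise is dropped. A consumer-side sketch, assuming a configured provider:

```ts
import { streamText } from "ai";
import { openai } from "@ai-sdk/openai"; // assumed provider package

const result = await streamText({
  model: openai("gpt-4o"), // assumed model id
  prompt: "Hello!",
});

for await (const delta of result.textStream) {
  process.stdout.write(delta);
}

// warnings is now promise-based and resolves once the stream is done.
const warnings = await result.warnings;
```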
@@ -4211,7 +4208,7 @@ var DefaultStreamTextResult = class {
       },
       // invoke onFinish callback and resolve toolResults promise when the stream is about to close:
       async flush(controller) {
-        var _a11;
+        var _a11, _b;
         const stepToolCallsJson = stepToolCalls.length > 0 ? JSON.stringify(stepToolCalls) : void 0;
         let nextStepType = "done";
         if (currentStep + 1 < maxSteps) {
@@ -4303,7 +4300,7 @@ var DefaultStreamTextResult = class {
           toolResults: stepToolResults,
           finishReason: stepFinishReason,
           usage: stepUsage,
-          warnings: self.warnings,
+          warnings: self.rawWarnings,
           logprobs: stepLogProbs,
           request: stepRequest,
           response: {
@@ -4328,7 +4325,7 @@ var DefaultStreamTextResult = class {
           doStreamSpan: doStreamSpan3,
           startTimestampMs: startTimestamp2
         } = await startStep({ responseMessages });
-        self.warnings = result.warnings;
+        self.rawWarnings = result.warnings;
         self.rawResponse = result.rawResponse;
         addStepStream({
           stream: result.stream,
@@ -4383,7 +4380,7 @@ var DefaultStreamTextResult = class {
             messages: responseMessages
           });
           resolveSteps(stepResults);
-          resolveResponseMessages(responseMessages);
+          resolveWarnings((_b = self.rawWarnings) != null ? _b : []);
           await (onFinish == null ? void 0 : onFinish({
             finishReason: stepFinishReason,
             logprobs: stepLogProbs,
@@ -4723,15 +4720,11 @@ function experimental_createProviderRegistry(providers) {
   }
   return registry;
 }
-var experimental_createModelRegistry = experimental_createProviderRegistry;
 var DefaultProviderRegistry = class {
   constructor() {
     this.providers = {};
   }
-  registerProvider({
-    id,
-    provider
-  }) {
+  registerProvider({ id, provider }) {
     this.providers[id] = provider;
   }
   getProvider(id) {
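The deprecated `experimental_createModelRegistry` alias is deleted; `experimental_createProviderRegistry` is the remaining name (the `registerProvider` rewrite in the same hunk is formatting only). Migration is a rename, sketched here with an assumed provider:

```ts
import { experimental_createProviderRegistry } from "ai";
import { openai } from "@ai-sdk/openai"; // assumed provider package

// Before: experimental_createModelRegistry({ openai })
const registry = experimental_createProviderRegistry({ openai });

// Registry ids use the "providerId:modelId" form.
const model = registry.languageModel("openai:gpt-4o"); // assumed model id
```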
@@ -4767,10 +4760,10 @@ var DefaultProviderRegistry = class {
     return model;
   }
   textEmbeddingModel(id) {
-    var _a11
+    var _a11;
     const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
     const provider = this.getProvider(providerId);
-    const model = (
+    const model = (_a11 = provider.textEmbeddingModel) == null ? void 0 : _a11.call(provider, modelId);
     if (model == null) {
       throw new NoSuchModelError4({
         modelId: id,
@@ -4882,8 +4875,6 @@ function AssistantResponse({ threadId, messageId }, process2) {
   );
   try {
     await process2({
-      threadId,
-      messageId,
       sendMessage,
       sendDataMessage,
       forwardStream
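`process2` (the user-supplied `process` callback) no longer receives `threadId` and `messageId`; both are still passed to `AssistantResponse` itself, so handlers can close over them. A hedged route-handler sketch (the request shape and message id field are assumptions):

```ts
import { AssistantResponse } from "ai";

export async function POST(req: Request) {
  const { threadId, message } = await req.json(); // request shape assumed

  return AssistantResponse(
    { threadId, messageId: message.id /* assumed field */ },
    // Before: async ({ threadId, messageId, forwardStream }) => { ... }
    async ({ sendDataMessage, forwardStream }) => {
      // Read threadId / messageId from the enclosing scope instead,
      // e.g. when forwarding an assistant run stream (usage assumed).
    },
  );
}
```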
@@ -4906,12 +4897,10 @@ function AssistantResponse({ threadId, messageId }, process2) {
     }
   });
 }
-var experimental_AssistantResponse = AssistantResponse;

 // streams/langchain-adapter.ts
 var langchain_adapter_exports = {};
 __export(langchain_adapter_exports, {
-  toAIStream: () => toAIStream,
   toDataStream: () => toDataStream,
   toDataStreamResponse: () => toDataStreamResponse
 });
@@ -5020,13 +5009,8 @@ function createStreamDataTransformer() {
     }
   });
 }
-var experimental_StreamData = class extends StreamData {
-};

 // streams/langchain-adapter.ts
-function toAIStream(stream, callbacks) {
-  return toDataStream(stream, callbacks);
-}
 function toDataStream(stream, callbacks) {
   return stream.pipeThrough(
     new TransformStream({
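The hunks above delete three deprecated aliases: `experimental_AssistantResponse` (use `AssistantResponse`), `experimental_StreamData` (use `StreamData`), and the LangChain adapter's `toAIStream`, which simply forwarded to `toDataStream`. A sketch of the adapter rename, with the input stream assumed:

```ts
import { LangChainAdapter } from "ai";

// Stand-in for the output of a LangChain runnable's .stream() call
// (any ReadableStream of string chunks works here).
declare const stream: ReadableStream<string>;

// Before: LangChainAdapter.toAIStream(stream, callbacks)
const dataStream = LangChainAdapter.toDataStream(stream);
```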
@@ -5151,9 +5135,6 @@ export {
   createStreamDataTransformer,
   embed,
   embedMany,
-  experimental_AssistantResponse,
-  experimental_StreamData,
-  experimental_createModelRegistry,
   experimental_createProviderRegistry,
   experimental_customProvider,
   experimental_wrapLanguageModel,