ai 4.1.27 → 4.1.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.d.mts +25 -2
- package/dist/index.d.ts +25 -2
- package/dist/index.js +34 -7
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +34 -7
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -3832,7 +3832,7 @@ async function generateText({
       }),
       tracer,
       fn: async (span) => {
-        var _a16, _b, _c, _d, _e, _f, _g;
+        var _a16, _b, _c, _d, _e, _f, _g, _h, _i;
         const mode = {
           type: "regular",
           ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
@@ -3844,6 +3844,7 @@ async function generateText({
         let stepCount = 0;
         const responseMessages = [];
         let text2 = "";
+        const sources = [];
         const steps = [];
         let usage = {
           completionTokens: 0,
@@ -3992,6 +3993,7 @@ async function generateText({
           text2.trimEnd() !== text2 ? originalText.trimStart() : originalText;
           const stepText = nextStepType === "continue" ? removeTextAfterLastWhitespace(stepTextLeadingWhitespaceTrimmed) : stepTextLeadingWhitespaceTrimmed;
           text2 = nextStepType === "continue" || stepType === "continue" ? text2 + stepText : stepText;
+          sources.push(...(_d = currentModelResponse.sources) != null ? _d : []);
           if (stepType === "continue") {
             const lastMessage = responseMessages[responseMessages.length - 1];
             if (typeof lastMessage.content === "string") {
@@ -4018,16 +4020,17 @@ async function generateText({
           stepType,
           text: stepText,
           reasoning: currentModelResponse.reasoning,
+          sources: (_e = currentModelResponse.sources) != null ? _e : [],
           toolCalls: currentToolCalls,
           toolResults: currentToolResults,
           finishReason: currentModelResponse.finishReason,
           usage: currentUsage,
           warnings: currentModelResponse.warnings,
           logprobs: currentModelResponse.logprobs,
-          request: (_d = currentModelResponse.request) != null ? _d : {},
+          request: (_f = currentModelResponse.request) != null ? _f : {},
           response: {
             ...currentModelResponse.response,
-            headers: (_e = currentModelResponse.rawResponse) == null ? void 0 : _e.headers,
+            headers: (_g = currentModelResponse.rawResponse) == null ? void 0 : _g.headers,
             // deep clone msgs to avoid mutating past messages in multi-step:
             messages: structuredClone(responseMessages)
           },
@@ -4057,6 +4060,7 @@ async function generateText({
         return new DefaultGenerateTextResult({
           text: text2,
           reasoning: currentModelResponse.reasoning,
+          sources,
           outputResolver: () => {
             if (output == null) {
               throw new NoOutputSpecifiedError();
@@ -4071,10 +4075,10 @@ async function generateText({
           finishReason: currentModelResponse.finishReason,
           usage,
           warnings: currentModelResponse.warnings,
-          request: (_f = currentModelResponse.request) != null ? _f : {},
+          request: (_h = currentModelResponse.request) != null ? _h : {},
           response: {
             ...currentModelResponse.response,
-            headers: (_g = currentModelResponse.rawResponse) == null ? void 0 : _g.headers,
+            headers: (_i = currentModelResponse.rawResponse) == null ? void 0 : _i.headers,
             messages: responseMessages
           },
           logprobs: currentModelResponse.logprobs,
@@ -4174,6 +4178,7 @@ var DefaultGenerateTextResult = class {
     this.experimental_providerMetadata = options.providerMetadata;
     this.logprobs = options.logprobs;
     this.outputResolver = options.outputResolver;
+    this.sources = options.sources;
   }
   get experimental_output() {
     return this.outputResolver();
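
Taken together, the generateText changes above collect provider-returned sources across all steps and expose them on the result (`result.sources`) and on each recorded step (`step.sources`). A minimal TypeScript sketch of reading them, assuming a placeholder `model` supplied by the caller (any provider model that actually returns sources, such as a search-grounded model):

import { generateText, type LanguageModel } from "ai";

// Assumption: the caller supplies a provider model whose responses include
// sources (for example, a search-grounded model); otherwise `sources` is empty.
declare const model: LanguageModel;

async function main() {
  const result = await generateText({
    model,
    prompt: "What changed in the latest release?",
  });

  // New in 4.1.28: sources accumulated across every step of the call.
  for (const source of result.sources) {
    console.log(source);
  }

  // Each step also records the sources returned during that step.
  for (const step of result.steps) {
    console.log(step.sources.length);
  }
}

main().catch(console.error);
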
@@ -4466,6 +4471,7 @@ function runToolsTransformation({
       switch (chunkType) {
         case "text-delta":
         case "reasoning":
+        case "source":
         case "response-metadata":
         case "error": {
           controller.enqueue(chunk);
@@ -4779,6 +4785,7 @@ var DefaultStreamTextResult = class {
     this.providerMetadataPromise = new DelayedPromise();
     this.textPromise = new DelayedPromise();
     this.reasoningPromise = new DelayedPromise();
+    this.sourcesPromise = new DelayedPromise();
     this.toolCallsPromise = new DelayedPromise();
     this.toolResultsPromise = new DelayedPromise();
     this.requestPromise = new DelayedPromise();
@@ -4797,6 +4804,8 @@ var DefaultStreamTextResult = class {
     let recordedContinuationText = "";
     let recordedFullText = "";
     let recordedReasoningText = void 0;
+    let recordedStepSources = [];
+    const recordedSources = [];
     const recordedResponse = {
       id: generateId3(),
       timestamp: currentDate(),
@@ -4814,7 +4823,7 @@ var DefaultStreamTextResult = class {
       async transform(chunk, controller) {
         controller.enqueue(chunk);
         const { part } = chunk;
-        if (part.type === "text-delta" || part.type === "reasoning" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-call-streaming-start" || part.type === "tool-call-delta") {
+        if (part.type === "text-delta" || part.type === "reasoning" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-call-streaming-start" || part.type === "tool-call-delta") {
           await (onChunk == null ? void 0 : onChunk({ chunk: part }));
         }
         if (part.type === "error") {
@@ -4828,6 +4837,10 @@ var DefaultStreamTextResult = class {
         if (part.type === "reasoning") {
           recordedReasoningText = (recordedReasoningText != null ? recordedReasoningText : "") + part.textDelta;
         }
+        if (part.type === "source") {
+          recordedSources.push(part.source);
+          recordedStepSources.push(part.source);
+        }
         if (part.type === "tool-call") {
           recordedToolCalls.push(part);
         }
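
With the transform change above, `source` stream parts now reach the `onChunk` callback and are recorded per step (`recordedStepSources`) and for the whole stream (`recordedSources`). A minimal sketch of observing them through the streamText callbacks, with the same placeholder `model` assumption:

import { streamText, type LanguageModel } from "ai";

// Assumption: a provider model that emits `source` stream parts.
declare const model: LanguageModel;

const result = streamText({
  model,
  prompt: "Summarize today's top story and say where the information came from.",
  onChunk({ chunk }) {
    // As of 4.1.28, onChunk also fires for source parts.
    if (chunk.type === "source") {
      console.log("source while streaming:", chunk.source);
    }
  },
  onFinish({ sources }) {
    // Sources of the final step (see the onFinish change further below).
    console.log("final-step sources:", sources.length);
  },
});

// The stream must be consumed for the callbacks and promises to settle.
for await (const delta of result.textStream) {
  process.stdout.write(delta);
}
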
@@ -4861,6 +4874,7 @@ var DefaultStreamTextResult = class {
           stepType,
           text: recordedStepText,
           reasoning: recordedReasoningText,
+          sources: recordedStepSources,
           toolCalls: recordedToolCalls,
           toolResults: recordedToolResults,
           finishReason: part.finishReason,
@@ -4880,6 +4894,7 @@ var DefaultStreamTextResult = class {
         recordedToolCalls = [];
         recordedToolResults = [];
         recordedStepText = "";
+        recordedStepSources = [];
         if (nextStepType !== "done") {
           stepType = nextStepType;
         }
@@ -4922,13 +4937,15 @@ var DefaultStreamTextResult = class {
         self.usagePromise.resolve(usage);
         self.textPromise.resolve(recordedFullText);
         self.reasoningPromise.resolve(recordedReasoningText);
+        self.sourcesPromise.resolve(recordedSources);
         self.stepsPromise.resolve(recordedSteps);
         await (onFinish == null ? void 0 : onFinish({
           finishReason,
           logprobs: void 0,
           usage,
           text: recordedFullText,
-          reasoning:
+          reasoning: lastStep.reasoning,
+          sources: lastStep.sources,
           toolCalls: lastStep.toolCalls,
           toolResults: lastStep.toolResults,
           request: (_a16 = lastStep.request) != null ? _a16 : {},
@@ -5204,6 +5221,10 @@ var DefaultStreamTextResult = class {
             stepReasoning += chunk.textDelta;
             break;
           }
+          case "source": {
+            controller.enqueue(chunk);
+            break;
+          }
           case "tool-call": {
             controller.enqueue(chunk);
             stepToolCalls.push(chunk);
@@ -5420,6 +5441,9 @@ var DefaultStreamTextResult = class {
   get reasoning() {
     return this.reasoningPromise.value;
   }
+  get sources() {
+    return this.sourcesPromise.value;
+  }
   get toolCalls() {
     return this.toolCallsPromise.value;
   }
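
The new `sources` getter resolves once the stream has finished, alongside the existing `text`, `reasoning`, and `steps` promises, and `source` parts also appear on `fullStream`. A short sketch of the awaitable accessors, again assuming a placeholder `model` that returns sources:

import { streamText, type LanguageModel } from "ai";

// Assumption: a provider model that emits `source` stream parts.
declare const model: LanguageModel;

const result = streamText({ model, prompt: "Find three references on WebGPU." });

// Source parts arrive on fullStream as { type: "source", source } chunks.
for await (const part of result.fullStream) {
  if (part.type === "source") {
    console.log("streamed source:", part.source);
  }
}

// Awaitable accessor added in this release: all sources recorded for the stream.
const sources = await result.sources;
console.log("total sources:", sources.length);

// Per-step sources are available on the recorded steps as well.
const steps = await result.steps;
console.log(steps.map((step) => step.sources.length));
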
@@ -5524,6 +5548,9 @@ var DefaultStreamTextResult = class {
             }
             break;
           }
+          case "source": {
+            break;
+          }
           case "tool-call-streaming-start": {
             controller.enqueue(
               formatDataStreamPart2("tool_call_streaming_start", {