ai 4.0.0-canary.4 → 4.0.0-canary.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,23 @@
  # ai
 
+ ## 4.0.0-canary.5
+
+ ### Major Changes
+
+ - 4d61295: chore (ai): remove streamToResponse and streamingTextResponse
+ - d3ae4f6: chore (ui/react): remove useObject setInput helper
+ - 6090cea: chore (ai): remove rawResponse from generate/stream result objects
+ - 2f09717: chore (ai): remove deprecated telemetry data
+
+ ### Patch Changes
+
+ - Updated dependencies [9f81e66]
+ - Updated dependencies [d3ae4f6]
+ - Updated dependencies [8426f55]
+ - @ai-sdk/ui-utils@1.0.0-canary.5
+ - @ai-sdk/react@1.0.0-canary.5
+ - @ai-sdk/provider-utils@2.0.0-canary.3
+
  ## 4.0.0-canary.4
 
  ### Major Changes
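For code that consumed the APIs removed above, a minimal migration sketch follows. It assumes a fetch-style route handler and the `@ai-sdk/openai` provider with a placeholder model id; neither appears in this diff. Only `toDataStreamResponse()` and `response.headers` are taken from the deprecation notices being deleted below.

```ts
import { generateText, streamText } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider, not part of this diff

// Before: return new StreamingTextResponse(stream) or streamToResponse(stream, res).
// After: the streamText result builds the Response itself.
export async function POST(req: Request) {
  const { prompt } = await req.json();
  const result = await streamText({
    model: openai('gpt-4o-mini'), // placeholder model id
    prompt,
  });
  return result.toDataStreamResponse();
}

// Before: result.rawResponse?.headers
// After: provider response headers are exposed on result.response.headers.
export async function headersExample(prompt: string) {
  const result = await generateText({
    model: openai('gpt-4o-mini'), // placeholder model id
    prompt,
  });
  return result.response.headers;
}
```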
package/dist/index.d.mts CHANGED
@@ -665,17 +665,6 @@ interface GenerateObjectResult<T> {
  */
  readonly warnings: CallWarning[] | undefined;
  /**
- Optional raw response data.
-
- @deprecated Use `response.headers` instead.
- */
- readonly rawResponse?: {
- /**
- Response headers.
- */
- headers?: Record<string, string>;
- };
- /**
  Additional request information.
  */
  readonly request: LanguageModelRequestMetadata;
@@ -935,17 +924,6 @@ interface StreamObjectResult<PARTIAL, RESULT, ELEMENT_STREAM> {
  */
  readonly experimental_providerMetadata: Promise<ProviderMetadata | undefined>;
  /**
- Optional raw response data.
-
- @deprecated Use `response` instead.
- */
- readonly rawResponse?: {
- /**
- Response headers.
- */
- headers?: Record<string, string>;
- };
- /**
  Additional request information from the last step.
  */
  readonly request: Promise<LanguageModelRequestMetadata>;
@@ -1029,17 +1007,6 @@ type OnFinishCallback<RESULT> = (event: {
  */
  error: unknown | undefined;
  /**
- Optional raw response data.
-
- @deprecated Use `response` instead.
- */
- rawResponse?: {
- /**
- Response headers.
- */
- headers?: Record<string, string>;
- };
- /**
  Response metadata.
  */
  response: LanguageModelResponseMetadata;
@@ -1418,17 +1385,6 @@ type StepResult<TOOLS extends Record<string, CoreTool>> = {
  */
  readonly logprobs: LogProbs | undefined;
  /**
- Optional raw response data.
-
- @deprecated Use `response.headers` instead.
- */
- readonly rawResponse?: {
- /**
- Response headers.
- */
- readonly headers?: Record<string, string>;
- };
- /**
  Additional request information.
  */
  readonly request: LanguageModelRequestMetadata;
@@ -1500,17 +1456,6 @@ interface GenerateTextResult<TOOLS extends Record<string, CoreTool>> {
  */
  readonly steps: Array<StepResult<TOOLS>>;
  /**
- Optional raw response data.
-
- @deprecated Use `response.headers` instead.
- */
- readonly rawResponse?: {
- /**
- Response headers.
- */
- readonly headers?: Record<string, string>;
- };
- /**
  Additional request information.
  */
  readonly request: LanguageModelRequestMetadata;
@@ -1719,17 +1664,6 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
  */
  readonly toolResults: Promise<ToolResultUnion<TOOLS>[]>;
  /**
- Optional raw response data.
-
- @deprecated Use `response` instead.
- */
- readonly rawResponse?: {
- /**
- Response headers.
- */
- headers?: Record<string, string>;
- };
- /**
  @deprecated use `response.messages` instead.
  */
  readonly responseMessages: Promise<Array<CoreAssistantMessage | CoreToolMessage>>;
@@ -2436,31 +2370,4 @@ declare namespace llamaindexAdapter {
  };
  }
 
- /**
- * A utility function to stream a ReadableStream to a Node.js response-like object.
- *
- * @deprecated Use `pipeDataStreamToResponse` (part of `StreamTextResult`) instead.
- */
- declare function streamToResponse(res: ReadableStream, response: ServerResponse$1, init?: {
- headers?: Record<string, string>;
- status?: number;
- }, data?: StreamData): void;
-
- /**
- * A utility class for streaming text responses.
- *
- * @deprecated Use `streamText.toDataStreamResponse()` (if you did send StreamData)
- * or a regular `Response` instead (if you did not send any StreamData):
- *
- * ```ts
- * return new Response(stream, {
- * status: 200,
- * contentType: 'text/plain; charset=utf-8',
- * })
- * ```
- */
- declare class StreamingTextResponse extends Response {
- constructor(res: ReadableStream, init?: ResponseInit, data?: StreamData);
- }
-
- export { AssistantContent, AssistantResponse, CallWarning, CompletionTokenUsage, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, ToolCallUnion as CoreToolCallUnion, CoreToolChoice, CoreToolMessage, ToolResultUnion as CoreToolResultUnion, CoreUserMessage, DataContent, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, EmbeddingTokenUsage, ExperimentalTool, Experimental_LanguageModelV1Middleware, FilePart, FinishReason, GenerateObjectResult, GenerateTextResult, ImagePart, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidToolArgumentsError, langchainAdapter as LangChainAdapter, LanguageModel, LanguageModelRequestMetadata, LanguageModelResponseMetadata, LanguageModelResponseMetadataWithHeaders, LanguageModelUsage, llamaindexAdapter as LlamaIndexAdapter, LogProbs, MessageConversionError, NoObjectGeneratedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, Provider, ProviderMetadata, RetryError, StepResult, StreamData, StreamObjectResult, StreamTextResult, StreamingTextResponse, TextPart, TextStreamPart, TokenUsage, ToolCallPart, ToolContent, ToolResultPart, UserContent, convertToCoreMessages, cosineSimilarity, createStreamDataTransformer, embed, embedMany, experimental_AssistantResponse, experimental_ModelRegistry, experimental_Provider, experimental_ProviderRegistry, experimental_StreamData, experimental_createModelRegistry, experimental_createProviderRegistry, experimental_customProvider, experimental_wrapLanguageModel, generateObject, generateText, streamObject, streamText, streamToResponse, tool };
+ export { AssistantContent, AssistantResponse, CallWarning, CompletionTokenUsage, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, ToolCallUnion as CoreToolCallUnion, CoreToolChoice, CoreToolMessage, ToolResultUnion as CoreToolResultUnion, CoreUserMessage, DataContent, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, EmbeddingTokenUsage, ExperimentalTool, Experimental_LanguageModelV1Middleware, FilePart, FinishReason, GenerateObjectResult, GenerateTextResult, ImagePart, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidToolArgumentsError, langchainAdapter as LangChainAdapter, LanguageModel, LanguageModelRequestMetadata, LanguageModelResponseMetadata, LanguageModelResponseMetadataWithHeaders, LanguageModelUsage, llamaindexAdapter as LlamaIndexAdapter, LogProbs, MessageConversionError, NoObjectGeneratedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, Provider, ProviderMetadata, RetryError, StepResult, StreamData, StreamObjectResult, StreamTextResult, TextPart, TextStreamPart, TokenUsage, ToolCallPart, ToolContent, ToolResultPart, UserContent, convertToCoreMessages, cosineSimilarity, createStreamDataTransformer, embed, embedMany, experimental_AssistantResponse, experimental_ModelRegistry, experimental_Provider, experimental_ProviderRegistry, experimental_StreamData, experimental_createModelRegistry, experimental_createProviderRegistry, experimental_customProvider, experimental_wrapLanguageModel, generateObject, generateText, streamObject, streamText, tool };
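The removed `streamToResponse` declaration above names `pipeDataStreamToResponse` on `StreamTextResult` as its replacement. A sketch of that usage in a plain Node.js server follows; the provider import and model id are assumptions for illustration, and only `pipeDataStreamToResponse` itself is taken from the deprecation notice.

```ts
import { createServer } from 'node:http';
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider, not part of this diff

createServer(async (req, res) => {
  const result = await streamText({
    model: openai('gpt-4o-mini'), // placeholder model id
    prompt: 'Say hello.',
  });

  // Before: streamToResponse(someReadableStream, res, { status: 200 });
  // After: the result writes the data stream into the ServerResponse directly.
  result.pipeDataStreamToResponse(res);
}).listen(8080);
```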
package/dist/index.d.ts CHANGED
@@ -665,17 +665,6 @@ interface GenerateObjectResult<T> {
  */
  readonly warnings: CallWarning[] | undefined;
  /**
- Optional raw response data.
-
- @deprecated Use `response.headers` instead.
- */
- readonly rawResponse?: {
- /**
- Response headers.
- */
- headers?: Record<string, string>;
- };
- /**
  Additional request information.
  */
  readonly request: LanguageModelRequestMetadata;
@@ -935,17 +924,6 @@ interface StreamObjectResult<PARTIAL, RESULT, ELEMENT_STREAM> {
  */
  readonly experimental_providerMetadata: Promise<ProviderMetadata | undefined>;
  /**
- Optional raw response data.
-
- @deprecated Use `response` instead.
- */
- readonly rawResponse?: {
- /**
- Response headers.
- */
- headers?: Record<string, string>;
- };
- /**
  Additional request information from the last step.
  */
  readonly request: Promise<LanguageModelRequestMetadata>;
@@ -1029,17 +1007,6 @@ type OnFinishCallback<RESULT> = (event: {
  */
  error: unknown | undefined;
  /**
- Optional raw response data.
-
- @deprecated Use `response` instead.
- */
- rawResponse?: {
- /**
- Response headers.
- */
- headers?: Record<string, string>;
- };
- /**
  Response metadata.
  */
  response: LanguageModelResponseMetadata;
@@ -1418,17 +1385,6 @@ type StepResult<TOOLS extends Record<string, CoreTool>> = {
  */
  readonly logprobs: LogProbs | undefined;
  /**
- Optional raw response data.
-
- @deprecated Use `response.headers` instead.
- */
- readonly rawResponse?: {
- /**
- Response headers.
- */
- readonly headers?: Record<string, string>;
- };
- /**
  Additional request information.
  */
  readonly request: LanguageModelRequestMetadata;
@@ -1500,17 +1456,6 @@ interface GenerateTextResult<TOOLS extends Record<string, CoreTool>> {
  */
  readonly steps: Array<StepResult<TOOLS>>;
  /**
- Optional raw response data.
-
- @deprecated Use `response.headers` instead.
- */
- readonly rawResponse?: {
- /**
- Response headers.
- */
- readonly headers?: Record<string, string>;
- };
- /**
  Additional request information.
  */
  readonly request: LanguageModelRequestMetadata;
@@ -1719,17 +1664,6 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
  */
  readonly toolResults: Promise<ToolResultUnion<TOOLS>[]>;
  /**
- Optional raw response data.
-
- @deprecated Use `response` instead.
- */
- readonly rawResponse?: {
- /**
- Response headers.
- */
- headers?: Record<string, string>;
- };
- /**
  @deprecated use `response.messages` instead.
  */
  readonly responseMessages: Promise<Array<CoreAssistantMessage | CoreToolMessage>>;
@@ -2436,31 +2370,4 @@ declare namespace llamaindexAdapter {
  };
  }
 
- /**
- * A utility function to stream a ReadableStream to a Node.js response-like object.
- *
- * @deprecated Use `pipeDataStreamToResponse` (part of `StreamTextResult`) instead.
- */
- declare function streamToResponse(res: ReadableStream, response: ServerResponse$1, init?: {
- headers?: Record<string, string>;
- status?: number;
- }, data?: StreamData): void;
-
- /**
- * A utility class for streaming text responses.
- *
- * @deprecated Use `streamText.toDataStreamResponse()` (if you did send StreamData)
- * or a regular `Response` instead (if you did not send any StreamData):
- *
- * ```ts
- * return new Response(stream, {
- * status: 200,
- * contentType: 'text/plain; charset=utf-8',
- * })
- * ```
- */
- declare class StreamingTextResponse extends Response {
- constructor(res: ReadableStream, init?: ResponseInit, data?: StreamData);
- }
-
- export { AssistantContent, AssistantResponse, CallWarning, CompletionTokenUsage, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, ToolCallUnion as CoreToolCallUnion, CoreToolChoice, CoreToolMessage, ToolResultUnion as CoreToolResultUnion, CoreUserMessage, DataContent, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, EmbeddingTokenUsage, ExperimentalTool, Experimental_LanguageModelV1Middleware, FilePart, FinishReason, GenerateObjectResult, GenerateTextResult, ImagePart, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidToolArgumentsError, langchainAdapter as LangChainAdapter, LanguageModel, LanguageModelRequestMetadata, LanguageModelResponseMetadata, LanguageModelResponseMetadataWithHeaders, LanguageModelUsage, llamaindexAdapter as LlamaIndexAdapter, LogProbs, MessageConversionError, NoObjectGeneratedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, Provider, ProviderMetadata, RetryError, StepResult, StreamData, StreamObjectResult, StreamTextResult, StreamingTextResponse, TextPart, TextStreamPart, TokenUsage, ToolCallPart, ToolContent, ToolResultPart, UserContent, convertToCoreMessages, cosineSimilarity, createStreamDataTransformer, embed, embedMany, experimental_AssistantResponse, experimental_ModelRegistry, experimental_Provider, experimental_ProviderRegistry, experimental_StreamData, experimental_createModelRegistry, experimental_createProviderRegistry, experimental_customProvider, experimental_wrapLanguageModel, generateObject, generateText, streamObject, streamText, streamToResponse, tool };
+ export { AssistantContent, AssistantResponse, CallWarning, CompletionTokenUsage, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, ToolCallUnion as CoreToolCallUnion, CoreToolChoice, CoreToolMessage, ToolResultUnion as CoreToolResultUnion, CoreUserMessage, DataContent, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, EmbeddingTokenUsage, ExperimentalTool, Experimental_LanguageModelV1Middleware, FilePart, FinishReason, GenerateObjectResult, GenerateTextResult, ImagePart, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidToolArgumentsError, langchainAdapter as LangChainAdapter, LanguageModel, LanguageModelRequestMetadata, LanguageModelResponseMetadata, LanguageModelResponseMetadataWithHeaders, LanguageModelUsage, llamaindexAdapter as LlamaIndexAdapter, LogProbs, MessageConversionError, NoObjectGeneratedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, Provider, ProviderMetadata, RetryError, StepResult, StreamData, StreamObjectResult, StreamTextResult, TextPart, TextStreamPart, TokenUsage, ToolCallPart, ToolContent, ToolResultPart, UserContent, convertToCoreMessages, cosineSimilarity, createStreamDataTransformer, embed, embedMany, experimental_AssistantResponse, experimental_ModelRegistry, experimental_Provider, experimental_ProviderRegistry, experimental_StreamData, experimental_createModelRegistry, experimental_createProviderRegistry, experimental_customProvider, experimental_wrapLanguageModel, generateObject, generateText, streamObject, streamText, tool };
package/dist/index.js CHANGED
@@ -43,7 +43,6 @@ __export(streams_exports, {
  NoSuchToolError: () => NoSuchToolError,
  RetryError: () => RetryError,
  StreamData: () => StreamData,
- StreamingTextResponse: () => StreamingTextResponse,
  TypeValidationError: () => import_provider13.TypeValidationError,
  UnsupportedFunctionalityError: () => import_provider13.UnsupportedFunctionalityError,
  convertToCoreMessages: () => convertToCoreMessages,
@@ -67,7 +66,6 @@ __export(streams_exports, {
  readDataStream: () => import_ui_utils10.readDataStream,
  streamObject: () => streamObject,
  streamText: () => streamText,
- streamToResponse: () => streamToResponse,
  tool: () => tool
  });
  module.exports = __toCommonJS(streams_exports);
@@ -2138,9 +2136,6 @@ async function generateObject({
  "ai.response.timestamp": responseData.timestamp.toISOString(),
  "ai.usage.promptTokens": result2.usage.promptTokens,
  "ai.usage.completionTokens": result2.usage.completionTokens,
- // deprecated:
- "ai.finishReason": result2.finishReason,
- "ai.result.object": { output: () => result2.text },
  // standardized gen-ai llm span attributes:
  "gen_ai.response.finish_reasons": [result2.finishReason],
  "gen_ai.response.id": responseData.id,
@@ -2245,9 +2240,6 @@ async function generateObject({
  "ai.response.timestamp": responseData.timestamp.toISOString(),
  "ai.usage.promptTokens": result2.usage.promptTokens,
  "ai.usage.completionTokens": result2.usage.completionTokens,
- // deprecated:
- "ai.finishReason": result2.finishReason,
- "ai.result.object": { output: () => objectText },
  // standardized gen-ai llm span attributes:
  "gen_ai.response.finish_reasons": [result2.finishReason],
  "gen_ai.response.id": responseData.id,
@@ -2301,12 +2293,7 @@ async function generateObject({
  output: () => JSON.stringify(validationResult.value)
  },
  "ai.usage.promptTokens": usage.promptTokens,
- "ai.usage.completionTokens": usage.completionTokens,
- // deprecated:
- "ai.finishReason": finishReason,
- "ai.result.object": {
- output: () => JSON.stringify(validationResult.value)
- }
+ "ai.usage.completionTokens": usage.completionTokens
  }
  })
  );
@@ -2335,9 +2322,6 @@ var DefaultGenerateObjectResult = class {
  this.experimental_providerMetadata = options.providerMetadata;
  this.response = options.response;
  this.request = options.request;
- this.rawResponse = {
- headers: options.response.headers
- };
  this.logprobs = options.logprobs;
  }
  toJsonResponse(init) {
@@ -2712,7 +2696,6 @@ var DefaultStreamObjectResult = class {
  generateId: generateId3
  }) {
  this.warnings = warnings;
- this.rawResponse = rawResponse;
  this.outputStrategy = outputStrategy;
  this.request = Promise.resolve(request);
  this.objectPromise = new DelayedPromise();
@@ -2845,9 +2828,6 @@ var DefaultStreamObjectResult = class {
  "ai.response.timestamp": response.timestamp.toISOString(),
  "ai.usage.promptTokens": finalUsage.promptTokens,
  "ai.usage.completionTokens": finalUsage.completionTokens,
- // deprecated
- "ai.finishReason": finishReason,
- "ai.result.object": { output: () => JSON.stringify(object) },
  // standardized gen-ai llm span attributes:
  "gen_ai.response.finish_reasons": [finishReason],
  "gen_ai.response.id": response.id,
@@ -2866,9 +2846,7 @@ var DefaultStreamObjectResult = class {
  "ai.usage.completionTokens": finalUsage.completionTokens,
  "ai.response.object": {
  output: () => JSON.stringify(object)
- },
- // deprecated
- "ai.result.object": { output: () => JSON.stringify(object) }
+ }
  }
  })
  );
@@ -2876,7 +2854,6 @@ var DefaultStreamObjectResult = class {
  usage: finalUsage,
  object,
  error,
- rawResponse,
  response: {
  ...response,
  headers: rawResponse == null ? void 0 : rawResponse.headers
@@ -3328,14 +3305,6 @@ async function generateText({
  "ai.response.timestamp": responseData.timestamp.toISOString(),
  "ai.usage.promptTokens": result.usage.promptTokens,
  "ai.usage.completionTokens": result.usage.completionTokens,
- // deprecated:
- "ai.finishReason": result.finishReason,
- "ai.result.text": {
- output: () => result.text
- },
- "ai.result.toolCalls": {
- output: () => JSON.stringify(result.toolCalls)
- },
  // standardized gen-ai llm span attributes:
  "gen_ai.response.finish_reasons": [result.finishReason],
  "gen_ai.response.id": responseData.id,
@@ -3438,15 +3407,7 @@ async function generateText({
  output: () => JSON.stringify(currentModelResponse.toolCalls)
  },
  "ai.usage.promptTokens": currentModelResponse.usage.promptTokens,
- "ai.usage.completionTokens": currentModelResponse.usage.completionTokens,
- // deprecated:
- "ai.finishReason": currentModelResponse.finishReason,
- "ai.result.text": {
- output: () => currentModelResponse.text
- },
- "ai.result.toolCalls": {
- output: () => JSON.stringify(currentModelResponse.toolCalls)
- }
+ "ai.usage.completionTokens": currentModelResponse.usage.completionTokens
  }
  })
  );
@@ -3544,9 +3505,6 @@ var DefaultGenerateTextResult = class {
  this.responseMessages = options.responseMessages;
  this.steps = options.steps;
  this.experimental_providerMetadata = options.providerMetadata;
- this.rawResponse = {
- headers: options.response.headers
- };
  this.logprobs = options.logprobs;
  }
  };
@@ -4203,14 +4161,10 @@ var DefaultStreamTextResult = class {
  const msToFirstChunk = now2() - startTimestamp;
  stepFirstChunk = false;
  doStreamSpan2.addEvent("ai.stream.firstChunk", {
- "ai.response.msToFirstChunk": msToFirstChunk,
- // deprecated:
- "ai.stream.msToFirstChunk": msToFirstChunk
+ "ai.response.msToFirstChunk": msToFirstChunk
  });
  doStreamSpan2.setAttributes({
- "ai.response.msToFirstChunk": msToFirstChunk,
- // deprecated:
- "ai.stream.msToFirstChunk": msToFirstChunk
+ "ai.response.msToFirstChunk": msToFirstChunk
  });
  }
  if (chunk.type === "text-delta" && chunk.textDelta.length === 0) {
@@ -4335,12 +4289,6 @@ var DefaultStreamTextResult = class {
  "ai.response.timestamp": stepResponse.timestamp.toISOString(),
  "ai.usage.promptTokens": stepUsage.promptTokens,
  "ai.usage.completionTokens": stepUsage.completionTokens,
- // deprecated
- "ai.finishReason": stepFinishReason,
- "ai.result.text": { output: () => stepText },
- "ai.result.toolCalls": {
- output: () => stepToolCallsJson
- },
  // standardized gen-ai llm span attributes:
  "gen_ai.response.finish_reasons": [stepFinishReason],
  "gen_ai.response.id": stepResponse.id,
@@ -4395,7 +4343,6 @@ var DefaultStreamTextResult = class {
  warnings: self.warnings,
  logprobs: stepLogProbs,
  request: stepRequest,
- rawResponse: self.rawResponse,
  response: {
  ...stepResponse,
  headers: (_a11 = self.rawResponse) == null ? void 0 : _a11.headers,
@@ -4456,13 +4403,7 @@ var DefaultStreamTextResult = class {
  output: () => stepToolCallsJson
  },
  "ai.usage.promptTokens": combinedUsage.promptTokens,
- "ai.usage.completionTokens": combinedUsage.completionTokens,
- // deprecated
- "ai.finishReason": stepFinishReason,
- "ai.result.text": { output: () => fullStepText },
- "ai.result.toolCalls": {
- output: () => stepToolCallsJson
- }
+ "ai.usage.completionTokens": combinedUsage.completionTokens
  }
  })
  );
@@ -4492,7 +4433,6 @@ var DefaultStreamTextResult = class {
  // The type exposed to the users will be correctly inferred.
  toolResults: stepToolResults,
  request: stepRequest,
- rawResponse,
  response: {
  ...stepResponse,
  headers: rawResponse == null ? void 0 : rawResponse.headers,
@@ -5218,48 +5158,6 @@ function trimStartOfStream() {
  return text;
  };
  }
-
- // streams/stream-to-response.ts
- function streamToResponse(res, response, init, data) {
- var _a11;
- response.writeHead((_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200, {
- "Content-Type": "text/plain; charset=utf-8",
- ...init == null ? void 0 : init.headers
- });
- let processedStream = res;
- if (data) {
- processedStream = mergeStreams(data.stream, res);
- }
- const reader = processedStream.getReader();
- function read() {
- reader.read().then(({ done, value }) => {
- if (done) {
- response.end();
- return;
- }
- response.write(value);
- read();
- });
- }
- read();
- }
-
- // streams/streaming-text-response.ts
- var StreamingTextResponse = class extends Response {
- constructor(res, init, data) {
- let processedStream = res;
- if (data) {
- processedStream = mergeStreams(data.stream, res);
- }
- super(processedStream, {
- ...init,
- status: 200,
- headers: prepareResponseHeaders(init, {
- contentType: "text/plain; charset=utf-8"
- })
- });
- }
- };
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  AISDKError,
@@ -5285,7 +5183,6 @@ var StreamingTextResponse = class extends Response {
  NoSuchToolError,
  RetryError,
  StreamData,
- StreamingTextResponse,
  TypeValidationError,
  UnsupportedFunctionalityError,
  convertToCoreMessages,
@@ -5309,7 +5206,6 @@ var StreamingTextResponse = class extends Response {
  readDataStream,
  streamObject,
  streamText,
- streamToResponse,
  tool
  });
  //# sourceMappingURL=index.js.map