ai 6.0.39 → 6.0.40

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/ui/chat.ts CHANGED
@@ -5,6 +5,7 @@ import {
   InferSchema,
 } from '@ai-sdk/provider-utils';
 import { FinishReason } from '../types/language-model';
+import { LanguageModelUsage } from '../types/usage';
 import { UIMessageChunk } from '../ui-message-stream/ui-message-chunks';
 import { consumeStream } from '../util/consume-stream';
 import { SerialJobExecutor } from '../util/serial-job-executor';
@@ -124,6 +125,7 @@ export type ChatOnDataCallback<UI_MESSAGE extends UIMessage> = (
  * @param isDisconnect Indicates whether the request has been ended by a network error.
  * @param isError Indicates whether the request has been ended by an error.
  * @param finishReason The reason why the generation finished.
+ * @param usage Token usage information for the response.
  */
 export type ChatOnFinishCallback<UI_MESSAGE extends UIMessage> = (options: {
   message: UI_MESSAGE;
@@ -132,6 +134,7 @@ export type ChatOnFinishCallback<UI_MESSAGE extends UIMessage> = (options: {
   isDisconnect: boolean;
   isError: boolean;
   finishReason?: FinishReason;
+  usage?: LanguageModelUsage;
 }) => void;

 export interface ChatInit<UI_MESSAGE extends UIMessage> {
@@ -691,6 +694,9 @@ export abstract class AbstractChat<UI_MESSAGE extends UIMessage> {
         isDisconnect,
         isError,
         finishReason: this.activeResponse?.state.finishReason,
+        ...(this.activeResponse?.state.usage != null && {
+          usage: this.activeResponse.state.usage,
+        }),
       });
     } catch (err) {
       console.error(err);
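
With these chat.ts changes, token usage recorded on the active response is now spread into the options passed to onFinish when available. A minimal consumer-side sketch of the updated callback shape follows; whether ChatOnFinishCallback and UIMessage are re-exported from the package root is an assumption, as are the field names on the usage object (values may be undefined depending on the provider):

import type { ChatOnFinishCallback, UIMessage } from 'ai';

// Sketch of a handler matching the updated ChatOnFinishCallback options.
// `usage` stays optional: it is only included when the active response
// actually recorded usage.
const onFinish: ChatOnFinishCallback<UIMessage> = ({
  message,
  finishReason,
  usage,
}) => {
  console.log(`message ${message.id} finished (${finishReason ?? 'unknown'})`);
  if (usage != null) {
    // inputTokens/outputTokens/totalTokens are assumed LanguageModelUsage
    // fields; individual values may be undefined for some providers.
    console.log('tokens:', usage.inputTokens, usage.outputTokens, usage.totalTokens);
  }
};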
@@ -2,6 +2,7 @@ import { FlexibleSchema, validateTypes } from '@ai-sdk/provider-utils';
 import { UIMessageStreamError } from '../error/ui-message-stream-error';
 import { ProviderMetadata } from '../types';
 import { FinishReason } from '../types/language-model';
+import { LanguageModelUsage } from '../types/usage';
 import {
   DataUIMessageChunk,
   InferUIMessageChunk,
@@ -44,6 +45,7 @@ export type StreamingUIMessageState<UI_MESSAGE extends UIMessage> = {
     }
   >;
   finishReason?: FinishReason;
+  usage?: LanguageModelUsage;
 };

 export function createStreamingUIMessageState<UI_MESSAGE extends UIMessage>({
@@ -686,6 +688,9 @@ export function processUIMessageStream<UI_MESSAGE extends UIMessage>({
         if (chunk.finishReason != null) {
           state.finishReason = chunk.finishReason;
         }
+        if (chunk.usage != null) {
+          state.usage = chunk.usage;
+        }
         await updateMessageMetadata(chunk.messageMetadata);
         if (chunk.messageMetadata != null) {
           write();
@@ -4,6 +4,7 @@ import {
   providerMetadataSchema,
 } from '../types/provider-metadata';
 import { FinishReason } from '../types/language-model';
+import { LanguageModelUsage, languageModelUsageSchema } from '../types/usage';
 import {
   InferUIMessageData,
   InferUIMessageMetadata,
@@ -165,6 +166,7 @@ export const uiMessageChunkSchema = lazySchema(() =>
           'other',
         ] as const satisfies readonly FinishReason[])
         .optional(),
+      usage: languageModelUsageSchema.optional(),
       messageMetadata: z.unknown().optional(),
     }),
     z.strictObject({
@@ -323,6 +325,7 @@ export type UIMessageChunk<
   | {
       type: 'finish';
       finishReason?: FinishReason;
+      usage?: LanguageModelUsage;
       messageMetadata?: METADATA;
     }
   | {
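
On the wire, a 'finish' chunk may therefore now carry a usage payload, and uiMessageChunkSchema accepts it as optional. A small sketch of narrowing such a chunk on the receiving side; it relies only on the fields shown in this diff, and assumes UIMessageChunk is re-exported from the package root with defaulted type parameters:

import type { UIMessageChunk } from 'ai';

// Sketch: handling a streamed chunk. Both finishReason and usage remain
// optional on the 'finish' variant, so streams that omit them still parse.
function handleChunk(chunk: UIMessageChunk) {
  if (chunk.type === 'finish') {
    console.log('finish reason:', chunk.finishReason);
    if (chunk.usage != null) {
      console.log('usage:', chunk.usage);
    }
  }
}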