ai 3.3.33 → 3.3.35

This diff represents the changes between publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects the differences between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,27 @@
1
1
  # ai
2
2
 
3
+ ## 3.3.35
4
+
5
+ ### Patch Changes
6
+
7
+ - 14210d5: feat (ai/core): add sendUsage information to streamText data stream methods
8
+ - Updated dependencies [14210d5]
9
+ - @ai-sdk/ui-utils@0.0.42
10
+ - @ai-sdk/react@0.0.57
11
+ - @ai-sdk/solid@0.0.45
12
+ - @ai-sdk/svelte@0.0.47
13
+ - @ai-sdk/vue@0.0.47
14
+
15
+ ## 3.3.34
16
+
17
+ ### Patch Changes
18
+
19
+ - a0403d6: feat (react): support sending attachments using append
20
+ - 678449a: feat (ai/core): export test helpers
21
+ - ff22fac: fix (ai/rsc): streamUI onFinish is called when tool calls have finished
22
+ - Updated dependencies [a0403d6]
23
+ - @ai-sdk/react@0.0.56
24
+
3
25
  ## 3.3.33
4
26
 
5
27
  ### Patch Changes
package/dist/index.d.mts CHANGED
@@ -1,5 +1,5 @@
1
1
  import { Schema, DeepPartial, ToolInvocation, Attachment, JSONValue as JSONValue$1, CreateMessage, FunctionCall as FunctionCall$1, AssistantMessage, DataMessage } from '@ai-sdk/ui-utils';
2
- export { AssistantMessage, AssistantStatus, ChatRequest, ChatRequestOptions, CreateMessage, DataMessage, DeepPartial, Function, FunctionCall, FunctionCallHandler, IdGenerator, JSONValue, Message, RequestOptions, Schema, StreamPart, Tool, ToolCall, ToolCallHandler, ToolChoice, ToolInvocation, UseAssistantOptions, formatStreamPart, jsonSchema, parseStreamPart, processDataProtocolResponse, readDataStream } from '@ai-sdk/ui-utils';
2
+ export { AssistantMessage, AssistantStatus, Attachment, ChatRequest, ChatRequestOptions, CreateMessage, DataMessage, DeepPartial, Function, FunctionCall, FunctionCallHandler, IdGenerator, JSONValue, Message, RequestOptions, Schema, StreamPart, Tool, ToolCall, ToolCallHandler, ToolChoice, ToolInvocation, UseAssistantOptions, formatStreamPart, jsonSchema, parseStreamPart, processDataProtocolResponse, readDataStream } from '@ai-sdk/ui-utils';
3
3
  import { AttributeValue } from '@opentelemetry/api';
4
4
  import { EmbeddingModelV1, EmbeddingModelV1Embedding, LanguageModelV1, LanguageModelV1FinishReason, LanguageModelV1LogProbs, LanguageModelV1CallWarning, LanguageModelV1ProviderMetadata, JSONValue, LanguageModelV1CallOptions, NoSuchModelError, AISDKError } from '@ai-sdk/provider';
5
5
  export { AISDKError, APICallError, EmptyResponseBodyError, InvalidPromptError, InvalidResponseDataError, JSONParseError, LanguageModelV1, LanguageModelV1CallOptions, LanguageModelV1Prompt, LanguageModelV1StreamPart, LoadAPIKeyError, NoContentGeneratedError, NoSuchModelError, TypeValidationError, UnsupportedFunctionalityError } from '@ai-sdk/provider';
@@ -1619,12 +1619,14 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
1619
1619
 
1620
1620
  @param data an optional StreamData object that will be merged into the stream.
1621
1621
  @param getErrorMessage an optional function that converts an error to an error message.
1622
+ @param sendUsage whether to send the usage information to the client. Defaults to true.
1622
1623
 
1623
1624
  @return A data stream.
1624
1625
  */
1625
1626
  toDataStream(options?: {
1626
1627
  data?: StreamData;
1627
1628
  getErrorMessage?: (error: unknown) => string;
1629
+ sendUsage?: boolean;
1628
1630
  }): ReadableStream<Uint8Array>;
1629
1631
  /**
1630
1632
  Writes stream data output to a Node.js response-like object.
@@ -1651,6 +1653,7 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
1651
1653
  init?: ResponseInit;
1652
1654
  data?: StreamData;
1653
1655
  getErrorMessage?: (error: unknown) => string;
1656
+ sendUsage?: boolean;
1654
1657
  }): void;
1655
1658
  /**
1656
1659
  Writes text delta output to a Node.js response-like object.
@@ -1689,6 +1692,7 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
1689
1692
  init?: ResponseInit;
1690
1693
  data?: StreamData;
1691
1694
  getErrorMessage?: (error: unknown) => string;
1695
+ sendUsage?: boolean;
1692
1696
  }): Response;
1693
1697
  /**
1694
1698
  Creates a simple text stream response.
package/dist/index.d.ts CHANGED
@@ -1,5 +1,5 @@
1
1
  import { Schema, DeepPartial, ToolInvocation, Attachment, JSONValue as JSONValue$1, CreateMessage, FunctionCall as FunctionCall$1, AssistantMessage, DataMessage } from '@ai-sdk/ui-utils';
2
- export { AssistantMessage, AssistantStatus, ChatRequest, ChatRequestOptions, CreateMessage, DataMessage, DeepPartial, Function, FunctionCall, FunctionCallHandler, IdGenerator, JSONValue, Message, RequestOptions, Schema, StreamPart, Tool, ToolCall, ToolCallHandler, ToolChoice, ToolInvocation, UseAssistantOptions, formatStreamPart, jsonSchema, parseStreamPart, processDataProtocolResponse, readDataStream } from '@ai-sdk/ui-utils';
2
+ export { AssistantMessage, AssistantStatus, Attachment, ChatRequest, ChatRequestOptions, CreateMessage, DataMessage, DeepPartial, Function, FunctionCall, FunctionCallHandler, IdGenerator, JSONValue, Message, RequestOptions, Schema, StreamPart, Tool, ToolCall, ToolCallHandler, ToolChoice, ToolInvocation, UseAssistantOptions, formatStreamPart, jsonSchema, parseStreamPart, processDataProtocolResponse, readDataStream } from '@ai-sdk/ui-utils';
3
3
  import { AttributeValue } from '@opentelemetry/api';
4
4
  import { EmbeddingModelV1, EmbeddingModelV1Embedding, LanguageModelV1, LanguageModelV1FinishReason, LanguageModelV1LogProbs, LanguageModelV1CallWarning, LanguageModelV1ProviderMetadata, JSONValue, LanguageModelV1CallOptions, NoSuchModelError, AISDKError } from '@ai-sdk/provider';
5
5
  export { AISDKError, APICallError, EmptyResponseBodyError, InvalidPromptError, InvalidResponseDataError, JSONParseError, LanguageModelV1, LanguageModelV1CallOptions, LanguageModelV1Prompt, LanguageModelV1StreamPart, LoadAPIKeyError, NoContentGeneratedError, NoSuchModelError, TypeValidationError, UnsupportedFunctionalityError } from '@ai-sdk/provider';
@@ -1619,12 +1619,14 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
1619
1619
 
1620
1620
  @param data an optional StreamData object that will be merged into the stream.
1621
1621
  @param getErrorMessage an optional function that converts an error to an error message.
1622
+ @param sendUsage whether to send the usage information to the client. Defaults to true.
1622
1623
 
1623
1624
  @return A data stream.
1624
1625
  */
1625
1626
  toDataStream(options?: {
1626
1627
  data?: StreamData;
1627
1628
  getErrorMessage?: (error: unknown) => string;
1629
+ sendUsage?: boolean;
1628
1630
  }): ReadableStream<Uint8Array>;
1629
1631
  /**
1630
1632
  Writes stream data output to a Node.js response-like object.
@@ -1651,6 +1653,7 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
1651
1653
  init?: ResponseInit;
1652
1654
  data?: StreamData;
1653
1655
  getErrorMessage?: (error: unknown) => string;
1656
+ sendUsage?: boolean;
1654
1657
  }): void;
1655
1658
  /**
1656
1659
  Writes text delta output to a Node.js response-like object.
@@ -1689,6 +1692,7 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
1689
1692
  init?: ResponseInit;
1690
1693
  data?: StreamData;
1691
1694
  getErrorMessage?: (error: unknown) => string;
1695
+ sendUsage?: boolean;
1692
1696
  }): Response;
1693
1697
  /**
1694
1698
  Creates a simple text stream response.
package/dist/index.js CHANGED
@@ -4185,8 +4185,9 @@ var DefaultStreamTextResult = class {
4185
4185
  }
4186
4186
  toDataStreamInternal({
4187
4187
  callbacks = {},
4188
- getErrorMessage: getErrorMessage4 = () => ""
4188
+ getErrorMessage: getErrorMessage4 = () => "",
4189
4189
  // mask error messages for safety by default
4190
+ sendUsage = true
4190
4191
  } = {}) {
4191
4192
  let aggregatedResponse = "";
4192
4193
  const callbackTransformer = new TransformStream({
@@ -4261,10 +4262,10 @@ var DefaultStreamTextResult = class {
4261
4262
  controller.enqueue(
4262
4263
  (0, import_ui_utils10.formatStreamPart)("finish_roundtrip", {
4263
4264
  finishReason: chunk.finishReason,
4264
- usage: {
4265
+ usage: sendUsage ? {
4265
4266
  promptTokens: chunk.usage.promptTokens,
4266
4267
  completionTokens: chunk.usage.completionTokens
4267
- }
4268
+ } : void 0
4268
4269
  })
4269
4270
  );
4270
4271
  break;
@@ -4272,10 +4273,10 @@ var DefaultStreamTextResult = class {
4272
4273
  controller.enqueue(
4273
4274
  (0, import_ui_utils10.formatStreamPart)("finish_message", {
4274
4275
  finishReason: chunk.finishReason,
4275
- usage: {
4276
+ usage: sendUsage ? {
4276
4277
  promptTokens: chunk.usage.promptTokens,
4277
4278
  completionTokens: chunk.usage.completionTokens
4278
- }
4279
+ } : void 0
4279
4280
  })
4280
4281
  );
4281
4282
  break;
@@ -4299,6 +4300,7 @@ var DefaultStreamTextResult = class {
4299
4300
  };
4300
4301
  const data = options == null ? void 0 : "data" in options ? options.data : void 0;
4301
4302
  const getErrorMessage4 = options == null ? void 0 : "getErrorMessage" in options ? options.getErrorMessage : void 0;
4303
+ const sendUsage = options == null ? void 0 : "sendUsage" in options ? options.sendUsage : void 0;
4302
4304
  writeToServerResponse({
4303
4305
  response,
4304
4306
  status: init == null ? void 0 : init.status,
@@ -4307,7 +4309,7 @@ var DefaultStreamTextResult = class {
4307
4309
  contentType: "text/plain; charset=utf-8",
4308
4310
  dataStreamVersion: "v1"
4309
4311
  }),
4310
- stream: this.toDataStream({ data, getErrorMessage: getErrorMessage4 })
4312
+ stream: this.toDataStream({ data, getErrorMessage: getErrorMessage4, sendUsage })
4311
4313
  });
4312
4314
  }
4313
4315
  pipeTextStreamToResponse(response, init) {
@@ -4326,7 +4328,8 @@ var DefaultStreamTextResult = class {
4326
4328
  }
4327
4329
  toDataStream(options) {
4328
4330
  const stream = this.toDataStreamInternal({
4329
- getErrorMessage: options == null ? void 0 : options.getErrorMessage
4331
+ getErrorMessage: options == null ? void 0 : options.getErrorMessage,
4332
+ sendUsage: options == null ? void 0 : options.sendUsage
4330
4333
  });
4331
4334
  return (options == null ? void 0 : options.data) ? mergeStreams(options == null ? void 0 : options.data.stream, stream) : stream;
4332
4335
  }
@@ -4339,14 +4342,18 @@ var DefaultStreamTextResult = class {
4339
4342
  };
4340
4343
  const data = options == null ? void 0 : "data" in options ? options.data : void 0;
4341
4344
  const getErrorMessage4 = options == null ? void 0 : "getErrorMessage" in options ? options.getErrorMessage : void 0;
4342
- return new Response(this.toDataStream({ data, getErrorMessage: getErrorMessage4 }), {
4343
- status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
4344
- statusText: init == null ? void 0 : init.statusText,
4345
- headers: prepareResponseHeaders(init, {
4346
- contentType: "text/plain; charset=utf-8",
4347
- dataStreamVersion: "v1"
4348
- })
4349
- });
4345
+ const sendUsage = options == null ? void 0 : "sendUsage" in options ? options.sendUsage : void 0;
4346
+ return new Response(
4347
+ this.toDataStream({ data, getErrorMessage: getErrorMessage4, sendUsage }),
4348
+ {
4349
+ status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
4350
+ statusText: init == null ? void 0 : init.statusText,
4351
+ headers: prepareResponseHeaders(init, {
4352
+ contentType: "text/plain; charset=utf-8",
4353
+ dataStreamVersion: "v1"
4354
+ })
4355
+ }
4356
+ );
4350
4357
  }
4351
4358
  toTextStreamResponse(init) {
4352
4359
  var _a11;