ai 5.0.0-beta.21 → 5.0.0-beta.22

This diff shows the changes between publicly released package versions as they appear in their respective registries and is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,19 @@
  # ai

+ ## 5.0.0-beta.22
+
+ ### Patch Changes
+
+ - de2d2ab: feat(ai): add provider and provider registry middleware functionality
+ - 6c42e56: feat (ai): validate ui stream data chunks
+ - c93a8bc: chore(ai): export AsyncIterableStream type from async-iterable-stream module
+ - 20398f2: feat: ai sdk cli documentation + adjusted default model
+ - 86293e5: fix (ai): use correct generateMessageId in streamText toUIMessageStream
+ - 205077b: fix: improve Zod compatibility
+ - Updated dependencies [205077b]
+   - @ai-sdk/provider-utils@3.0.0-beta.4
+   - @ai-sdk/gateway@1.0.0-beta.9
+
  ## 5.0.0-beta.21

  ### Patch Changes
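The "validate ui stream data chunks" change (6c42e56) shows up further down in this diff as a new dataPartSchemas lookup in processUIMessageStream: when a schema is registered for a data chunk's type, the chunk payload is validated before it is applied to the message. A minimal standalone sketch of that check, using zod as a stand-in for the validateTypes call from @ai-sdk/provider-utils and a hypothetical schema map (the public option for supplying these schemas is not shown in this diff):

```ts
import { z } from "zod";

// Hypothetical schema map keyed by data chunk type ("data-<name>"),
// mirroring the dataPartSchemas lookup added in dist/bin/ai.js below.
const dataPartSchemas: Record<string, z.ZodTypeAny | undefined> = {
  "data-weather": z.object({ city: z.string(), temperature: z.number() }),
};

interface DataChunk {
  type: string;
  id?: string;
  data: unknown;
  transient?: boolean;
}

// Throws if a schema is registered for the chunk type and the payload does
// not match it; chunks without a registered schema pass through unchanged.
function validateDataChunk(chunk: DataChunk): DataChunk {
  const schema = dataPartSchemas[chunk.type];
  if (schema != null) {
    schema.parse(chunk.data);
  }
  return chunk;
}
```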
package/dist/bin/ai.js CHANGED
@@ -751,24 +751,24 @@ function processUIMessageStream({
  }) {
  return stream.pipeThrough(
  new TransformStream({
- async transform(part, controller) {
+ async transform(chunk, controller) {
  await runUpdateMessageJob(async ({ state, write }) => {
  var _a9, _b, _c, _d;
  function updateToolInvocationPart(options) {
  var _a10;
- const part2 = state.message.parts.find(
- (part3) => isToolUIPart(part3) && part3.toolCallId === options.toolCallId
+ const part = state.message.parts.find(
+ (part2) => isToolUIPart(part2) && part2.toolCallId === options.toolCallId
  );
  const anyOptions = options;
- const anyPart = part2;
- if (part2 != null) {
- part2.state = options.state;
+ const anyPart = part;
+ if (part != null) {
+ part.state = options.state;
  anyPart.input = anyOptions.input;
  anyPart.output = anyOptions.output;
  anyPart.errorText = anyOptions.errorText;
- anyPart.providerExecuted = (_a10 = anyOptions.providerExecuted) != null ? _a10 : part2.providerExecuted;
- if (anyOptions.providerMetadata != null && part2.state === "input-available") {
- part2.callProviderMetadata = anyOptions.providerMetadata;
+ anyPart.providerExecuted = (_a10 = anyOptions.providerExecuted) != null ? _a10 : part.providerExecuted;
+ if (anyOptions.providerMetadata != null && part.state === "input-available") {
+ part.callProviderMetadata = anyOptions.providerMetadata;
  }
  } else {
  state.message.parts.push({
@@ -795,31 +795,31 @@ function processUIMessageStream({
  state.message.metadata = mergedMetadata;
  }
  }
- switch (part.type) {
+ switch (chunk.type) {
  case "text-start": {
  const textPart = {
  type: "text",
  text: "",
- providerMetadata: part.providerMetadata,
+ providerMetadata: chunk.providerMetadata,
  state: "streaming"
  };
- state.activeTextParts[part.id] = textPart;
+ state.activeTextParts[chunk.id] = textPart;
  state.message.parts.push(textPart);
  write();
  break;
  }
  case "text-delta": {
- const textPart = state.activeTextParts[part.id];
- textPart.text += part.delta;
- textPart.providerMetadata = (_a9 = part.providerMetadata) != null ? _a9 : textPart.providerMetadata;
+ const textPart = state.activeTextParts[chunk.id];
+ textPart.text += chunk.delta;
+ textPart.providerMetadata = (_a9 = chunk.providerMetadata) != null ? _a9 : textPart.providerMetadata;
  write();
  break;
  }
  case "text-end": {
- const textPart = state.activeTextParts[part.id];
+ const textPart = state.activeTextParts[chunk.id];
  textPart.state = "done";
- textPart.providerMetadata = (_b = part.providerMetadata) != null ? _b : textPart.providerMetadata;
- delete state.activeTextParts[part.id];
+ textPart.providerMetadata = (_b = chunk.providerMetadata) != null ? _b : textPart.providerMetadata;
+ delete state.activeTextParts[chunk.id];
  write();
  break;
  }
@@ -827,34 +827,34 @@ function processUIMessageStream({
  const reasoningPart = {
  type: "reasoning",
  text: "",
- providerMetadata: part.providerMetadata,
+ providerMetadata: chunk.providerMetadata,
  state: "streaming"
  };
- state.activeReasoningParts[part.id] = reasoningPart;
+ state.activeReasoningParts[chunk.id] = reasoningPart;
  state.message.parts.push(reasoningPart);
  write();
  break;
  }
  case "reasoning-delta": {
- const reasoningPart = state.activeReasoningParts[part.id];
- reasoningPart.text += part.delta;
- reasoningPart.providerMetadata = (_c = part.providerMetadata) != null ? _c : reasoningPart.providerMetadata;
+ const reasoningPart = state.activeReasoningParts[chunk.id];
+ reasoningPart.text += chunk.delta;
+ reasoningPart.providerMetadata = (_c = chunk.providerMetadata) != null ? _c : reasoningPart.providerMetadata;
  write();
  break;
  }
  case "reasoning-end": {
- const reasoningPart = state.activeReasoningParts[part.id];
- reasoningPart.providerMetadata = (_d = part.providerMetadata) != null ? _d : reasoningPart.providerMetadata;
+ const reasoningPart = state.activeReasoningParts[chunk.id];
+ reasoningPart.providerMetadata = (_d = chunk.providerMetadata) != null ? _d : reasoningPart.providerMetadata;
  reasoningPart.state = "done";
- delete state.activeReasoningParts[part.id];
+ delete state.activeReasoningParts[chunk.id];
  write();
  break;
  }
  case "file": {
  state.message.parts.push({
  type: "file",
- mediaType: part.mediaType,
- url: part.url
+ mediaType: chunk.mediaType,
+ url: chunk.url
  });
  write();
  break;
@@ -862,10 +862,10 @@ function processUIMessageStream({
  case "source-url": {
  state.message.parts.push({
  type: "source-url",
- sourceId: part.sourceId,
- url: part.url,
- title: part.title,
- providerMetadata: part.providerMetadata
+ sourceId: chunk.sourceId,
+ url: chunk.url,
+ title: chunk.title,
+ providerMetadata: chunk.providerMetadata
  });
  write();
  break;
@@ -873,40 +873,40 @@ function processUIMessageStream({
  case "source-document": {
  state.message.parts.push({
  type: "source-document",
- sourceId: part.sourceId,
- mediaType: part.mediaType,
- title: part.title,
- filename: part.filename,
- providerMetadata: part.providerMetadata
+ sourceId: chunk.sourceId,
+ mediaType: chunk.mediaType,
+ title: chunk.title,
+ filename: chunk.filename,
+ providerMetadata: chunk.providerMetadata
  });
  write();
  break;
  }
  case "tool-input-start": {
  const toolInvocations = state.message.parts.filter(isToolUIPart);
- state.partialToolCalls[part.toolCallId] = {
+ state.partialToolCalls[chunk.toolCallId] = {
  text: "",
- toolName: part.toolName,
+ toolName: chunk.toolName,
  index: toolInvocations.length
  };
  updateToolInvocationPart({
- toolCallId: part.toolCallId,
- toolName: part.toolName,
+ toolCallId: chunk.toolCallId,
+ toolName: chunk.toolName,
  state: "input-streaming",
  input: void 0,
- providerExecuted: part.providerExecuted
+ providerExecuted: chunk.providerExecuted
  });
  write();
  break;
  }
  case "tool-input-delta": {
- const partialToolCall = state.partialToolCalls[part.toolCallId];
- partialToolCall.text += part.inputTextDelta;
+ const partialToolCall = state.partialToolCalls[chunk.toolCallId];
+ partialToolCall.text += chunk.inputTextDelta;
  const { value: partialArgs } = await parsePartialJson(
  partialToolCall.text
  );
  updateToolInvocationPart({
- toolCallId: part.toolCallId,
+ toolCallId: chunk.toolCallId,
  toolName: partialToolCall.toolName,
  state: "input-streaming",
  input: partialArgs
@@ -916,24 +916,24 @@ function processUIMessageStream({
  }
  case "tool-input-available": {
  updateToolInvocationPart({
- toolCallId: part.toolCallId,
- toolName: part.toolName,
+ toolCallId: chunk.toolCallId,
+ toolName: chunk.toolName,
  state: "input-available",
- input: part.input,
- providerExecuted: part.providerExecuted,
- providerMetadata: part.providerMetadata
+ input: chunk.input,
+ providerExecuted: chunk.providerExecuted,
+ providerMetadata: chunk.providerMetadata
  });
  write();
- if (onToolCall && !part.providerExecuted) {
+ if (onToolCall && !chunk.providerExecuted) {
  const result = await onToolCall({
- toolCall: part
+ toolCall: chunk
  });
  if (result != null) {
  updateToolInvocationPart({
- toolCallId: part.toolCallId,
- toolName: part.toolName,
+ toolCallId: chunk.toolCallId,
+ toolName: chunk.toolName,
  state: "output-available",
- input: part.input,
+ input: chunk.input,
  output: result
  });
  write();
@@ -947,7 +947,7 @@ function processUIMessageStream({
  throw new Error("tool_result must be preceded by a tool_call");
  }
  const toolInvocationIndex = toolInvocations.findIndex(
- (invocation) => invocation.toolCallId === part.toolCallId
+ (invocation) => invocation.toolCallId === chunk.toolCallId
  );
  if (toolInvocationIndex === -1) {
  throw new Error(
@@ -958,12 +958,12 @@ function processUIMessageStream({
  toolInvocations[toolInvocationIndex]
  );
  updateToolInvocationPart({
- toolCallId: part.toolCallId,
+ toolCallId: chunk.toolCallId,
  toolName,
  state: "output-available",
  input: toolInvocations[toolInvocationIndex].input,
- output: part.output,
- providerExecuted: part.providerExecuted
+ output: chunk.output,
+ providerExecuted: chunk.providerExecuted
  });
  write();
  break;
@@ -974,7 +974,7 @@ function processUIMessageStream({
  throw new Error("tool_result must be preceded by a tool_call");
  }
  const toolInvocationIndex = toolInvocations.findIndex(
- (invocation) => invocation.toolCallId === part.toolCallId
+ (invocation) => invocation.toolCallId === chunk.toolCallId
  );
  if (toolInvocationIndex === -1) {
  throw new Error(
@@ -985,12 +985,12 @@ function processUIMessageStream({
  toolInvocations[toolInvocationIndex]
  );
  updateToolInvocationPart({
- toolCallId: part.toolCallId,
+ toolCallId: chunk.toolCallId,
  toolName,
  state: "output-error",
  input: toolInvocations[toolInvocationIndex].input,
- errorText: part.errorText,
- providerExecuted: part.providerExecuted
+ errorText: chunk.errorText,
+ providerExecuted: chunk.providerExecuted
  });
  write();
  break;
@@ -1005,62 +1005,65 @@ function processUIMessageStream({
  break;
  }
  case "start": {
- if (part.messageId != null) {
- state.message.id = part.messageId;
+ if (chunk.messageId != null) {
+ state.message.id = chunk.messageId;
  }
- await updateMessageMetadata(part.messageMetadata);
- if (part.messageId != null || part.messageMetadata != null) {
+ await updateMessageMetadata(chunk.messageMetadata);
+ if (chunk.messageId != null || chunk.messageMetadata != null) {
  write();
  }
  break;
  }
  case "finish": {
- await updateMessageMetadata(part.messageMetadata);
- if (part.messageMetadata != null) {
+ await updateMessageMetadata(chunk.messageMetadata);
+ if (chunk.messageMetadata != null) {
  write();
  }
  break;
  }
  case "message-metadata": {
- await updateMessageMetadata(part.messageMetadata);
- if (part.messageMetadata != null) {
+ await updateMessageMetadata(chunk.messageMetadata);
+ if (chunk.messageMetadata != null) {
  write();
  }
  break;
  }
  case "error": {
- onError == null ? void 0 : onError(new Error(part.errorText));
+ onError == null ? void 0 : onError(new Error(chunk.errorText));
  break;
  }
  default: {
- if (isDataUIMessageChunk(part)) {
- const dataPart = part;
- if (dataPart.transient) {
- onData == null ? void 0 : onData(dataPart);
+ if (isDataUIMessageChunk(chunk)) {
+ if ((dataPartSchemas == null ? void 0 : dataPartSchemas[chunk.type]) != null) {
+ await (0, import_provider_utils2.validateTypes)({
+ value: chunk.data,
+ schema: dataPartSchemas[chunk.type]
+ });
+ }
+ const dataChunk = chunk;
+ if (dataChunk.transient) {
+ onData == null ? void 0 : onData(dataChunk);
  break;
  }
- const existingPart = dataPart.id != null ? state.message.parts.find(
- (partArg) => dataPart.type === partArg.type && dataPart.id === partArg.id
+ const existingUIPart = dataChunk.id != null ? state.message.parts.find(
+ (chunkArg) => dataChunk.type === chunkArg.type && dataChunk.id === chunkArg.id
  ) : void 0;
- if (existingPart != null) {
- existingPart.data = isObject(existingPart.data) && isObject(dataPart.data) ? mergeObjects(existingPart.data, dataPart.data) : dataPart.data;
+ if (existingUIPart != null) {
+ existingUIPart.data = dataChunk.data;
  } else {
- state.message.parts.push(dataPart);
+ state.message.parts.push(dataChunk);
  }
- onData == null ? void 0 : onData(dataPart);
+ onData == null ? void 0 : onData(dataChunk);
  write();
  }
  }
  }
- controller.enqueue(part);
+ controller.enqueue(chunk);
  });
  }
  })
  );
  }
- function isObject(value) {
- return typeof value === "object" && value !== null;
- }

  // src/ui-message-stream/handle-ui-message-stream-finish.ts
  function handleUIMessageStreamFinish({
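Besides the schema validation, this hunk changes how an incoming data chunk is reconciled with an existing data part of the same type and id: the previous deep merge via mergeObjects is replaced by a plain overwrite, and the now-unused isObject helper is removed. A standalone sketch of the new behavior, with assumed types:

```ts
interface DataUIPart {
  type: string;
  id?: string;
  data: unknown;
}

// Hypothetical helper mirroring the hunk above: a data part with a matching
// type and id now has its payload replaced outright instead of being
// deep-merged with the previous payload.
function reconcileDataPart(parts: DataUIPart[], incoming: DataUIPart): void {
  const existing =
    incoming.id != null
      ? parts.find((p) => p.type === incoming.type && p.id === incoming.id)
      : undefined;
  if (existing != null) {
    existing.data = incoming.data; // previously: mergeObjects(existing.data, incoming.data)
  } else {
    parts.push(incoming);
  }
}
```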
@@ -4071,10 +4074,10 @@ var DefaultStreamTextResult = class {
  sendFinish = true,
  onError = import_provider16.getErrorMessage
  } = {}) {
- const responseMessageId = getResponseUIMessageId({
+ const responseMessageId = generateMessageId != null ? getResponseUIMessageId({
  originalMessages,
- responseMessageId: this.generateId
- });
+ responseMessageId: generateMessageId
+ }) : void 0;
  const baseStream = this.fullStream.pipeThrough(
  new TransformStream({
  transform: async (part, controller) => {
@@ -4228,8 +4231,8 @@ var DefaultStreamTextResult = class {
  if (sendStart) {
  controller.enqueue({
  type: "start",
- messageId: responseMessageId,
- messageMetadata: messageMetadataValue
+ ...messageMetadataValue != null ? { messageMetadata: messageMetadataValue } : {},
+ ...responseMessageId != null ? { messageId: responseMessageId } : {}
  });
  }
  break;
@@ -4238,7 +4241,7 @@ var DefaultStreamTextResult = class {
  if (sendFinish) {
  controller.enqueue({
  type: "finish",
- messageMetadata: messageMetadataValue
+ ...messageMetadataValue != null ? { messageMetadata: messageMetadataValue } : {}
  });
  }
  break;
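Together with the generateMessageId change above, the "start" and "finish" chunks are now assembled with conditional spreads, so fields without a value are omitted instead of being emitted as explicit undefined properties. A minimal sketch of the resulting shapes, with assumed types and hypothetical helper names:

```ts
interface StartChunk {
  type: "start";
  messageId?: string;
  messageMetadata?: unknown;
}

interface FinishChunk {
  type: "finish";
  messageMetadata?: unknown;
}

// Mirrors the conditional spreads in the hunks above: the messageId key only
// appears when a generateMessageId function produced an id, and metadata only
// appears when a value is present.
function buildStartChunk(
  responseMessageId: string | undefined,
  messageMetadataValue: unknown,
): StartChunk {
  return {
    type: "start",
    ...(messageMetadataValue != null ? { messageMetadata: messageMetadataValue } : {}),
    ...(responseMessageId != null ? { messageId: responseMessageId } : {}),
  };
}

function buildFinishChunk(messageMetadataValue: unknown): FinishChunk {
  return {
    type: "finish",
    ...(messageMetadataValue != null ? { messageMetadata: messageMetadataValue } : {}),
  };
}
```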
@@ -4412,7 +4415,7 @@ function readFileContent(filePath) {
  function parseArgs() {
  const args = process.argv.slice(2);
  const options = {
- model: process.env.AI_MODEL || "openai/gpt-4",
+ model: process.env.AI_DEFAULT_MODEL || "openai/gpt-4",
  files: [],
  help: false,
  version: false,
@@ -4493,10 +4496,9 @@ Options:

  Authentication (required):
  export AI_GATEWAY_API_KEY="your-key" # Get from Vercel Dashboard (AI tab)
- export VERCEL_OIDC_TOKEN="your-token" # For Vercel projects (or run: vercel env pull)
-
+
  Environment Variables:
- AI_MODEL: Default model to use
+ AI_DEFAULT_MODEL: Default model to use
  AI_SYSTEM: Default system message
  AI_VERBOSE: Set to 'true' for detailed output

@@ -4513,7 +4515,9 @@ Unix-style piping:
  cat README.md | npx ai "Summarize this"
  curl -s https://api.github.com/repos/vercel/ai | npx ai "What is this repository about?"

- The gateway supports OpenAI, Anthropic, Google, Groq, and more providers.`);
+ The gateway supports OpenAI, Anthropic, Google, Groq, and more providers.
+
+ For detailed setup instructions, visit: https://ai-sdk.dev/docs/cli/authentication`);
  }
  function showVersion() {
  console.log("1.0.0");
@@ -4597,7 +4601,7 @@ ${prompt}` : stdinContent;
  }
  console.error("");
  }
- const hasApiKey = process.env.AI_GATEWAY_API_KEY || process.env.VERCEL_OIDC_TOKEN;
+ const hasApiKey = process.env.AI_GATEWAY_API_KEY;
  if (!hasApiKey) {
  console.error(`Error: Authentication required.

@@ -4605,12 +4609,10 @@ Set up authentication with one of these options:

  # Option 1: Export in current session
  export AI_GATEWAY_API_KEY="your-key-here"
- export VERCEL_OIDC_TOKEN="your-oidc-token"
- export AI_MODEL="anthropic/claude-3-5-sonnet"
+ export AI_DEFAULT_MODEL="anthropic/claude-3-5-sonnet"

  # Option 2: Add to shell profile (~/.bashrc, ~/.zshrc)
  echo 'export AI_GATEWAY_API_KEY="your-key"' >> ~/.bashrc
- # Or run: vercel env pull

  Get your API key from the Vercel Dashboard (AI tab > API keys).
  Use --help for more details and examples.