@ai-sdk/openai 3.0.0-beta.60 → 3.0.0-beta.62

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -2856,6 +2856,8 @@ var openaiResponsesChunkSchema = lazySchema15(
  annotation: z17.discriminatedUnion("type", [
  z17.object({
  type: z17.literal("url_citation"),
+ start_index: z17.number(),
+ end_index: z17.number(),
  url: z17.string(),
  title: z17.string()
  }),
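The `url_citation` annotation in the streaming chunk schema now requires `start_index` and `end_index` offsets in addition to `url` and `title`. A minimal standalone sketch of the updated shape, written against plain zod rather than the bundled `z17` alias (the sample payload is illustrative, not taken from a real response):

import { z } from "zod";

// Equivalent standalone shape of the updated url_citation annotation.
const urlCitationAnnotation = z.object({
  type: z.literal("url_citation"),
  start_index: z.number(), // character offset where the cited span starts
  end_index: z.number(),   // character offset where the cited span ends
  url: z.string(),
  title: z.string(),
});

// Illustrative payload in the shape the schema now expects:
urlCitationAnnotation.parse({
  type: "url_citation",
  start_index: 0,
  end_index: 42,
  url: "https://example.com",
  title: "Example",
});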
@@ -2902,10 +2904,13 @@ var openaiResponsesChunkSchema = lazySchema15(
  }),
  z17.object({
  type: z17.literal("error"),
- code: z17.string(),
- message: z17.string(),
- param: z17.string().nullish(),
- sequence_number: z17.number()
+ sequence_number: z17.number(),
+ error: z17.object({
+ type: z17.string(),
+ code: z17.string(),
+ message: z17.string(),
+ param: z17.string().nullish()
+ })
  }),
  z17.object({ type: z17.string() }).loose().transform((value) => ({
  type: "unknown_chunk",
@@ -2918,13 +2923,15 @@ var openaiResponsesChunkSchema = lazySchema15(
  var openaiResponsesResponseSchema = lazySchema15(
  () => zodSchema15(
  z17.object({
- id: z17.string(),
- created_at: z17.number(),
+ id: z17.string().optional(),
+ created_at: z17.number().optional(),
  error: z17.object({
- code: z17.string(),
- message: z17.string()
+ message: z17.string(),
+ type: z17.string(),
+ param: z17.string().nullish(),
+ code: z17.string()
  }).nullish(),
- model: z17.string(),
+ model: z17.string().optional(),
  output: z17.array(
  z17.discriminatedUnion("type", [
  z17.object({
@@ -3127,7 +3134,7 @@ var openaiResponsesResponseSchema = lazySchema15(
  approval_request_id: z17.string()
  })
  ])
- ),
+ ).optional(),
  service_tier: z17.string().nullish(),
  incomplete_details: z17.object({ reason: z17.string() }).nullish(),
  usage: z17.object({
@@ -3135,7 +3142,7 @@ var openaiResponsesResponseSchema = lazySchema15(
  input_tokens_details: z17.object({ cached_tokens: z17.number().nullish() }).nullish(),
  output_tokens: z17.number(),
  output_tokens_details: z17.object({ reasoning_tokens: z17.number().nullish() }).nullish()
- })
+ }).optional()
  })
  )
  );
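With `id`, `created_at`, `model`, `output`, and `usage` now optional (see the three hunks above), a response body that only carries an `error` object still passes validation. A simplified sketch in plain zod, with the `output` and `usage` shapes abbreviated and an illustrative payload:

import { z } from "zod";

const responseShape = z.object({
  id: z.string().optional(),
  created_at: z.number().optional(),
  error: z.object({
    message: z.string(),
    type: z.string(),
    param: z.string().nullish(),
    code: z.string(),
  }).nullish(),
  model: z.string().optional(),
  output: z.array(z.unknown()).optional(), // abbreviated
  usage: z.object({
    input_tokens: z.number(),
    output_tokens: z.number(),
  }).optional(),                           // abbreviated
});

// An error-only body no longer fails schema validation:
responseShape.parse({
  error: {
    message: "The requested model does not exist.",
    type: "invalid_request_error",
    param: null,
    code: "model_not_found",
  },
});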
@@ -4017,6 +4024,7 @@ var OpenAIResponsesLanguageModel = class {
  if (typeof response.service_tier === "string") {
  providerMetadata.openai.serviceTier = response.service_tier;
  }
+ const usage = response.usage;
  return {
  content,
  finishReason: mapOpenAIResponseFinishReason({
@@ -4024,11 +4032,11 @@ var OpenAIResponsesLanguageModel = class {
  hasFunctionCall
  }),
  usage: {
- inputTokens: response.usage.input_tokens,
- outputTokens: response.usage.output_tokens,
- totalTokens: response.usage.input_tokens + response.usage.output_tokens,
- reasoningTokens: (_z = (_y = response.usage.output_tokens_details) == null ? void 0 : _y.reasoning_tokens) != null ? _z : void 0,
- cachedInputTokens: (_B = (_A = response.usage.input_tokens_details) == null ? void 0 : _A.cached_tokens) != null ? _B : void 0
+ inputTokens: usage.input_tokens,
+ outputTokens: usage.output_tokens,
+ totalTokens: usage.input_tokens + usage.output_tokens,
+ reasoningTokens: (_z = (_y = usage.output_tokens_details) == null ? void 0 : _y.reasoning_tokens) != null ? _z : void 0,
+ cachedInputTokens: (_B = (_A = usage.input_tokens_details) == null ? void 0 : _A.cached_tokens) != null ? _B : void 0
  },
  request: { body },
  response: {
@@ -4125,7 +4133,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "tool-call",
  toolCallId: value.item.id,
- toolName: "web_search",
+ toolName: webSearchToolName != null ? webSearchToolName : "web_search",
  input: JSON.stringify({}),
  providerExecuted: true
  });
@@ -4233,7 +4241,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "tool-result",
  toolCallId: value.item.id,
- toolName: "web_search",
+ toolName: webSearchToolName != null ? webSearchToolName : "web_search",
  result: mapWebSearchOutput(value.item.action)
  });
  } else if (value.item.type === "computer_call") {
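Both the `tool-call` and the `tool-result` parts emitted for provider-executed web search now use `webSearchToolName` (the name the web search tool was registered under for the request, resolved elsewhere in this file) and only fall back to the literal "web_search" when that name is undefined. A sketch of the fallback in isolation; the registered name in the usage lines is hypothetical:

// Mirrors the down-leveled `!= null ? :` check in the compiled output above.
function resolveWebSearchToolName(webSearchToolName) {
  return webSearchToolName != null ? webSearchToolName : "web_search";
}

resolveWebSearchToolName("my_web_search"); // "my_web_search" (hypothetical registered name)
resolveWebSearchToolName(undefined);       // "web_search" (fallback)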
@@ -5039,7 +5047,7 @@ var OpenAITranscriptionModel = class {
  };

  // src/version.ts
- var VERSION = true ? "3.0.0-beta.60" : "0.0.0-test";
+ var VERSION = true ? "3.0.0-beta.62" : "0.0.0-test";

  // src/openai-provider.ts
  function createOpenAI(options = {}) {