@ai-sdk/openai 2.0.28 → 2.0.30

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
@@ -2137,36 +2137,35 @@ var OpenAISpeechModel = class {
 
  // src/responses/openai-responses-language-model.ts
  var import_provider8 = require("@ai-sdk/provider");
- var import_provider_utils15 = require("@ai-sdk/provider-utils");
+ var import_provider_utils14 = require("@ai-sdk/provider-utils");
  var import_v417 = require("zod/v4");
 
- // src/responses/convert-to-openai-responses-messages.ts
+ // src/responses/convert-to-openai-responses-input.ts
  var import_provider6 = require("@ai-sdk/provider");
  var import_provider_utils11 = require("@ai-sdk/provider-utils");
  var import_v414 = require("zod/v4");
- var import_provider_utils12 = require("@ai-sdk/provider-utils");
  function isFileId(data, prefixes) {
  if (!prefixes) return false;
  return prefixes.some((prefix) => data.startsWith(prefix));
  }
- async function convertToOpenAIResponsesMessages({
+ async function convertToOpenAIResponsesInput({
  prompt,
  systemMessageMode,
  fileIdPrefixes
  }) {
  var _a, _b, _c, _d, _e, _f;
- const messages = [];
+ const input = [];
  const warnings = [];
  for (const { role, content } of prompt) {
  switch (role) {
  case "system": {
  switch (systemMessageMode) {
  case "system": {
- messages.push({ role: "system", content });
+ input.push({ role: "system", content });
  break;
  }
  case "developer": {
- messages.push({ role: "developer", content });
+ input.push({ role: "developer", content });
  break;
  }
  case "remove": {
@@ -2186,7 +2185,7 @@ async function convertToOpenAIResponsesMessages({
  break;
  }
  case "user": {
- messages.push({
+ input.push({
  role: "user",
  content: content.map((part, index) => {
  var _a2, _b2, _c2;
@@ -2200,7 +2199,7 @@ async function convertToOpenAIResponsesMessages({
  return {
  type: "input_image",
  ...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
- image_url: `data:${mediaType};base64,${(0, import_provider_utils12.convertToBase64)(part.data)}`
+ image_url: `data:${mediaType};base64,${(0, import_provider_utils11.convertToBase64)(part.data)}`
  },
  detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
  };
@@ -2215,7 +2214,7 @@ async function convertToOpenAIResponsesMessages({
  type: "input_file",
  ...typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
  filename: (_c2 = part.filename) != null ? _c2 : `part-${index}.pdf`,
- file_data: `data:application/pdf;base64,${(0, import_provider_utils12.convertToBase64)(part.data)}`
+ file_data: `data:application/pdf;base64,${(0, import_provider_utils11.convertToBase64)(part.data)}`
  }
  };
  } else {
@@ -2231,10 +2230,11 @@ async function convertToOpenAIResponsesMessages({
  }
  case "assistant": {
  const reasoningMessages = {};
+ const toolCallParts = {};
  for (const part of content) {
  switch (part.type) {
  case "text": {
- messages.push({
+ input.push({
  role: "assistant",
  content: [{ type: "output_text", text: part.text }],
  id: (_c = (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.itemId) != null ? _c : void 0
@@ -2242,10 +2242,11 @@ async function convertToOpenAIResponsesMessages({
  break;
  }
  case "tool-call": {
+ toolCallParts[part.toolCallId] = part;
  if (part.providerExecuted) {
  break;
  }
- messages.push({
+ input.push({
  type: "function_call",
  call_id: part.toolCallId,
  name: part.toolName,
@@ -2286,7 +2287,7 @@ async function convertToOpenAIResponsesMessages({
  encrypted_content: providerOptions == null ? void 0 : providerOptions.reasoningEncryptedContent,
  summary: summaryParts
  };
- messages.push(reasoningMessages[reasoningId]);
+ input.push(reasoningMessages[reasoningId]);
  } else {
  existingReasoningMessage.summary.push(...summaryParts);
  }
@@ -2317,7 +2318,7 @@ async function convertToOpenAIResponsesMessages({
  contentValue = JSON.stringify(output.value);
  break;
  }
- messages.push({
+ input.push({
  type: "function_call_output",
  call_id: part.toolCallId,
  output: contentValue
@@ -2331,7 +2332,7 @@ async function convertToOpenAIResponsesMessages({
  }
  }
  }
- return { messages, warnings };
+ return { input, warnings };
  }
  var openaiResponsesReasoningProviderOptionsSchema = import_v414.z.object({
  itemId: import_v414.z.string().nullish(),
@@ -2360,8 +2361,20 @@ function mapOpenAIResponseFinishReason({
  var import_provider7 = require("@ai-sdk/provider");
 
  // src/tool/code-interpreter.ts
- var import_provider_utils13 = require("@ai-sdk/provider-utils");
+ var import_provider_utils12 = require("@ai-sdk/provider-utils");
  var import_v415 = require("zod/v4");
+ var codeInterpreterInputSchema = import_v415.z.object({
+ code: import_v415.z.string().nullish(),
+ containerId: import_v415.z.string()
+ });
+ var codeInterpreterOutputSchema = import_v415.z.object({
+ outputs: import_v415.z.array(
+ import_v415.z.discriminatedUnion("type", [
+ import_v415.z.object({ type: import_v415.z.literal("logs"), logs: import_v415.z.string() }),
+ import_v415.z.object({ type: import_v415.z.literal("image"), url: import_v415.z.string() })
+ ])
+ ).nullish()
+ });
  var codeInterpreterArgsSchema = import_v415.z.object({
  container: import_v415.z.union([
  import_v415.z.string(),
@@ -2370,14 +2383,15 @@ var codeInterpreterArgsSchema = import_v415.z.object({
  })
  ]).optional()
  });
- var codeInterpreterToolFactory = (0, import_provider_utils13.createProviderDefinedToolFactory)({
+ var codeInterpreterToolFactory = (0, import_provider_utils12.createProviderDefinedToolFactoryWithOutputSchema)({
  id: "openai.code_interpreter",
  name: "code_interpreter",
- inputSchema: import_v415.z.object({})
+ inputSchema: codeInterpreterInputSchema,
+ outputSchema: codeInterpreterOutputSchema
  });
 
  // src/tool/web-search.ts
- var import_provider_utils14 = require("@ai-sdk/provider-utils");
+ var import_provider_utils13 = require("@ai-sdk/provider-utils");
  var import_v416 = require("zod/v4");
  var webSearchArgsSchema = import_v416.z.object({
  filters: import_v416.z.object({
@@ -2392,7 +2406,7 @@ var webSearchArgsSchema = import_v416.z.object({
  timezone: import_v416.z.string().optional()
  }).optional()
  });
- var webSearchToolFactory = (0, import_provider_utils14.createProviderDefinedToolFactory)({
+ var webSearchToolFactory = (0, import_provider_utils13.createProviderDefinedToolFactory)({
  id: "openai.web_search",
  name: "web_search",
  inputSchema: import_v416.z.object({
@@ -2534,6 +2548,18 @@ var webSearchCallItem = import_v417.z.object({
  })
  ]).nullish()
  });
+ var codeInterpreterCallItem = import_v417.z.object({
+ type: import_v417.z.literal("code_interpreter_call"),
+ id: import_v417.z.string(),
+ code: import_v417.z.string().nullable(),
+ container_id: import_v417.z.string(),
+ outputs: import_v417.z.array(
+ import_v417.z.discriminatedUnion("type", [
+ import_v417.z.object({ type: import_v417.z.literal("logs"), logs: import_v417.z.string() }),
+ import_v417.z.object({ type: import_v417.z.literal("image"), url: import_v417.z.string() })
+ ])
+ ).nullable()
+ });
  var TOP_LOGPROBS_MAX = 20;
  var LOGPROBS_SCHEMA = import_v417.z.array(
  import_v417.z.object({
@@ -2575,7 +2601,7 @@ var OpenAIResponsesLanguageModel = class {
  toolChoice,
  responseFormat
  }) {
- var _a, _b, _c;
+ var _a, _b, _c, _d;
  const warnings = [];
  const modelConfig = getResponsesModelConfig(this.modelId);
  if (topK != null) {
@@ -2599,13 +2625,13 @@ var OpenAIResponsesLanguageModel = class {
  if (stopSequences != null) {
  warnings.push({ type: "unsupported-setting", setting: "stopSequences" });
  }
- const { messages, warnings: messageWarnings } = await convertToOpenAIResponsesMessages({
+ const { input, warnings: inputWarnings } = await convertToOpenAIResponsesInput({
  prompt,
  systemMessageMode: modelConfig.systemMessageMode,
  fileIdPrefixes: this.config.fileIdPrefixes
  });
- warnings.push(...messageWarnings);
- const openaiOptions = await (0, import_provider_utils15.parseProviderOptions)({
+ warnings.push(...inputWarnings);
+ const openaiOptions = await (0, import_provider_utils14.parseProviderOptions)({
  provider: "openai",
  providerOptions,
  schema: openaiResponsesProviderOptionsSchema
@@ -2618,9 +2644,13 @@ var OpenAIResponsesLanguageModel = class {
  (tool) => tool.type === "provider-defined" && (tool.id === "openai.web_search" || tool.id === "openai.web_search_preview")
  )) == null ? void 0 : _b.name;
  include = webSearchToolName ? Array.isArray(include) ? [...include, "web_search_call.action.sources"] : ["web_search_call.action.sources"] : include;
+ const codeInterpreterToolName = (_c = tools == null ? void 0 : tools.find(
+ (tool) => tool.type === "provider-defined" && tool.id === "openai.code_interpreter"
+ )) == null ? void 0 : _c.name;
+ include = codeInterpreterToolName ? Array.isArray(include) ? [...include, "code_interpreter_call.outputs"] : ["code_interpreter_call.outputs"] : include;
  const baseArgs = {
  model: this.modelId,
- input: messages,
+ input,
  temperature,
  top_p: topP,
  max_output_tokens: maxOutputTokens,
@@ -2630,7 +2660,7 @@ var OpenAIResponsesLanguageModel = class {
  format: responseFormat.schema != null ? {
  type: "json_schema",
  strict: strictJsonSchema,
- name: (_c = responseFormat.name) != null ? _c : "response",
+ name: (_d = responseFormat.name) != null ? _d : "response",
  description: responseFormat.description,
  schema: responseFormat.schema
  } : { type: "json_object" }
@@ -2750,12 +2780,12 @@ var OpenAIResponsesLanguageModel = class {
  responseHeaders,
  value: response,
  rawValue: rawResponse
- } = await (0, import_provider_utils15.postJsonToApi)({
+ } = await (0, import_provider_utils14.postJsonToApi)({
  url,
- headers: (0, import_provider_utils15.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils14.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils15.createJsonResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils14.createJsonResponseHandler)(
  import_v417.z.object({
  id: import_v417.z.string(),
  created_at: import_v417.z.number(),
@@ -2801,9 +2831,7 @@ var OpenAIResponsesLanguageModel = class {
  })
  )
  }),
- import_v417.z.object({
- type: import_v417.z.literal("code_interpreter_call")
- }),
+ codeInterpreterCallItem,
  import_v417.z.object({
  type: import_v417.z.literal("function_call"),
  call_id: import_v417.z.string(),
@@ -2907,7 +2935,7 @@ var OpenAIResponsesLanguageModel = class {
  content.push({
  type: "source",
  sourceType: "url",
- id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils15.generateId)(),
+ id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils14.generateId)(),
  url: annotation.url,
  title: annotation.title
  });
@@ -2915,7 +2943,7 @@ var OpenAIResponsesLanguageModel = class {
  content.push({
  type: "source",
  sourceType: "document",
- id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils15.generateId)(),
+ id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils14.generateId)(),
  mediaType: "text/plain",
  title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
  filename: (_l = annotation.filename) != null ? _l : annotation.file_id
@@ -2999,6 +3027,28 @@ var OpenAIResponsesLanguageModel = class {
  });
  break;
  }
+ case "code_interpreter_call": {
+ content.push({
+ type: "tool-call",
+ toolCallId: part.id,
+ toolName: "code_interpreter",
+ input: JSON.stringify({
+ code: part.code,
+ containerId: part.container_id
+ }),
+ providerExecuted: true
+ });
+ content.push({
+ type: "tool-result",
+ toolCallId: part.id,
+ toolName: "code_interpreter",
+ result: {
+ outputs: part.outputs
+ },
+ providerExecuted: true
+ });
+ break;
+ }
  }
  }
  const providerMetadata = {
@@ -3041,18 +3091,18 @@ var OpenAIResponsesLanguageModel = class {
  warnings,
  webSearchToolName
  } = await this.getArgs(options);
- const { responseHeaders, value: response } = await (0, import_provider_utils15.postJsonToApi)({
+ const { responseHeaders, value: response } = await (0, import_provider_utils14.postJsonToApi)({
  url: this.config.url({
  path: "/responses",
  modelId: this.modelId
  }),
- headers: (0, import_provider_utils15.combineHeaders)(this.config.headers(), options.headers),
+ headers: (0, import_provider_utils14.combineHeaders)(this.config.headers(), options.headers),
  body: {
  ...body,
  stream: true
  },
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils15.createEventSourceResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils14.createEventSourceResponseHandler)(
  openaiResponsesChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -3242,6 +3292,26 @@ var OpenAIResponsesLanguageModel = class {
  },
  providerExecuted: true
  });
+ } else if (value.item.type === "code_interpreter_call") {
+ controller.enqueue({
+ type: "tool-call",
+ toolCallId: value.item.id,
+ toolName: "code_interpreter",
+ input: JSON.stringify({
+ code: value.item.code,
+ containerId: value.item.container_id
+ }),
+ providerExecuted: true
+ });
+ controller.enqueue({
+ type: "tool-result",
+ toolCallId: value.item.id,
+ toolName: "code_interpreter",
+ result: {
+ outputs: value.item.outputs
+ },
+ providerExecuted: true
+ });
  } else if (value.item.type === "message") {
  controller.enqueue({
  type: "text-end",
@@ -3334,7 +3404,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "url",
- id: (_o = (_n = (_m = self.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : (0, import_provider_utils15.generateId)(),
+ id: (_o = (_n = (_m = self.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : (0, import_provider_utils14.generateId)(),
  url: value.annotation.url,
  title: value.annotation.title
  });
@@ -3342,7 +3412,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "document",
- id: (_r = (_q = (_p = self.config).generateId) == null ? void 0 : _q.call(_p)) != null ? _r : (0, import_provider_utils15.generateId)(),
+ id: (_r = (_q = (_p = self.config).generateId) == null ? void 0 : _q.call(_p)) != null ? _r : (0, import_provider_utils14.generateId)(),
  mediaType: "text/plain",
  title: (_t = (_s = value.annotation.quote) != null ? _s : value.annotation.filename) != null ? _t : "Document",
  filename: (_u = value.annotation.filename) != null ? _u : value.annotation.file_id
@@ -3487,6 +3557,7 @@ var responseOutputItemDoneSchema = import_v417.z.object({
  arguments: import_v417.z.string(),
  status: import_v417.z.literal("completed")
  }),
+ codeInterpreterCallItem,
  webSearchCallItem,
  import_v417.z.object({
  type: import_v417.z.literal("computer_call"),
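
The hunks above wire Responses API code_interpreter_call items into ordinary tool-call / tool-result parts with a typed input ({ code, containerId }) and output ({ outputs }). The following consumer-side sketch is not part of the diff; it assumes the factory registered as "openai.code_interpreter" is exposed as openai.tools.codeInterpreter, that the illustrative model id supports the tool, and that provider-executed tool parts surface on the generateText content array as in the AI SDK 5 docs.

// usage-sketch.ts (TypeScript; assumptions noted above)
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

async function main() {
  const result = await generateText({
    // Responses API model; model id is illustrative
    model: openai.responses("gpt-4.1"),
    prompt: "Use Python to compute the 20th Fibonacci number.",
    tools: {
      // provider-defined "openai.code_interpreter" tool; as of this release its
      // output is validated against codeInterpreterOutputSchema shown above
      code_interpreter: openai.tools.codeInterpreter({}),
    },
  });

  // Provider-executed parts: the result payload carries the nullable `outputs`
  // array of { type: "logs", logs } | { type: "image", url } entries.
  for (const part of result.content) {
    if (part.type === "tool-result" && part.toolName === "code_interpreter") {
      console.log(part);
    }
  }
}

main().catch(console.error);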