@ai-sdk/openai 3.0.0-beta.25 → 3.0.0-beta.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2175,7 +2175,7 @@ async function convertToOpenAIResponsesInput({
   store,
   hasLocalShellTool = false
 }) {
-  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+  var _a, _b, _c, _d, _e;
   const input = [];
   const warnings = [];
   for (const { role, content } of prompt) {
@@ -2256,10 +2256,15 @@ async function convertToOpenAIResponsesInput({
         for (const part of content) {
           switch (part.type) {
             case "text": {
+              const id = (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.itemId;
+              if (store && id != null) {
+                input.push({ type: "item_reference", id });
+                break;
+              }
               input.push({
                 role: "assistant",
                 content: [{ type: "output_text", text: part.text }],
-                id: (_c = (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.itemId) != null ? _c : void 0
+                id
               });
               break;
             }
@@ -2268,6 +2273,11 @@ async function convertToOpenAIResponsesInput({
               if (part.providerExecuted) {
                 break;
               }
+              const id = (_d = (_c = part.providerOptions) == null ? void 0 : _c.openai) == null ? void 0 : _d.itemId;
+              if (store && id != null) {
+                input.push({ type: "item_reference", id });
+                break;
+              }
               if (hasLocalShellTool && part.toolName === "local_shell") {
                 const parsedInput = await (0, import_provider_utils20.validateTypes)({
                   value: part.input,
@@ -2276,7 +2286,7 @@ async function convertToOpenAIResponsesInput({
                 input.push({
                   type: "local_shell_call",
                   call_id: part.toolCallId,
-                  id: (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openai) == null ? void 0 : _e.itemId) != null ? _f : void 0,
+                  id,
                   action: {
                     type: "exec",
                     command: parsedInput.action.command,
@@ -2293,7 +2303,7 @@ async function convertToOpenAIResponsesInput({
                 call_id: part.toolCallId,
                 name: part.toolName,
                 arguments: JSON.stringify(part.input),
-                id: (_i = (_h = (_g = part.providerOptions) == null ? void 0 : _g.openai) == null ? void 0 : _h.itemId) != null ? _i : void 0
+                id
               });
               break;
             }
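
Taken together, the hunks above change how previously generated assistant output is replayed: when responses are stored server-side (`store`) and a part carries an OpenAI item id in `providerOptions.openai.itemId`, the converter now emits a lightweight `item_reference` instead of re-sending the full text, `local_shell_call`, or `function_call` item. A minimal standalone sketch of that branch, written with optional chaining rather than the transpiled `_a`/`_b` helpers (the function name is illustrative, not an export of the package):

// Sketch only: prefer referencing a server-stored item over replaying it.
function toResponsesTextItem(part, store) {
  const id = part.providerOptions?.openai?.itemId;
  if (store && id != null) {
    // The Responses API resolves the stored item by id.
    return { type: "item_reference", id };
  }
  // Otherwise send the full assistant output, tagging it with the id if present.
  return {
    role: "assistant",
    content: [{ type: "output_text", text: part.text }],
    id
  };
}

The tool-call branch applies the same check before falling back to the full `local_shell_call` or `function_call` payload.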
@@ -2386,7 +2396,7 @@ async function convertToOpenAIResponsesInput({
              contentValue = output.value;
              break;
            case "execution-denied":
-             contentValue = (_j = output.reason) != null ? _j : "Tool execution denied.";
+             contentValue = (_e = output.reason) != null ? _e : "Tool execution denied.";
              break;
            case "json":
            case "error-json":
@@ -2940,6 +2950,7 @@ var openaiResponsesProviderOptionsSchema = (0, import_provider_utils22.lazySchem
    include: import_v415.z.array(
      import_v415.z.enum([
        "reasoning.encrypted_content",
+       // handled internally by default, only needed for unknown reasoning models
        "file_search_call.results",
        "message.output_text.logprobs"
      ])
@@ -3438,7 +3449,11 @@ var OpenAIResponsesLanguageModel = class {
     const strictJsonSchema = (_b = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _b : false;
     let include = openaiOptions == null ? void 0 : openaiOptions.include;
     function addInclude(key) {
-      include = include != null ? [...include, key] : [key];
+      if (include == null) {
+        include = [key];
+      } else if (!include.includes(key)) {
+        include = [...include, key];
+      }
     }
     function hasOpenAITool(id) {
       return (tools == null ? void 0 : tools.find(
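
`addInclude` now deduplicates, so an entry the caller already supplied via `include` (or one added by an earlier tool check) is not sent twice. A small illustration of the new behaviour, mirroring the hunk above:

// Illustration only: same accumulator semantics as the patched addInclude.
let include = ["reasoning.encrypted_content"]; // e.g. passed in by the caller

function addInclude(key) {
  if (include == null) {
    include = [key];
  } else if (!include.includes(key)) {
    include = [...include, key];
  }
}

addInclude("reasoning.encrypted_content"); // no-op, already present
addInclude("code_interpreter_call.outputs"); // appended once
console.log(include); // ["reasoning.encrypted_content", "code_interpreter_call.outputs"]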
@@ -3458,6 +3473,10 @@ var OpenAIResponsesLanguageModel = class {
     if (hasOpenAITool("openai.code_interpreter")) {
       addInclude("code_interpreter_call.outputs");
     }
+    const store = openaiOptions == null ? void 0 : openaiOptions.store;
+    if (store === false && modelConfig.isReasoningModel) {
+      addInclude("reasoning.encrypted_content");
+    }
     const baseArgs = {
       model: this.modelId,
       input,
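
When `store` is explicitly `false` on a reasoning model, the provider now requests `reasoning.encrypted_content` on its own, presumably so that encrypted reasoning items can still be passed back in later turns without server-side storage, and callers no longer need to add the include entry manually. A hedged usage sketch, assuming the usual AI SDK call shape (`generateText` from the separate `ai` package and `openai.responses(...)` as the model factory; the model id and prompt are placeholders):

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

// With store: false on a reasoning model, the provider adds
// include: ["reasoning.encrypted_content"] automatically.
const { text } = await generateText({
  model: openai.responses("o4-mini"), // placeholder reasoning model id
  prompt: "Explain the change in one sentence.",
  providerOptions: {
    openai: { store: false }
  }
});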
@@ -3485,7 +3504,7 @@ var OpenAIResponsesLanguageModel = class {
       metadata: openaiOptions == null ? void 0 : openaiOptions.metadata,
       parallel_tool_calls: openaiOptions == null ? void 0 : openaiOptions.parallelToolCalls,
       previous_response_id: openaiOptions == null ? void 0 : openaiOptions.previousResponseId,
-      store: openaiOptions == null ? void 0 : openaiOptions.store,
+      store,
       user: openaiOptions == null ? void 0 : openaiOptions.user,
       instructions: openaiOptions == null ? void 0 : openaiOptions.instructions,
       service_tier: openaiOptions == null ? void 0 : openaiOptions.serviceTier,