@ai-sdk/openai 3.0.0-beta.25 → 3.0.0-beta.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -2179,7 +2179,7 @@ async function convertToOpenAIResponsesInput({
  store,
  hasLocalShellTool = false
  }) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+ var _a, _b, _c, _d, _e;
  const input = [];
  const warnings = [];
  for (const { role, content } of prompt) {
@@ -2260,10 +2260,15 @@ async function convertToOpenAIResponsesInput({
  for (const part of content) {
  switch (part.type) {
  case "text": {
+ const id = (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.itemId;
+ if (store && id != null) {
+ input.push({ type: "item_reference", id });
+ break;
+ }
  input.push({
  role: "assistant",
  content: [{ type: "output_text", text: part.text }],
- id: (_c = (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.itemId) != null ? _c : void 0
+ id
  });
  break;
  }
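
In practice, when the Responses API `store` option is enabled and an assistant text part carries an OpenAI `itemId` in its provider options, the converter now sends a lightweight `item_reference` to the stored item instead of re-sending the full assistant text; the same pattern is applied to tool-call and local-shell-call parts in the hunks below. A de-minified sketch of the new branch (sketch only; variable names follow the diff, and `store`, `part`, and `input` come from the surrounding convertToOpenAIResponsesInput scope):

    // Equivalent of the minified "text" case above.
    const id = part.providerOptions?.openai?.itemId;
    if (store && id != null) {
      // The item is already stored server-side; reference it by id.
      input.push({ type: "item_reference", id });
    } else {
      // Otherwise send the assistant text inline as before.
      input.push({
        role: "assistant",
        content: [{ type: "output_text", text: part.text }],
        id,
      });
    }
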
@@ -2272,6 +2277,11 @@ async function convertToOpenAIResponsesInput({
  if (part.providerExecuted) {
  break;
  }
+ const id = (_d = (_c = part.providerOptions) == null ? void 0 : _c.openai) == null ? void 0 : _d.itemId;
+ if (store && id != null) {
+ input.push({ type: "item_reference", id });
+ break;
+ }
  if (hasLocalShellTool && part.toolName === "local_shell") {
  const parsedInput = await validateTypes({
  value: part.input,
@@ -2280,7 +2290,7 @@ async function convertToOpenAIResponsesInput({
  input.push({
  type: "local_shell_call",
  call_id: part.toolCallId,
- id: (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openai) == null ? void 0 : _e.itemId) != null ? _f : void 0,
+ id,
  action: {
  type: "exec",
  command: parsedInput.action.command,
@@ -2297,7 +2307,7 @@ async function convertToOpenAIResponsesInput({
  call_id: part.toolCallId,
  name: part.toolName,
  arguments: JSON.stringify(part.input),
- id: (_i = (_h = (_g = part.providerOptions) == null ? void 0 : _g.openai) == null ? void 0 : _h.itemId) != null ? _i : void 0
+ id
  });
  break;
  }
@@ -2390,7 +2400,7 @@ async function convertToOpenAIResponsesInput({
  contentValue = output.value;
  break;
  case "execution-denied":
- contentValue = (_j = output.reason) != null ? _j : "Tool execution denied.";
+ contentValue = (_e = output.reason) != null ? _e : "Tool execution denied.";
  break;
  case "json":
  case "error-json":
@@ -2944,6 +2954,7 @@ var openaiResponsesProviderOptionsSchema = lazySchema15(
  include: z17.array(
  z17.enum([
  "reasoning.encrypted_content",
+ // handled internally by default, only needed for unknown reasoning models
  "file_search_call.results",
  "message.output_text.logprobs"
  ])
@@ -3196,7 +3207,11 @@ var OpenAIResponsesLanguageModel = class {
  const strictJsonSchema = (_b = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _b : false;
  let include = openaiOptions == null ? void 0 : openaiOptions.include;
  function addInclude(key) {
- include = include != null ? [...include, key] : [key];
+ if (include == null) {
+ include = [key];
+ } else if (!include.includes(key)) {
+ include = [...include, key];
+ }
  }
  function hasOpenAITool(id) {
  return (tools == null ? void 0 : tools.find(
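
With this change `addInclude` is idempotent per key: a value the caller already requested via `providerOptions.openai.include` is not appended a second time when the provider adds it automatically. A small illustration of the difference (hypothetical call sequence; the include values are taken from this diff):

    let include = ["reasoning.encrypted_content"]; // as passed by the caller
    function addInclude(key) {
      if (include == null) {
        include = [key];
      } else if (!include.includes(key)) {
        include = [...include, key];
      }
    }
    addInclude("reasoning.encrypted_content");   // no-op: already present
    addInclude("code_interpreter_call.outputs"); // appended once
    // include -> ["reasoning.encrypted_content", "code_interpreter_call.outputs"]
    // Previously the first call would have produced a duplicate entry.
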
@@ -3216,6 +3231,10 @@ var OpenAIResponsesLanguageModel = class {
  if (hasOpenAITool("openai.code_interpreter")) {
  addInclude("code_interpreter_call.outputs");
  }
+ const store = openaiOptions == null ? void 0 : openaiOptions.store;
+ if (store === false && modelConfig.isReasoningModel) {
+ addInclude("reasoning.encrypted_content");
+ }
  const baseArgs = {
  model: this.modelId,
  input,
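
Taken together with the `addInclude` change above, setting `store: false` on a reasoning model now requests `reasoning.encrypted_content` automatically, and requesting it yourself no longer produces a duplicate entry. A hedged usage sketch (assumes the AI SDK's `generateText`/`providerOptions` API; the model id is illustrative):

    import { generateText } from "ai";
    import { openai } from "@ai-sdk/openai";

    const result = await generateText({
      model: openai("o4-mini"), // illustrative reasoning model id
      prompt: "Summarize the latest change log.",
      providerOptions: {
        openai: {
          store: false,
          // include: ["reasoning.encrypted_content"] is no longer needed here;
          // it is added automatically for reasoning models when store is false.
        },
      },
    });
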
@@ -3243,7 +3262,7 @@ var OpenAIResponsesLanguageModel = class {
  metadata: openaiOptions == null ? void 0 : openaiOptions.metadata,
  parallel_tool_calls: openaiOptions == null ? void 0 : openaiOptions.parallelToolCalls,
  previous_response_id: openaiOptions == null ? void 0 : openaiOptions.previousResponseId,
- store: openaiOptions == null ? void 0 : openaiOptions.store,
+ store,
  user: openaiOptions == null ? void 0 : openaiOptions.user,
  instructions: openaiOptions == null ? void 0 : openaiOptions.instructions,
  service_tier: openaiOptions == null ? void 0 : openaiOptions.serviceTier,
@@ -4496,7 +4515,7 @@ var OpenAITranscriptionModel = class {
  };
 
  // src/version.ts
- var VERSION = true ? "3.0.0-beta.25" : "0.0.0-test";
+ var VERSION = true ? "3.0.0-beta.27" : "0.0.0-test";
 
  // src/openai-provider.ts
  function createOpenAI(options = {}) {