@ai-sdk/openai 2.0.48 → 2.0.50

This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in the public registry.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,17 @@
  # @ai-sdk/openai
 
+ ## 2.0.50
+
+ ### Patch Changes
+
+ - c336b43: feat(provider/openai): send assistant text and tool call parts as reference ids when store: true
+
+ ## 2.0.49
+
+ ### Patch Changes
+
+ - f4287d0: feat(provider/openai): automatically add reasoning.encrypted_content include when store = false
+
  ## 2.0.48
 
  ### Patch Changes
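
Both new patch releases key off the `store` provider option of the Responses API models. A minimal usage sketch, assuming the AI SDK `generateText` entry point and an illustrative model id:

```ts
import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

// store: false keeps the response out of OpenAI's server-side storage; with
// 2.0.49+ the provider then requests reasoning.encrypted_content automatically
// for reasoning models so reasoning can round-trip statelessly.
// store: true (2.0.50+) lets assistant text and tool call parts that carry an
// openai itemId be sent back as item references instead of full content.
const { text } = await generateText({
  model: openai.responses('gpt-5'), // illustrative model id
  prompt: 'Summarize the latest changes.',
  providerOptions: {
    openai: { store: false },
  },
});
```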
package/dist/index.js CHANGED
@@ -2120,7 +2120,7 @@ async function convertToOpenAIResponsesInput({
  store,
  hasLocalShellTool = false
  }) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i;
+ var _a, _b, _c, _d;
  const input = [];
  const warnings = [];
  for (const { role, content } of prompt) {
@@ -2201,10 +2201,15 @@ async function convertToOpenAIResponsesInput({
  for (const part of content) {
  switch (part.type) {
  case "text": {
+ const id = (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.itemId;
+ if (store && id != null) {
+ input.push({ type: "item_reference", id });
+ break;
+ }
  input.push({
  role: "assistant",
  content: [{ type: "output_text", text: part.text }],
- id: (_c = (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.itemId) != null ? _c : void 0
+ id
  });
  break;
  }
@@ -2213,6 +2218,11 @@ async function convertToOpenAIResponsesInput({
  if (part.providerExecuted) {
  break;
  }
+ const id = (_d = (_c = part.providerOptions) == null ? void 0 : _c.openai) == null ? void 0 : _d.itemId;
+ if (store && id != null) {
+ input.push({ type: "item_reference", id });
+ break;
+ }
  if (hasLocalShellTool && part.toolName === "local_shell") {
  const parsedInput = await (0, import_provider_utils20.validateTypes)({
  value: part.input,
@@ -2221,7 +2231,7 @@ async function convertToOpenAIResponsesInput({
  input.push({
  type: "local_shell_call",
  call_id: part.toolCallId,
- id: (_f = (_e = (_d = part.providerOptions) == null ? void 0 : _d.openai) == null ? void 0 : _e.itemId) != null ? _f : void 0,
+ id,
  action: {
  type: "exec",
  command: parsedInput.action.command,
@@ -2238,7 +2248,7 @@ async function convertToOpenAIResponsesInput({
  call_id: part.toolCallId,
  name: part.toolName,
  arguments: JSON.stringify(part.input),
- id: (_i = (_h = (_g = part.providerOptions) == null ? void 0 : _g.openai) == null ? void 0 : _h.itemId) != null ? _i : void 0
+ id
  });
  break;
  }
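
The minified branches above boil down to the following sketch; when `store` is enabled and an assistant text or tool call part carries an `openai.itemId`, the part is sent as a lightweight `item_reference` instead of being replayed in full. The helper name and types below are illustrative, not part of the package's actual internals:

```ts
// Illustrative sketch of the new branch in convertToOpenAIResponsesInput.
type ResponsesInputItem =
  | { type: 'item_reference'; id: string }
  | { role: 'assistant'; content: Array<{ type: 'output_text'; text: string }>; id?: string };

function convertAssistantTextPart(
  part: { text: string; providerOptions?: { openai?: { itemId?: string } } },
  store: boolean | undefined
): ResponsesInputItem {
  const id = part.providerOptions?.openai?.itemId;
  if (store && id != null) {
    // The item is already stored server-side: reference it instead of resending the text.
    return { type: 'item_reference', id };
  }
  return {
    role: 'assistant',
    content: [{ type: 'output_text', text: part.text }],
    id,
  };
}
```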
@@ -2882,6 +2892,7 @@ var openaiResponsesProviderOptionsSchema = (0, import_provider_utils22.lazyValid
  include: import_v417.z.array(
  import_v417.z.enum([
  "reasoning.encrypted_content",
+ // handled internally by default, only needed for unknown reasoning models
  "file_search_call.results",
  "message.output_text.logprobs"
  ])
@@ -3131,7 +3142,11 @@ var OpenAIResponsesLanguageModel = class {
  const strictJsonSchema = (_b = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _b : false;
  let include = openaiOptions == null ? void 0 : openaiOptions.include;
  function addInclude(key) {
- include = include != null ? [...include, key] : [key];
+ if (include == null) {
+ include = [key];
+ } else if (!include.includes(key)) {
+ include = [...include, key];
+ }
  }
  function hasOpenAITool(id) {
  return (tools == null ? void 0 : tools.find(
@@ -3151,6 +3166,10 @@ var OpenAIResponsesLanguageModel = class {
  if (hasOpenAITool("openai.code_interpreter")) {
  addInclude("code_interpreter_call.outputs");
  }
+ const store = openaiOptions == null ? void 0 : openaiOptions.store;
+ if (store === false && modelConfig.isReasoningModel) {
+ addInclude("reasoning.encrypted_content");
+ }
  const baseArgs = {
  model: this.modelId,
  input,
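
The rewritten `addInclude` now deduplicates, so the automatic `reasoning.encrypted_content` entry added when `store === false` on a reasoning model does not produce a duplicate if the caller already requested it via `providerOptions.openai.include`. A standalone sketch; in the package `include` is a closed-over variable rather than a parameter:

```ts
// Standalone sketch of the deduplicating include logic.
function addInclude(include: string[] | undefined, key: string): string[] {
  if (include == null) return [key];
  return include.includes(key) ? include : [...include, key];
}

// Caller already asked for encrypted reasoning content:
let include: string[] | undefined = ['reasoning.encrypted_content'];
// store === false on a reasoning model triggers the automatic include:
include = addInclude(include, 'reasoning.encrypted_content');
console.log(include); // ['reasoning.encrypted_content'] (no duplicate entry)
```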
@@ -3178,7 +3197,7 @@ var OpenAIResponsesLanguageModel = class {
  metadata: openaiOptions == null ? void 0 : openaiOptions.metadata,
  parallel_tool_calls: openaiOptions == null ? void 0 : openaiOptions.parallelToolCalls,
  previous_response_id: openaiOptions == null ? void 0 : openaiOptions.previousResponseId,
- store: openaiOptions == null ? void 0 : openaiOptions.store,
+ store,
  user: openaiOptions == null ? void 0 : openaiOptions.user,
  instructions: openaiOptions == null ? void 0 : openaiOptions.instructions,
  service_tier: openaiOptions == null ? void 0 : openaiOptions.serviceTier,
@@ -4405,7 +4424,7 @@ var OpenAITranscriptionModel = class {
  };
 
  // src/version.ts
- var VERSION = true ? "2.0.48" : "0.0.0-test";
+ var VERSION = true ? "2.0.50" : "0.0.0-test";
 
  // src/openai-provider.ts
  function createOpenAI(options = {}) {