@ai-sdk/openai 3.0.6 → 3.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1758,11 +1758,16 @@ var modelMaxImagesPerCall = {
   "gpt-image-1-mini": 10,
   "gpt-image-1.5": 10
 };
-var hasDefaultResponseFormat = /* @__PURE__ */ new Set([
-  "gpt-image-1",
+var defaultResponseFormatPrefixes = [
   "gpt-image-1-mini",
-  "gpt-image-1.5"
-]);
+  "gpt-image-1.5",
+  "gpt-image-1"
+];
+function hasDefaultResponseFormat(modelId) {
+  return defaultResponseFormatPrefixes.some(
+    (prefix) => modelId.startsWith(prefix)
+  );
+}
 
 // src/image/openai-image-model.ts
 var OpenAIImageModel = class {
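Note on the hunk above: the check that decides whether to omit response_format: "b64_json" from image requests moves from exact Set membership to prefix matching, so dated or suffixed snapshots of the gpt-image models are treated the same as the base ids. A minimal sketch of the new behavior (the snapshot id below is hypothetical, not taken from the package):

    // Mirrors the 3.0.8 code above.
    const defaultResponseFormatPrefixes = ["gpt-image-1-mini", "gpt-image-1.5", "gpt-image-1"];
    function hasDefaultResponseFormat(modelId: string): boolean {
      return defaultResponseFormatPrefixes.some((prefix) => modelId.startsWith(prefix));
    }

    hasDefaultResponseFormat("gpt-image-1");            // true in 3.0.6 (Set.has) and in 3.0.8
    hasDefaultResponseFormat("gpt-image-1-2025-06-01"); // hypothetical snapshot: false in 3.0.6, true in 3.0.8
    hasDefaultResponseFormat("dall-e-3");               // false, so response_format: "b64_json" is still sent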
@@ -1881,7 +1886,7 @@ var OpenAIImageModel = class {
         n,
         size,
         ...(_h = providerOptions.openai) != null ? _h : {},
-        ...!hasDefaultResponseFormat.has(this.modelId) ? { response_format: "b64_json" } : {}
+        ...!hasDefaultResponseFormat(this.modelId) ? { response_format: "b64_json" } : {}
       },
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createJsonResponseHandler4(
@@ -3243,25 +3248,21 @@ var openaiResponsesChunkSchema = lazySchema14(
         z16.object({
           type: z16.literal("file_citation"),
           file_id: z16.string(),
-          filename: z16.string().nullish(),
-          index: z16.number().nullish(),
-          start_index: z16.number().nullish(),
-          end_index: z16.number().nullish(),
-          quote: z16.string().nullish()
+          filename: z16.string(),
+          index: z16.number()
         }),
         z16.object({
           type: z16.literal("container_file_citation"),
           container_id: z16.string(),
           file_id: z16.string(),
-          filename: z16.string().nullish(),
-          start_index: z16.number().nullish(),
-          end_index: z16.number().nullish(),
-          index: z16.number().nullish()
+          filename: z16.string(),
+          start_index: z16.number(),
+          end_index: z16.number()
         }),
         z16.object({
           type: z16.literal("file_path"),
           file_id: z16.string(),
-          index: z16.number().nullish()
+          index: z16.number()
         })
       ])
     }),
@@ -3358,25 +3359,21 @@ var openaiResponsesResponseSchema = lazySchema14(
         z16.object({
           type: z16.literal("file_citation"),
           file_id: z16.string(),
-          filename: z16.string().nullish(),
-          index: z16.number().nullish(),
-          start_index: z16.number().nullish(),
-          end_index: z16.number().nullish(),
-          quote: z16.string().nullish()
+          filename: z16.string(),
+          index: z16.number()
         }),
         z16.object({
           type: z16.literal("container_file_citation"),
           container_id: z16.string(),
           file_id: z16.string(),
-          filename: z16.string().nullish(),
-          start_index: z16.number().nullish(),
-          end_index: z16.number().nullish(),
-          index: z16.number().nullish()
+          filename: z16.string(),
+          start_index: z16.number(),
+          end_index: z16.number()
         }),
         z16.object({
           type: z16.literal("file_path"),
           file_id: z16.string(),
-          index: z16.number().nullish()
+          index: z16.number()
         })
       ])
     )
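Both schema hunks above (the streaming chunk schema and the final response schema) tighten the annotation objects in the same way: filename, index, start_index, and end_index become required instead of .nullish(), and the quote field on file_citation is dropped. A standalone sketch of the 3.0.8 shapes in plain zod (a reconstruction; the bundle's z16 and lazySchema14 names are build aliases, and the real schemas sit inside a larger union):

    import { z } from "zod";

    // file_citation: filename and index are now required; start_index, end_index, quote are gone.
    const fileCitation = z.object({
      type: z.literal("file_citation"),
      file_id: z.string(),
      filename: z.string(),
      index: z.number()
    });

    // container_file_citation: filename, start_index, end_index required; index is gone.
    const containerFileCitation = z.object({
      type: z.literal("container_file_citation"),
      container_id: z.string(),
      file_id: z.string(),
      filename: z.string(),
      start_index: z.number(),
      end_index: z.number()
    });

    // file_path: index is now required.
    const filePath = z.object({
      type: z.literal("file_path"),
      file_id: z.string(),
      index: z.number()
    });

    const annotation = z.union([fileCitation, containerFileCitation, filePath]);

    // A payload missing filename now fails to parse instead of yielding undefined fields.
    annotation.parse({ type: "file_path", file_id: "file-123", index: 0 }); // ok (ids here are made up)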
@@ -4560,7 +4557,7 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y;
     const {
       args: body,
       warnings,
@@ -4707,29 +4704,29 @@ var OpenAIResponsesLanguageModel = class {
              sourceType: "document",
              id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : generateId2(),
              mediaType: "text/plain",
-              title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
-              filename: (_l = annotation.filename) != null ? _l : annotation.file_id,
-              ...annotation.file_id ? {
-                providerMetadata: {
-                  [providerOptionsName]: {
-                    fileId: annotation.file_id
-                  }
+              title: annotation.filename,
+              filename: annotation.filename,
+              providerMetadata: {
+                [providerOptionsName]: {
+                  type: annotation.type,
+                  fileId: annotation.file_id,
+                  index: annotation.index
                 }
-              } : {}
+              }
            });
          } else if (annotation.type === "container_file_citation") {
            content.push({
              type: "source",
              sourceType: "document",
-              id: (_o = (_n = (_m = this.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : generateId2(),
+              id: (_l = (_k = (_j = this.config).generateId) == null ? void 0 : _k.call(_j)) != null ? _l : generateId2(),
              mediaType: "text/plain",
-              title: (_q = (_p = annotation.filename) != null ? _p : annotation.file_id) != null ? _q : "Document",
-              filename: (_r = annotation.filename) != null ? _r : annotation.file_id,
+              title: annotation.filename,
+              filename: annotation.filename,
              providerMetadata: {
                [providerOptionsName]: {
+                  type: annotation.type,
                  fileId: annotation.file_id,
-                  containerId: annotation.container_id,
-                  ...annotation.index != null ? { index: annotation.index } : {}
+                  containerId: annotation.container_id
                }
              }
            });
@@ -4737,14 +4734,15 @@ var OpenAIResponsesLanguageModel = class {
            content.push({
              type: "source",
              sourceType: "document",
-              id: (_u = (_t = (_s = this.config).generateId) == null ? void 0 : _t.call(_s)) != null ? _u : generateId2(),
+              id: (_o = (_n = (_m = this.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : generateId2(),
              mediaType: "application/octet-stream",
              title: annotation.file_id,
              filename: annotation.file_id,
              providerMetadata: {
                [providerOptionsName]: {
+                  type: annotation.type,
                  fileId: annotation.file_id,
-                  ...annotation.index != null ? { index: annotation.index } : {}
+                  index: annotation.index
                }
              }
            });
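In all three doGenerate annotation branches above, the emitted source part now always carries providerMetadata with the annotation's type plus its identifiers, rather than conditionally spreading them in, and the title/filename fallbacks (annotation.quote, annotation.file_id, "Document") go away because the tightened schema guarantees filename. A sketch of the resulting file_citation source part, with made-up values and assuming providerOptionsName resolves to "openai":

    const sourcePart = {
      type: "source",
      sourceType: "document",
      id: "src_123",             // this.config.generateId?.() ?? generateId2()
      mediaType: "text/plain",
      title: "report.pdf",       // annotation.filename; no more quote/"Document" fallback
      filename: "report.pdf",
      providerMetadata: {
        openai: {
          type: "file_citation", // new in 3.0.8
          fileId: "file-abc123", // 3.0.6 only attached providerMetadata when file_id was set
          index: 0               // new in 3.0.8 for file_citation
        }
      }
    };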
@@ -4789,7 +4787,7 @@ var OpenAIResponsesLanguageModel = class {
            break;
          }
          case "mcp_call": {
-            const toolCallId = part.approval_request_id != null ? (_v = approvalRequestIdToDummyToolCallIdFromPrompt[part.approval_request_id]) != null ? _v : part.id : part.id;
+            const toolCallId = part.approval_request_id != null ? (_p = approvalRequestIdToDummyToolCallIdFromPrompt[part.approval_request_id]) != null ? _p : part.id : part.id;
            const toolName = `mcp.${part.name}`;
            content.push({
              type: "tool-call",
@@ -4823,8 +4821,8 @@ var OpenAIResponsesLanguageModel = class {
            break;
          }
          case "mcp_approval_request": {
-            const approvalRequestId = (_w = part.approval_request_id) != null ? _w : part.id;
-            const dummyToolCallId = (_z = (_y = (_x = this.config).generateId) == null ? void 0 : _y.call(_x)) != null ? _z : generateId2();
+            const approvalRequestId = (_q = part.approval_request_id) != null ? _q : part.id;
+            const dummyToolCallId = (_t = (_s = (_r = this.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : generateId2();
            const toolName = `mcp.${part.name}`;
            content.push({
              type: "tool-call",
@@ -4874,13 +4872,13 @@ var OpenAIResponsesLanguageModel = class {
              toolName: toolNameMapping.toCustomToolName("file_search"),
              result: {
                queries: part.queries,
-                results: (_B = (_A = part.results) == null ? void 0 : _A.map((result) => ({
+                results: (_v = (_u = part.results) == null ? void 0 : _u.map((result) => ({
                  attributes: result.attributes,
                  fileId: result.file_id,
                  filename: result.filename,
                  score: result.score,
                  text: result.text
-                }))) != null ? _B : null
+                }))) != null ? _v : null
              }
            });
            break;
@@ -4939,10 +4937,10 @@ var OpenAIResponsesLanguageModel = class {
      content,
      finishReason: {
        unified: mapOpenAIResponseFinishReason({
-          finishReason: (_C = response.incomplete_details) == null ? void 0 : _C.reason,
+          finishReason: (_w = response.incomplete_details) == null ? void 0 : _w.reason,
          hasFunctionCall
        }),
-        raw: (_E = (_D = response.incomplete_details) == null ? void 0 : _D.reason) != null ? _E : void 0
+        raw: (_y = (_x = response.incomplete_details) == null ? void 0 : _x.reason) != null ? _y : void 0
      },
      usage: convertOpenAIResponsesUsage(usage),
      request: { body },
@@ -5005,7 +5003,7 @@ var OpenAIResponsesLanguageModel = class {
          controller.enqueue({ type: "stream-start", warnings });
        },
        transform(chunk, controller) {
-          var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E, _F, _G, _H, _I, _J;
+          var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D;
          if (options.includeRawChunks) {
            controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
          }
@@ -5602,29 +5600,29 @@ var OpenAIResponsesLanguageModel = class {
              sourceType: "document",
              id: (_x = (_w = (_v = self.config).generateId) == null ? void 0 : _w.call(_v)) != null ? _x : generateId2(),
              mediaType: "text/plain",
-              title: (_z = (_y = value.annotation.quote) != null ? _y : value.annotation.filename) != null ? _z : "Document",
-              filename: (_A = value.annotation.filename) != null ? _A : value.annotation.file_id,
-              ...value.annotation.file_id ? {
-                providerMetadata: {
-                  [providerOptionsName]: {
-                    fileId: value.annotation.file_id
-                  }
+              title: value.annotation.filename,
+              filename: value.annotation.filename,
+              providerMetadata: {
+                [providerOptionsName]: {
+                  type: value.annotation.type,
+                  fileId: value.annotation.file_id,
+                  index: value.annotation.index
                }
-              } : {}
+              }
            });
          } else if (value.annotation.type === "container_file_citation") {
            controller.enqueue({
              type: "source",
              sourceType: "document",
-              id: (_D = (_C = (_B = self.config).generateId) == null ? void 0 : _C.call(_B)) != null ? _D : generateId2(),
+              id: (_A = (_z = (_y = self.config).generateId) == null ? void 0 : _z.call(_y)) != null ? _A : generateId2(),
              mediaType: "text/plain",
-              title: (_F = (_E = value.annotation.filename) != null ? _E : value.annotation.file_id) != null ? _F : "Document",
-              filename: (_G = value.annotation.filename) != null ? _G : value.annotation.file_id,
+              title: value.annotation.filename,
+              filename: value.annotation.filename,
              providerMetadata: {
                [providerOptionsName]: {
+                  type: value.annotation.type,
                  fileId: value.annotation.file_id,
-                  containerId: value.annotation.container_id,
-                  ...value.annotation.index != null ? { index: value.annotation.index } : {}
+                  containerId: value.annotation.container_id
                }
              }
            });
@@ -5632,14 +5630,15 @@ var OpenAIResponsesLanguageModel = class {
            controller.enqueue({
              type: "source",
              sourceType: "document",
-              id: (_J = (_I = (_H = self.config).generateId) == null ? void 0 : _I.call(_H)) != null ? _J : generateId2(),
+              id: (_D = (_C = (_B = self.config).generateId) == null ? void 0 : _C.call(_B)) != null ? _D : generateId2(),
              mediaType: "application/octet-stream",
              title: value.annotation.file_id,
              filename: value.annotation.file_id,
              providerMetadata: {
                [providerOptionsName]: {
+                  type: value.annotation.type,
                  fileId: value.annotation.file_id,
-                  ...value.annotation.index != null ? { index: value.annotation.index } : {}
+                  index: value.annotation.index
                }
              }
            });
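The streaming hunks mirror the doGenerate change, so sources emitted during streaming carry the same providerMetadata as the non-streaming path. A hedged consumer-side sketch for reading it: the source-part field names follow the diff, while the idea that your code receives them as an array of source parts (for example from a generateText or streamText result) is an assumption that depends on the ai package version in use:

    // Field names per the 3.0.8 diff; the surrounding access path is an assumption.
    type OpenAISourceMetadata = {
      type?: string;
      fileId?: string;
      containerId?: string;
      index?: number;
    };

    type DocumentSourcePart = {
      sourceType: string;
      title?: string;
      filename?: string;
      providerMetadata?: { openai?: OpenAISourceMetadata };
    };

    function logOpenAISources(sources: DocumentSourcePart[]): void {
      for (const source of sources) {
        if (source.sourceType !== "document") continue;
        const meta = source.providerMetadata?.openai ?? {};
        // `type` is the new discriminator added in 3.0.8.
        if (meta.type === "file_citation") {
          console.log(`cited ${source.filename} (file ${meta.fileId}, index ${meta.index})`);
        } else if (meta.type === "container_file_citation") {
          console.log(`container ${meta.containerId}, file ${meta.fileId}: ${source.title}`);
        } else if (meta.type === "file_path") {
          console.log(`generated file ${meta.fileId}`);
        }
      }
    }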