@ai-sdk/openai 3.0.6 → 3.0.8

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as published.
package/dist/index.mjs CHANGED
@@ -1766,11 +1766,16 @@ var modelMaxImagesPerCall = {
  "gpt-image-1-mini": 10,
  "gpt-image-1.5": 10
  };
- var hasDefaultResponseFormat = /* @__PURE__ */ new Set([
- "gpt-image-1",
+ var defaultResponseFormatPrefixes = [
  "gpt-image-1-mini",
- "gpt-image-1.5"
- ]);
+ "gpt-image-1.5",
+ "gpt-image-1"
+ ];
+ function hasDefaultResponseFormat(modelId) {
+ return defaultResponseFormatPrefixes.some(
+ (prefix) => modelId.startsWith(prefix)
+ );
+ }

  // src/image/openai-image-model.ts
  var OpenAIImageModel = class {
@@ -1889,7 +1894,7 @@ var OpenAIImageModel = class {
  n,
  size,
  ...(_h = providerOptions.openai) != null ? _h : {},
- ...!hasDefaultResponseFormat.has(this.modelId) ? { response_format: "b64_json" } : {}
+ ...!hasDefaultResponseFormat(this.modelId) ? { response_format: "b64_json" } : {}
  },
  failedResponseHandler: openaiFailedResponseHandler,
  successfulResponseHandler: createJsonResponseHandler4(
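The two image-model hunks above swap the exact-match Set for prefix matching, so suffixed variants of the gpt-image family also skip the explicit response_format parameter. A minimal standalone sketch of the new check (not the package's exports; the suffixed model ID below is hypothetical):

const defaultResponseFormatPrefixes = [
  "gpt-image-1-mini",
  "gpt-image-1.5",
  "gpt-image-1"
];

function hasDefaultResponseFormat(modelId: string): boolean {
  // Prefix match instead of the 3.0.6 Set#has exact match.
  return defaultResponseFormatPrefixes.some((prefix) => modelId.startsWith(prefix));
}

console.log(hasDefaultResponseFormat("gpt-image-1"));           // true in both versions
console.log(hasDefaultResponseFormat("gpt-image-1-mini-2025")); // true in 3.0.8, false in 3.0.6 (hypothetical dated ID)
console.log(hasDefaultResponseFormat("dall-e-3"));              // false: response_format: "b64_json" is still sent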
@@ -3299,25 +3304,21 @@ var openaiResponsesChunkSchema = lazySchema17(
  z19.object({
  type: z19.literal("file_citation"),
  file_id: z19.string(),
- filename: z19.string().nullish(),
- index: z19.number().nullish(),
- start_index: z19.number().nullish(),
- end_index: z19.number().nullish(),
- quote: z19.string().nullish()
+ filename: z19.string(),
+ index: z19.number()
  }),
  z19.object({
  type: z19.literal("container_file_citation"),
  container_id: z19.string(),
  file_id: z19.string(),
- filename: z19.string().nullish(),
- start_index: z19.number().nullish(),
- end_index: z19.number().nullish(),
- index: z19.number().nullish()
+ filename: z19.string(),
+ start_index: z19.number(),
+ end_index: z19.number()
  }),
  z19.object({
  type: z19.literal("file_path"),
  file_id: z19.string(),
- index: z19.number().nullish()
+ index: z19.number()
  })
  ])
  }),
@@ -3414,25 +3415,21 @@ var openaiResponsesResponseSchema = lazySchema17(
  z19.object({
  type: z19.literal("file_citation"),
  file_id: z19.string(),
- filename: z19.string().nullish(),
- index: z19.number().nullish(),
- start_index: z19.number().nullish(),
- end_index: z19.number().nullish(),
- quote: z19.string().nullish()
+ filename: z19.string(),
+ index: z19.number()
  }),
  z19.object({
  type: z19.literal("container_file_citation"),
  container_id: z19.string(),
  file_id: z19.string(),
- filename: z19.string().nullish(),
- start_index: z19.number().nullish(),
- end_index: z19.number().nullish(),
- index: z19.number().nullish()
+ filename: z19.string(),
+ start_index: z19.number(),
+ end_index: z19.number()
  }),
  z19.object({
  type: z19.literal("file_path"),
  file_id: z19.string(),
- index: z19.number().nullish()
+ index: z19.number()
  })
  ])
  )
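In both the streaming-chunk and the response schemas, the annotation fields that were previously .nullish() are now required, and the unused quote field on file_citation is gone. A standalone sketch of the tightened shapes using plain zod (the package wraps these in a larger lazy schema; the schema name below is illustrative):

import { z } from "zod";

const annotationSchema = z.discriminatedUnion("type", [
  z.object({
    type: z.literal("file_citation"),
    file_id: z.string(),
    filename: z.string(), // was .nullish() in 3.0.6
    index: z.number()     // was .nullish(); start_index, end_index, and quote were removed
  }),
  z.object({
    type: z.literal("container_file_citation"),
    container_id: z.string(),
    file_id: z.string(),
    filename: z.string(),    // was .nullish()
    start_index: z.number(), // was .nullish()
    end_index: z.number()    // was .nullish(); index was removed
  }),
  z.object({
    type: z.literal("file_path"),
    file_id: z.string(),
    index: z.number() // was .nullish()
  })
]);

// An annotation without filename/index no longer parses.
const result = annotationSchema.safeParse({ type: "file_citation", file_id: "file_abc" });
console.log(result.success); // false under the 3.0.8 shapes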
@@ -4300,7 +4297,7 @@ var OpenAIResponsesLanguageModel = class {
  };
  }
  async doGenerate(options) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y;
  const {
  args: body,
  warnings,
@@ -4447,29 +4444,29 @@ var OpenAIResponsesLanguageModel = class {
  sourceType: "document",
  id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : generateId2(),
  mediaType: "text/plain",
- title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
- filename: (_l = annotation.filename) != null ? _l : annotation.file_id,
- ...annotation.file_id ? {
- providerMetadata: {
- [providerOptionsName]: {
- fileId: annotation.file_id
- }
+ title: annotation.filename,
+ filename: annotation.filename,
+ providerMetadata: {
+ [providerOptionsName]: {
+ type: annotation.type,
+ fileId: annotation.file_id,
+ index: annotation.index
  }
- } : {}
+ }
  });
  } else if (annotation.type === "container_file_citation") {
  content.push({
  type: "source",
  sourceType: "document",
- id: (_o = (_n = (_m = this.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : generateId2(),
+ id: (_l = (_k = (_j = this.config).generateId) == null ? void 0 : _k.call(_j)) != null ? _l : generateId2(),
  mediaType: "text/plain",
- title: (_q = (_p = annotation.filename) != null ? _p : annotation.file_id) != null ? _q : "Document",
- filename: (_r = annotation.filename) != null ? _r : annotation.file_id,
+ title: annotation.filename,
+ filename: annotation.filename,
  providerMetadata: {
  [providerOptionsName]: {
+ type: annotation.type,
  fileId: annotation.file_id,
- containerId: annotation.container_id,
- ...annotation.index != null ? { index: annotation.index } : {}
+ containerId: annotation.container_id
  }
  }
  });
@@ -4477,14 +4474,15 @@ var OpenAIResponsesLanguageModel = class {
  content.push({
  type: "source",
  sourceType: "document",
- id: (_u = (_t = (_s = this.config).generateId) == null ? void 0 : _t.call(_s)) != null ? _u : generateId2(),
+ id: (_o = (_n = (_m = this.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : generateId2(),
  mediaType: "application/octet-stream",
  title: annotation.file_id,
  filename: annotation.file_id,
  providerMetadata: {
  [providerOptionsName]: {
+ type: annotation.type,
  fileId: annotation.file_id,
- ...annotation.index != null ? { index: annotation.index } : {}
+ index: annotation.index
  }
  }
  });
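The two hunks above change how doGenerate maps file_citation, container_file_citation, and file_path annotations to document sources: title and filename now come straight from the (now required) filename field rather than falling back to the quote, the file_id, or the literal "Document", and provider metadata is always attached with the annotation type, fileId, and index or containerId. Roughly what a consumer sees on such a source part (illustrative, partial object; the metadata key is providerOptionsName, which is "openai" for the default provider):

const exampleSourcePart = {
  type: "source",
  sourceType: "document",
  mediaType: "text/plain",
  title: "report.pdf",    // 3.0.6 could fall back to the quote, the file_id, or "Document"
  filename: "report.pdf", // 3.0.6 could fall back to the file_id
  providerMetadata: {
    openai: {
      type: "file_citation", // new in 3.0.8
      fileId: "file_abc123", // previously the only field, and only attached when file_id was present
      index: 0               // new in 3.0.8, no longer conditionally spread
    }
  }
};

console.log(exampleSourcePart.providerMetadata.openai.fileId);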
@@ -4529,7 +4527,7 @@ var OpenAIResponsesLanguageModel = class {
  break;
  }
  case "mcp_call": {
- const toolCallId = part.approval_request_id != null ? (_v = approvalRequestIdToDummyToolCallIdFromPrompt[part.approval_request_id]) != null ? _v : part.id : part.id;
+ const toolCallId = part.approval_request_id != null ? (_p = approvalRequestIdToDummyToolCallIdFromPrompt[part.approval_request_id]) != null ? _p : part.id : part.id;
  const toolName = `mcp.${part.name}`;
  content.push({
  type: "tool-call",
@@ -4563,8 +4561,8 @@ var OpenAIResponsesLanguageModel = class {
  break;
  }
  case "mcp_approval_request": {
- const approvalRequestId = (_w = part.approval_request_id) != null ? _w : part.id;
- const dummyToolCallId = (_z = (_y = (_x = this.config).generateId) == null ? void 0 : _y.call(_x)) != null ? _z : generateId2();
+ const approvalRequestId = (_q = part.approval_request_id) != null ? _q : part.id;
+ const dummyToolCallId = (_t = (_s = (_r = this.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : generateId2();
  const toolName = `mcp.${part.name}`;
  content.push({
  type: "tool-call",
@@ -4614,13 +4612,13 @@ var OpenAIResponsesLanguageModel = class {
  toolName: toolNameMapping.toCustomToolName("file_search"),
  result: {
  queries: part.queries,
- results: (_B = (_A = part.results) == null ? void 0 : _A.map((result) => ({
+ results: (_v = (_u = part.results) == null ? void 0 : _u.map((result) => ({
  attributes: result.attributes,
  fileId: result.file_id,
  filename: result.filename,
  score: result.score,
  text: result.text
- }))) != null ? _B : null
+ }))) != null ? _v : null
  }
  });
  break;
@@ -4679,10 +4677,10 @@ var OpenAIResponsesLanguageModel = class {
  content,
  finishReason: {
  unified: mapOpenAIResponseFinishReason({
- finishReason: (_C = response.incomplete_details) == null ? void 0 : _C.reason,
+ finishReason: (_w = response.incomplete_details) == null ? void 0 : _w.reason,
  hasFunctionCall
  }),
- raw: (_E = (_D = response.incomplete_details) == null ? void 0 : _D.reason) != null ? _E : void 0
+ raw: (_y = (_x = response.incomplete_details) == null ? void 0 : _x.reason) != null ? _y : void 0
  },
  usage: convertOpenAIResponsesUsage(usage),
  request: { body },
@@ -4745,7 +4743,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({ type: "stream-start", warnings });
  },
  transform(chunk, controller) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E, _F, _G, _H, _I, _J;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D;
  if (options.includeRawChunks) {
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
  }
@@ -5342,29 +5340,29 @@ var OpenAIResponsesLanguageModel = class {
  sourceType: "document",
  id: (_x = (_w = (_v = self.config).generateId) == null ? void 0 : _w.call(_v)) != null ? _x : generateId2(),
  mediaType: "text/plain",
- title: (_z = (_y = value.annotation.quote) != null ? _y : value.annotation.filename) != null ? _z : "Document",
- filename: (_A = value.annotation.filename) != null ? _A : value.annotation.file_id,
- ...value.annotation.file_id ? {
- providerMetadata: {
- [providerOptionsName]: {
- fileId: value.annotation.file_id
- }
+ title: value.annotation.filename,
+ filename: value.annotation.filename,
+ providerMetadata: {
+ [providerOptionsName]: {
+ type: value.annotation.type,
+ fileId: value.annotation.file_id,
+ index: value.annotation.index
  }
- } : {}
+ }
  });
  } else if (value.annotation.type === "container_file_citation") {
  controller.enqueue({
  type: "source",
  sourceType: "document",
- id: (_D = (_C = (_B = self.config).generateId) == null ? void 0 : _C.call(_B)) != null ? _D : generateId2(),
+ id: (_A = (_z = (_y = self.config).generateId) == null ? void 0 : _z.call(_y)) != null ? _A : generateId2(),
  mediaType: "text/plain",
- title: (_F = (_E = value.annotation.filename) != null ? _E : value.annotation.file_id) != null ? _F : "Document",
- filename: (_G = value.annotation.filename) != null ? _G : value.annotation.file_id,
+ title: value.annotation.filename,
+ filename: value.annotation.filename,
  providerMetadata: {
  [providerOptionsName]: {
+ type: value.annotation.type,
  fileId: value.annotation.file_id,
- containerId: value.annotation.container_id,
- ...value.annotation.index != null ? { index: value.annotation.index } : {}
+ containerId: value.annotation.container_id
  }
  }
  });
@@ -5372,14 +5370,15 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "document",
- id: (_J = (_I = (_H = self.config).generateId) == null ? void 0 : _I.call(_H)) != null ? _J : generateId2(),
+ id: (_D = (_C = (_B = self.config).generateId) == null ? void 0 : _C.call(_B)) != null ? _D : generateId2(),
  mediaType: "application/octet-stream",
  title: value.annotation.file_id,
  filename: value.annotation.file_id,
  providerMetadata: {
  [providerOptionsName]: {
+ type: value.annotation.type,
  fileId: value.annotation.file_id,
- ...value.annotation.index != null ? { index: value.annotation.index } : {}
+ index: value.annotation.index
  }
  }
  });
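The streaming hunks above mirror the doGenerate changes, so sources surfaced mid-stream carry the same metadata as the final result. A consumption sketch, assuming the AI SDK core forwards these source parts on streamText's fullStream; the model ID and prompt are illustrative and the file-search tool setup that produces citations is omitted:

import { streamText } from "ai";
import { openai } from "@ai-sdk/openai";

const result = streamText({
  model: openai.responses("gpt-4.1"), // illustrative model ID
  prompt: "Summarize the uploaded report.",
  // tools: { ... } // file search configuration omitted
});

for await (const part of result.fullStream) {
  if (part.type === "source" && part.sourceType === "document") {
    // As of 3.0.8 the metadata always includes the annotation type plus
    // fileId and index/containerId, rather than only a conditional fileId.
    console.log(part.title, part.providerMetadata?.openai);
  }
}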
@@ -5843,7 +5842,7 @@ var OpenAITranscriptionModel = class {
  };

  // src/version.ts
- var VERSION = true ? "3.0.6" : "0.0.0-test";
+ var VERSION = true ? "3.0.8" : "0.0.0-test";

  // src/openai-provider.ts
  function createOpenAI(options = {}) {