@ai-sdk/openai 3.0.0-beta.107 → 3.0.0-beta.109

This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
package/dist/index.mjs CHANGED
@@ -304,7 +304,7 @@ function mapOpenAIFinishReason(finishReason) {
304
304
  case "tool_calls":
305
305
  return "tool-calls";
306
306
  default:
307
- return "unknown";
307
+ return "other";
308
308
  }
309
309
  }
310
310
 
@@ -828,7 +828,7 @@ var OpenAIChatLanguageModel = class {
828
828
  };
829
829
  }
830
830
  async doGenerate(options) {
831
- var _a, _b, _c, _d, _e, _f;
831
+ var _a, _b, _c, _d, _e, _f, _g;
832
832
  const { args: body, warnings } = await this.getArgs(options);
833
833
  const {
834
834
  responseHeaders,
@@ -885,7 +885,10 @@ var OpenAIChatLanguageModel = class {
885
885
  }
886
886
  return {
887
887
  content,
888
- finishReason: mapOpenAIFinishReason(choice.finish_reason),
888
+ finishReason: {
889
+ unified: mapOpenAIFinishReason(choice.finish_reason),
890
+ raw: (_g = choice.finish_reason) != null ? _g : void 0
891
+ },
889
892
  usage: convertOpenAIChatUsage(response.usage),
890
893
  request: { body },
891
894
  response: {
@@ -921,7 +924,10 @@ var OpenAIChatLanguageModel = class {
921
924
  fetch: this.config.fetch
922
925
  });
923
926
  const toolCalls = [];
924
- let finishReason = "unknown";
927
+ let finishReason = {
928
+ unified: "other",
929
+ raw: void 0
930
+ };
925
931
  let usage = void 0;
926
932
  let metadataExtracted = false;
927
933
  let isActiveText = false;
@@ -938,13 +944,13 @@ var OpenAIChatLanguageModel = class {
938
944
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
939
945
  }
940
946
  if (!chunk.success) {
941
- finishReason = "error";
947
+ finishReason = { unified: "error", raw: void 0 };
942
948
  controller.enqueue({ type: "error", error: chunk.error });
943
949
  return;
944
950
  }
945
951
  const value = chunk.value;
946
952
  if ("error" in value) {
947
- finishReason = "error";
953
+ finishReason = { unified: "error", raw: void 0 };
948
954
  controller.enqueue({ type: "error", error: value.error });
949
955
  return;
950
956
  }
@@ -969,7 +975,10 @@ var OpenAIChatLanguageModel = class {
969
975
  }
970
976
  const choice = value.choices[0];
971
977
  if ((choice == null ? void 0 : choice.finish_reason) != null) {
972
- finishReason = mapOpenAIFinishReason(choice.finish_reason);
978
+ finishReason = {
979
+ unified: mapOpenAIFinishReason(choice.finish_reason),
980
+ raw: choice.finish_reason
981
+ };
973
982
  }
974
983
  if (((_e = choice == null ? void 0 : choice.logprobs) == null ? void 0 : _e.content) != null) {
975
984
  providerMetadata.openai.logprobs = choice.logprobs.content;
@@ -1258,7 +1267,7 @@ function mapOpenAIFinishReason2(finishReason) {
1258
1267
  case "tool_calls":
1259
1268
  return "tool-calls";
1260
1269
  default:
1261
- return "unknown";
1270
+ return "other";
1262
1271
  }
1263
1272
  }
1264
1273
 
@@ -1456,6 +1465,7 @@ var OpenAICompletionLanguageModel = class {
1456
1465
  };
1457
1466
  }
1458
1467
  async doGenerate(options) {
1468
+ var _a;
1459
1469
  const { args, warnings } = await this.getArgs(options);
1460
1470
  const {
1461
1471
  responseHeaders,
@@ -1483,7 +1493,10 @@ var OpenAICompletionLanguageModel = class {
1483
1493
  return {
1484
1494
  content: [{ type: "text", text: choice.text }],
1485
1495
  usage: convertOpenAICompletionUsage(response.usage),
1486
- finishReason: mapOpenAIFinishReason2(choice.finish_reason),
1496
+ finishReason: {
1497
+ unified: mapOpenAIFinishReason2(choice.finish_reason),
1498
+ raw: (_a = choice.finish_reason) != null ? _a : void 0
1499
+ },
1487
1500
  request: { body: args },
1488
1501
  response: {
1489
1502
  ...getResponseMetadata2(response),
@@ -1517,7 +1530,10 @@ var OpenAICompletionLanguageModel = class {
1517
1530
  abortSignal: options.abortSignal,
1518
1531
  fetch: this.config.fetch
1519
1532
  });
1520
- let finishReason = "unknown";
1533
+ let finishReason = {
1534
+ unified: "other",
1535
+ raw: void 0
1536
+ };
1521
1537
  const providerMetadata = { openai: {} };
1522
1538
  let usage = void 0;
1523
1539
  let isFirstChunk = true;
@@ -1532,13 +1548,13 @@ var OpenAICompletionLanguageModel = class {
1532
1548
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
1533
1549
  }
1534
1550
  if (!chunk.success) {
1535
- finishReason = "error";
1551
+ finishReason = { unified: "error", raw: void 0 };
1536
1552
  controller.enqueue({ type: "error", error: chunk.error });
1537
1553
  return;
1538
1554
  }
1539
1555
  const value = chunk.value;
1540
1556
  if ("error" in value) {
1541
- finishReason = "error";
1557
+ finishReason = { unified: "error", raw: void 0 };
1542
1558
  controller.enqueue({ type: "error", error: value.error });
1543
1559
  return;
1544
1560
  }
@@ -1555,7 +1571,10 @@ var OpenAICompletionLanguageModel = class {
1555
1571
  }
1556
1572
  const choice = value.choices[0];
1557
1573
  if ((choice == null ? void 0 : choice.finish_reason) != null) {
1558
- finishReason = mapOpenAIFinishReason2(choice.finish_reason);
1574
+ finishReason = {
1575
+ unified: mapOpenAIFinishReason2(choice.finish_reason),
1576
+ raw: choice.finish_reason
1577
+ };
1559
1578
  }
1560
1579
  if ((choice == null ? void 0 : choice.logprobs) != null) {
1561
1580
  providerMetadata.openai.logprobs = choice.logprobs;
@@ -2898,7 +2917,7 @@ function mapOpenAIResponseFinishReason({
2898
2917
  case "content_filter":
2899
2918
  return "content-filter";
2900
2919
  default:
2901
- return hasFunctionCall ? "tool-calls" : "unknown";
2920
+ return hasFunctionCall ? "tool-calls" : "other";
2902
2921
  }
2903
2922
  }
2904
2923
 
@@ -4216,7 +4235,7 @@ var OpenAIResponsesLanguageModel = class {
4216
4235
  };
4217
4236
  }
4218
4237
  async doGenerate(options) {
4219
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x;
4238
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z;
4220
4239
  const {
4221
4240
  args: body,
4222
4241
  warnings,
@@ -4613,10 +4632,13 @@ var OpenAIResponsesLanguageModel = class {
4613
4632
  const usage = response.usage;
4614
4633
  return {
4615
4634
  content,
4616
- finishReason: mapOpenAIResponseFinishReason({
4617
- finishReason: (_x = response.incomplete_details) == null ? void 0 : _x.reason,
4618
- hasFunctionCall
4619
- }),
4635
+ finishReason: {
4636
+ unified: mapOpenAIResponseFinishReason({
4637
+ finishReason: (_x = response.incomplete_details) == null ? void 0 : _x.reason,
4638
+ hasFunctionCall
4639
+ }),
4640
+ raw: (_z = (_y = response.incomplete_details) == null ? void 0 : _y.reason) != null ? _z : void 0
4641
+ },
4620
4642
  usage: convertOpenAIResponsesUsage(usage),
4621
4643
  request: { body },
4622
4644
  response: {
@@ -4657,7 +4679,10 @@ var OpenAIResponsesLanguageModel = class {
4657
4679
  });
4658
4680
  const self = this;
4659
4681
  const providerKey = this.config.provider.replace(".responses", "");
4660
- let finishReason = "unknown";
4682
+ let finishReason = {
4683
+ unified: "other",
4684
+ raw: void 0
4685
+ };
4661
4686
  let usage = void 0;
4662
4687
  const logprobs = [];
4663
4688
  let responseId = null;
@@ -4673,12 +4698,12 @@ var OpenAIResponsesLanguageModel = class {
4673
4698
  controller.enqueue({ type: "stream-start", warnings });
4674
4699
  },
4675
4700
  transform(chunk, controller) {
4676
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A;
4701
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C;
4677
4702
  if (options.includeRawChunks) {
4678
4703
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
4679
4704
  }
4680
4705
  if (!chunk.success) {
4681
- finishReason = "error";
4706
+ finishReason = { unified: "error", raw: void 0 };
4682
4707
  controller.enqueue({ type: "error", error: chunk.error });
4683
4708
  return;
4684
4709
  }
@@ -5177,10 +5202,13 @@ var OpenAIResponsesLanguageModel = class {
5177
5202
  activeReasoning[value.item_id].summaryParts[value.summary_index] = "can-conclude";
5178
5203
  }
5179
5204
  } else if (isResponseFinishedChunk(value)) {
5180
- finishReason = mapOpenAIResponseFinishReason({
5181
- finishReason: (_i = value.response.incomplete_details) == null ? void 0 : _i.reason,
5182
- hasFunctionCall
5183
- });
5205
+ finishReason = {
5206
+ unified: mapOpenAIResponseFinishReason({
5207
+ finishReason: (_i = value.response.incomplete_details) == null ? void 0 : _i.reason,
5208
+ hasFunctionCall
5209
+ }),
5210
+ raw: (_k = (_j = value.response.incomplete_details) == null ? void 0 : _j.reason) != null ? _k : void 0
5211
+ };
5184
5212
  usage = value.response.usage;
5185
5213
  if (typeof value.response.service_tier === "string") {
5186
5214
  serviceTier = value.response.service_tier;
@@ -5191,7 +5219,7 @@ var OpenAIResponsesLanguageModel = class {
5191
5219
  controller.enqueue({
5192
5220
  type: "source",
5193
5221
  sourceType: "url",
5194
- id: (_l = (_k = (_j = self.config).generateId) == null ? void 0 : _k.call(_j)) != null ? _l : generateId2(),
5222
+ id: (_n = (_m = (_l = self.config).generateId) == null ? void 0 : _m.call(_l)) != null ? _n : generateId2(),
5195
5223
  url: value.annotation.url,
5196
5224
  title: value.annotation.title
5197
5225
  });
@@ -5199,10 +5227,10 @@ var OpenAIResponsesLanguageModel = class {
5199
5227
  controller.enqueue({
5200
5228
  type: "source",
5201
5229
  sourceType: "document",
5202
- id: (_o = (_n = (_m = self.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : generateId2(),
5230
+ id: (_q = (_p = (_o = self.config).generateId) == null ? void 0 : _p.call(_o)) != null ? _q : generateId2(),
5203
5231
  mediaType: "text/plain",
5204
- title: (_q = (_p = value.annotation.quote) != null ? _p : value.annotation.filename) != null ? _q : "Document",
5205
- filename: (_r = value.annotation.filename) != null ? _r : value.annotation.file_id,
5232
+ title: (_s = (_r = value.annotation.quote) != null ? _r : value.annotation.filename) != null ? _s : "Document",
5233
+ filename: (_t = value.annotation.filename) != null ? _t : value.annotation.file_id,
5206
5234
  ...value.annotation.file_id ? {
5207
5235
  providerMetadata: {
5208
5236
  [providerKey]: {
@@ -5215,10 +5243,10 @@ var OpenAIResponsesLanguageModel = class {
5215
5243
  controller.enqueue({
5216
5244
  type: "source",
5217
5245
  sourceType: "document",
5218
- id: (_u = (_t = (_s = self.config).generateId) == null ? void 0 : _t.call(_s)) != null ? _u : generateId2(),
5246
+ id: (_w = (_v = (_u = self.config).generateId) == null ? void 0 : _v.call(_u)) != null ? _w : generateId2(),
5219
5247
  mediaType: "text/plain",
5220
- title: (_w = (_v = value.annotation.filename) != null ? _v : value.annotation.file_id) != null ? _w : "Document",
5221
- filename: (_x = value.annotation.filename) != null ? _x : value.annotation.file_id,
5248
+ title: (_y = (_x = value.annotation.filename) != null ? _x : value.annotation.file_id) != null ? _y : "Document",
5249
+ filename: (_z = value.annotation.filename) != null ? _z : value.annotation.file_id,
5222
5250
  providerMetadata: {
5223
5251
  [providerKey]: {
5224
5252
  fileId: value.annotation.file_id,
@@ -5231,7 +5259,7 @@ var OpenAIResponsesLanguageModel = class {
5231
5259
  controller.enqueue({
5232
5260
  type: "source",
5233
5261
  sourceType: "document",
5234
- id: (_A = (_z = (_y = self.config).generateId) == null ? void 0 : _z.call(_y)) != null ? _A : generateId2(),
5262
+ id: (_C = (_B = (_A = self.config).generateId) == null ? void 0 : _B.call(_A)) != null ? _C : generateId2(),
5235
5263
  mediaType: "application/octet-stream",
5236
5264
  title: value.annotation.file_id,
5237
5265
  filename: value.annotation.file_id,
@@ -5693,7 +5721,7 @@ var OpenAITranscriptionModel = class {
5693
5721
  };
5694
5722
 
5695
5723
  // src/version.ts
5696
- var VERSION = true ? "3.0.0-beta.107" : "0.0.0-test";
5724
+ var VERSION = true ? "3.0.0-beta.109" : "0.0.0-test";
5697
5725
 
5698
5726
  // src/openai-provider.ts
5699
5727
  function createOpenAI(options = {}) {