@zenning/openai 1.4.4 → 1.4.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -4,8 +4,8 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __getOwnPropNames = Object.getOwnPropertyNames;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
  var __export = (target, all) => {
- for (var name in all)
- __defProp(target, name, { get: all[name], enumerable: true });
+ for (var name14 in all)
+ __defProp(target, name14, { get: all[name14], enumerable: true });
  };
  var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
@@ -28,13 +28,171 @@ module.exports = __toCommonJS(src_exports);
  // src/openai-provider.ts
  var import_provider_utils11 = require("@ai-sdk/provider-utils");

+ // ../provider/dist/index.mjs
+ var marker = "vercel.ai.error";
+ var symbol = Symbol.for(marker);
+ var _a;
+ var _AISDKError = class _AISDKError2 extends Error {
+ /**
+ * Creates an AI SDK Error.
+ *
+ * @param {Object} params - The parameters for creating the error.
+ * @param {string} params.name - The name of the error.
+ * @param {string} params.message - The error message.
+ * @param {unknown} [params.cause] - The underlying cause of the error.
+ */
+ constructor({
+ name: name14,
+ message,
+ cause
+ }) {
+ super(message);
+ this[_a] = true;
+ this.name = name14;
+ this.cause = cause;
+ }
+ /**
+ * Checks if the given error is an AI SDK Error.
+ * @param {unknown} error - The error to check.
+ * @returns {boolean} True if the error is an AI SDK Error, false otherwise.
+ */
+ static isInstance(error) {
+ return _AISDKError2.hasMarker(error, marker);
+ }
+ static hasMarker(error, marker15) {
+ const markerSymbol = Symbol.for(marker15);
+ return error != null && typeof error === "object" && markerSymbol in error && typeof error[markerSymbol] === "boolean" && error[markerSymbol] === true;
+ }
+ };
+ _a = symbol;
+ var AISDKError = _AISDKError;
+ var name = "AI_APICallError";
+ var marker2 = `vercel.ai.error.${name}`;
+ var symbol2 = Symbol.for(marker2);
+ var _a2;
+ _a2 = symbol2;
+ var name2 = "AI_EmptyResponseBodyError";
+ var marker3 = `vercel.ai.error.${name2}`;
+ var symbol3 = Symbol.for(marker3);
+ var _a3;
+ _a3 = symbol3;
+ var name3 = "AI_InvalidArgumentError";
+ var marker4 = `vercel.ai.error.${name3}`;
+ var symbol4 = Symbol.for(marker4);
+ var _a4;
+ _a4 = symbol4;
+ var name4 = "AI_InvalidPromptError";
+ var marker5 = `vercel.ai.error.${name4}`;
+ var symbol5 = Symbol.for(marker5);
+ var _a5;
+ var InvalidPromptError = class extends AISDKError {
+ constructor({
+ prompt,
+ message,
+ cause
+ }) {
+ super({ name: name4, message: `Invalid prompt: ${message}`, cause });
+ this[_a5] = true;
+ this.prompt = prompt;
+ }
+ static isInstance(error) {
+ return AISDKError.hasMarker(error, marker5);
+ }
+ };
+ _a5 = symbol5;
+ var name5 = "AI_InvalidResponseDataError";
+ var marker6 = `vercel.ai.error.${name5}`;
+ var symbol6 = Symbol.for(marker6);
+ var _a6;
+ var InvalidResponseDataError = class extends AISDKError {
+ constructor({
+ data,
+ message = `Invalid response data: ${JSON.stringify(data)}.`
+ }) {
+ super({ name: name5, message });
+ this[_a6] = true;
+ this.data = data;
+ }
+ static isInstance(error) {
+ return AISDKError.hasMarker(error, marker6);
+ }
+ };
+ _a6 = symbol6;
+ var name6 = "AI_JSONParseError";
+ var marker7 = `vercel.ai.error.${name6}`;
+ var symbol7 = Symbol.for(marker7);
+ var _a7;
+ _a7 = symbol7;
+ var name7 = "AI_LoadAPIKeyError";
+ var marker8 = `vercel.ai.error.${name7}`;
+ var symbol8 = Symbol.for(marker8);
+ var _a8;
+ _a8 = symbol8;
+ var name8 = "AI_LoadSettingError";
+ var marker9 = `vercel.ai.error.${name8}`;
+ var symbol9 = Symbol.for(marker9);
+ var _a9;
+ _a9 = symbol9;
+ var name9 = "AI_NoContentGeneratedError";
+ var marker10 = `vercel.ai.error.${name9}`;
+ var symbol10 = Symbol.for(marker10);
+ var _a10;
+ _a10 = symbol10;
+ var name10 = "AI_NoSuchModelError";
+ var marker11 = `vercel.ai.error.${name10}`;
+ var symbol11 = Symbol.for(marker11);
+ var _a11;
+ _a11 = symbol11;
+ var name11 = "AI_TooManyEmbeddingValuesForCallError";
+ var marker12 = `vercel.ai.error.${name11}`;
+ var symbol12 = Symbol.for(marker12);
+ var _a12;
+ var TooManyEmbeddingValuesForCallError = class extends AISDKError {
+ constructor(options) {
+ super({
+ name: name11,
+ message: `Too many values for a single embedding call. The ${options.provider} model "${options.modelId}" can only embed up to ${options.maxEmbeddingsPerCall} values per call, but ${options.values.length} values were provided.`
+ });
+ this[_a12] = true;
+ this.provider = options.provider;
+ this.modelId = options.modelId;
+ this.maxEmbeddingsPerCall = options.maxEmbeddingsPerCall;
+ this.values = options.values;
+ }
+ static isInstance(error) {
+ return AISDKError.hasMarker(error, marker12);
+ }
+ };
+ _a12 = symbol12;
+ var name12 = "AI_TypeValidationError";
+ var marker13 = `vercel.ai.error.${name12}`;
+ var symbol13 = Symbol.for(marker13);
+ var _a13;
+ _a13 = symbol13;
+ var name13 = "AI_UnsupportedFunctionalityError";
+ var marker14 = `vercel.ai.error.${name13}`;
+ var symbol14 = Symbol.for(marker14);
+ var _a14;
+ var UnsupportedFunctionalityError = class extends AISDKError {
+ constructor({
+ functionality,
+ message = `'${functionality}' functionality not supported.`
+ }) {
+ super({ name: name13, message });
+ this[_a14] = true;
+ this.functionality = functionality;
+ }
+ static isInstance(error) {
+ return AISDKError.hasMarker(error, marker14);
+ }
+ };
+ _a14 = symbol14;
+
  // src/openai-chat-language-model.ts
- var import_provider3 = require("@ai-sdk/provider");
  var import_provider_utils3 = require("@ai-sdk/provider-utils");
  var import_zod2 = require("zod");

  // src/convert-to-openai-chat-messages.ts
- var import_provider = require("@ai-sdk/provider");
  var import_provider_utils = require("@ai-sdk/provider-utils");
  function convertToOpenAIChatMessages({
  prompt,
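
Note: the block vendored in above (from ../provider/dist/index.mjs) identifies AI SDK errors by a globally registered Symbol rather than by instanceof, so isInstance checks keep working even when two bundled copies of a class exist. A minimal sketch of the same pattern, with illustrative names that are not part of the package:

    // Marker-based error identity, mirroring the vendored AISDKError above.
    const marker = "vercel.ai.error";
    const symbol = Symbol.for(marker); // Symbol.for returns the same symbol in every bundle/realm

    class MyError extends Error {
      constructor(message) {
        super(message);
        this[symbol] = true; // tag the instance with the shared marker
      }
      static isInstance(error) {
        return error != null && typeof error === "object" && symbol in error && error[symbol] === true;
      }
    }

    MyError.isInstance(new MyError("boom")); // true, even for a duplicate copy of the class
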
@@ -79,7 +237,7 @@ function convertToOpenAIChatMessages({
  messages.push({
  role: "user",
  content: content.map((part, index) => {
- var _a, _b, _c, _d;
+ var _a15, _b, _c, _d;
  switch (part.type) {
  case "text": {
  return { type: "text", text: part.text };
@@ -88,7 +246,7 @@ function convertToOpenAIChatMessages({
  return {
  type: "image_url",
  image_url: {
- url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.image)}`,
+ url: part.image instanceof URL ? part.image.toString() : `data:${(_a15 = part.mimeType) != null ? _a15 : "image/jpeg"};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.image)}`,
  // OpenAI specific extension: image detail
  detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
  }
@@ -96,7 +254,7 @@ function convertToOpenAIChatMessages({
  }
  case "file": {
  if (part.data instanceof URL) {
- throw new import_provider.UnsupportedFunctionalityError({
+ throw new UnsupportedFunctionalityError({
  functionality: "'File content parts with URL data' functionality not supported."
  });
  }
@@ -124,7 +282,7 @@ function convertToOpenAIChatMessages({
  };
  }
  default: {
- throw new import_provider.UnsupportedFunctionalityError({
+ throw new UnsupportedFunctionalityError({
  functionality: `File content part type ${part.mimeType} in user messages`
  });
  }
@@ -159,7 +317,7 @@ function convertToOpenAIChatMessages({
  }
  if (useLegacyFunctionCalling) {
  if (toolCalls.length > 1) {
- throw new import_provider.UnsupportedFunctionalityError({
+ throw new UnsupportedFunctionalityError({
  functionality: "useLegacyFunctionCalling with multiple tool calls in one message"
  });
  }
@@ -206,8 +364,8 @@ function convertToOpenAIChatMessages({

  // src/map-openai-chat-logprobs.ts
  function mapOpenAIChatLogProbsOutput(logprobs) {
- var _a, _b;
- return (_b = (_a = logprobs == null ? void 0 : logprobs.content) == null ? void 0 : _a.map(({ token, logprob, top_logprobs }) => ({
+ var _a15, _b;
+ return (_b = (_a15 = logprobs == null ? void 0 : logprobs.content) == null ? void 0 : _a15.map(({ token, logprob, top_logprobs }) => ({
  token,
  logprob,
  topLogprobs: top_logprobs ? top_logprobs.map(({ token: token2, logprob: logprob2 }) => ({
@@ -267,14 +425,13 @@ function getResponseMetadata({
267
425
  }
268
426
 
269
427
  // src/openai-prepare-tools.ts
270
- var import_provider2 = require("@ai-sdk/provider");
271
428
  function prepareTools({
272
429
  mode,
273
430
  useLegacyFunctionCalling = false,
274
431
  structuredOutputs
275
432
  }) {
276
- var _a;
277
- const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
433
+ var _a15;
434
+ const tools = ((_a15 = mode.tools) == null ? void 0 : _a15.length) ? mode.tools : void 0;
278
435
  const toolWarnings = [];
279
436
  if (tools == null) {
280
437
  return { tools: void 0, tool_choice: void 0, toolWarnings };
@@ -311,7 +468,7 @@ function prepareTools({
311
468
  toolWarnings
312
469
  };
313
470
  case "required":
314
- throw new import_provider2.UnsupportedFunctionalityError({
471
+ throw new UnsupportedFunctionalityError({
315
472
  functionality: "useLegacyFunctionCalling and toolChoice: required"
316
473
  });
317
474
  default:
@@ -360,7 +517,7 @@ function prepareTools({
360
517
  };
361
518
  default: {
362
519
  const _exhaustiveCheck = type;
363
- throw new import_provider2.UnsupportedFunctionalityError({
520
+ throw new UnsupportedFunctionalityError({
364
521
  functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
365
522
  });
366
523
  }
@@ -376,8 +533,8 @@ var OpenAIChatLanguageModel = class {
376
533
  this.config = config;
377
534
  }
378
535
  get supportsStructuredOutputs() {
379
- var _a;
380
- return (_a = this.settings.structuredOutputs) != null ? _a : isReasoningModel(this.modelId);
536
+ var _a15;
537
+ return (_a15 = this.settings.structuredOutputs) != null ? _a15 : isReasoningModel(this.modelId);
381
538
  }
382
539
  get defaultObjectGenerationMode() {
383
540
  if (isAudioModel(this.modelId)) {
@@ -405,7 +562,7 @@ var OpenAIChatLanguageModel = class {
405
562
  seed,
406
563
  providerMetadata
407
564
  }) {
408
- var _a, _b, _c, _d, _e, _f, _g, _h;
565
+ var _a15, _b, _c, _d, _e, _f, _g, _h;
409
566
  const type = mode.type;
410
567
  const warnings = [];
411
568
  if (topK != null) {
@@ -423,12 +580,12 @@ var OpenAIChatLanguageModel = class {
423
580
  }
424
581
  const useLegacyFunctionCalling = this.settings.useLegacyFunctionCalling;
425
582
  if (useLegacyFunctionCalling && this.settings.parallelToolCalls === true) {
426
- throw new import_provider3.UnsupportedFunctionalityError({
583
+ throw new UnsupportedFunctionalityError({
427
584
  functionality: "useLegacyFunctionCalling with parallelToolCalls"
428
585
  });
429
586
  }
430
587
  if (useLegacyFunctionCalling && this.supportsStructuredOutputs) {
431
- throw new import_provider3.UnsupportedFunctionalityError({
588
+ throw new UnsupportedFunctionalityError({
432
589
  functionality: "structuredOutputs with useLegacyFunctionCalling"
433
590
  });
434
591
  }
@@ -460,7 +617,7 @@ var OpenAIChatLanguageModel = class {
460
617
  json_schema: {
461
618
  schema: responseFormat.schema,
462
619
  strict: true,
463
- name: (_a = responseFormat.name) != null ? _a : "response",
620
+ name: (_a15 = responseFormat.name) != null ? _a15 : "response",
464
621
  description: responseFormat.description
465
622
  }
466
623
  } : { type: "json_object" } : void 0,
@@ -623,7 +780,7 @@ var OpenAIChatLanguageModel = class {
623
780
  }
624
781
  }
625
782
  async doGenerate(options) {
626
- var _a, _b, _c, _d, _e, _f, _g, _h;
783
+ var _a15, _b, _c, _d, _e, _f, _g, _h;
627
784
  const { args: body, warnings } = this.getArgs(options);
628
785
  const {
629
786
  responseHeaders,
@@ -645,7 +802,7 @@ var OpenAIChatLanguageModel = class {
645
802
  });
646
803
  const { messages: rawPrompt, ...rawSettings } = body;
647
804
  const choice = response.choices[0];
648
- const completionTokenDetails = (_a = response.usage) == null ? void 0 : _a.completion_tokens_details;
805
+ const completionTokenDetails = (_a15 = response.usage) == null ? void 0 : _a15.completion_tokens_details;
649
806
  const promptTokenDetails = (_b = response.usage) == null ? void 0 : _b.prompt_tokens_details;
650
807
  const providerMetadata = { openai: {} };
651
808
  if ((completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens) != null) {
@@ -670,10 +827,10 @@ var OpenAIChatLanguageModel = class {
670
827
  args: choice.message.function_call.arguments
671
828
  }
672
829
  ] : (_d = choice.message.tool_calls) == null ? void 0 : _d.map((toolCall) => {
673
- var _a2;
830
+ var _a16;
674
831
  return {
675
832
  toolCallType: "function",
676
- toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils3.generateId)(),
833
+ toolCallId: (_a16 = toolCall.id) != null ? _a16 : (0, import_provider_utils3.generateId)(),
677
834
  toolName: toolCall.function.name,
678
835
  args: toolCall.function.arguments
679
836
  };
@@ -772,7 +929,7 @@ var OpenAIChatLanguageModel = class {
772
929
  stream: response.pipeThrough(
773
930
  new TransformStream({
774
931
  transform(chunk, controller) {
775
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
932
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
776
933
  if (!chunk.success) {
777
934
  finishReason = "error";
778
935
  controller.enqueue({ type: "error", error: chunk.error });
@@ -849,19 +1006,19 @@ var OpenAIChatLanguageModel = class {
849
1006
  const index = toolCallDelta.index;
850
1007
  if (toolCalls[index] == null) {
851
1008
  if (toolCallDelta.type !== "function") {
852
- throw new import_provider3.InvalidResponseDataError({
1009
+ throw new InvalidResponseDataError({
853
1010
  data: toolCallDelta,
854
1011
  message: `Expected 'function' type.`
855
1012
  });
856
1013
  }
857
1014
  if (toolCallDelta.id == null) {
858
- throw new import_provider3.InvalidResponseDataError({
1015
+ throw new InvalidResponseDataError({
859
1016
  data: toolCallDelta,
860
1017
  message: `Expected 'id' to be a string.`
861
1018
  });
862
1019
  }
863
- if (((_a = toolCallDelta.function) == null ? void 0 : _a.name) == null) {
864
- throw new import_provider3.InvalidResponseDataError({
1020
+ if (((_a15 = toolCallDelta.function) == null ? void 0 : _a15.name) == null) {
1021
+ throw new InvalidResponseDataError({
865
1022
  data: toolCallDelta,
866
1023
  message: `Expected 'function.name' to be a string.`
867
1024
  });
@@ -927,13 +1084,13 @@ var OpenAIChatLanguageModel = class {
927
1084
  }
928
1085
  },
929
1086
  flush(controller) {
930
- var _a, _b;
1087
+ var _a15, _b;
931
1088
  controller.enqueue({
932
1089
  type: "finish",
933
1090
  finishReason,
934
1091
  logprobs,
935
1092
  usage: {
936
- promptTokens: (_a = usage.promptTokens) != null ? _a : NaN,
1093
+ promptTokens: (_a15 = usage.promptTokens) != null ? _a15 : NaN,
937
1094
  completionTokens: (_b = usage.completionTokens) != null ? _b : NaN
938
1095
  },
939
1096
  ...providerMetadata != null ? { providerMetadata } : {}
@@ -1059,11 +1216,11 @@ function isAudioModel(modelId) {
1059
1216
  return modelId.startsWith("gpt-4o-audio-preview");
1060
1217
  }
1061
1218
  function getSystemMessageMode(modelId) {
1062
- var _a, _b;
1219
+ var _a15, _b;
1063
1220
  if (!isReasoningModel(modelId)) {
1064
1221
  return "system";
1065
1222
  }
1066
- return (_b = (_a = reasoningModels[modelId]) == null ? void 0 : _a.systemMessageMode) != null ? _b : "developer";
1223
+ return (_b = (_a15 = reasoningModels[modelId]) == null ? void 0 : _a15.systemMessageMode) != null ? _b : "developer";
1067
1224
  }
1068
1225
  var reasoningModels = {
1069
1226
  "o1-mini": {
@@ -1099,12 +1256,10 @@ var reasoningModels = {
1099
1256
  };
1100
1257
 
1101
1258
  // src/openai-completion-language-model.ts
1102
- var import_provider5 = require("@ai-sdk/provider");
1103
1259
  var import_provider_utils4 = require("@ai-sdk/provider-utils");
1104
1260
  var import_zod3 = require("zod");
1105
1261
 
1106
1262
  // src/convert-to-openai-completion-prompt.ts
1107
- var import_provider4 = require("@ai-sdk/provider");
1108
1263
  function convertToOpenAICompletionPrompt({
1109
1264
  prompt,
1110
1265
  inputFormat,
@@ -1124,7 +1279,7 @@ function convertToOpenAICompletionPrompt({
1124
1279
  for (const { role, content } of prompt) {
1125
1280
  switch (role) {
1126
1281
  case "system": {
1127
- throw new import_provider4.InvalidPromptError({
1282
+ throw new InvalidPromptError({
1128
1283
  message: "Unexpected system message in prompt: ${content}",
1129
1284
  prompt
1130
1285
  });
@@ -1136,7 +1291,7 @@ function convertToOpenAICompletionPrompt({
1136
1291
  return part.text;
1137
1292
  }
1138
1293
  case "image": {
1139
- throw new import_provider4.UnsupportedFunctionalityError({
1294
+ throw new UnsupportedFunctionalityError({
1140
1295
  functionality: "images"
1141
1296
  });
1142
1297
  }
@@ -1155,7 +1310,7 @@ ${userMessage}
1155
1310
  return part.text;
1156
1311
  }
1157
1312
  case "tool-call": {
1158
- throw new import_provider4.UnsupportedFunctionalityError({
1313
+ throw new UnsupportedFunctionalityError({
1159
1314
  functionality: "tool-call messages"
1160
1315
  });
1161
1316
  }
@@ -1168,7 +1323,7 @@ ${assistantMessage}
1168
1323
  break;
1169
1324
  }
1170
1325
  case "tool": {
1171
- throw new import_provider4.UnsupportedFunctionalityError({
1326
+ throw new UnsupportedFunctionalityError({
1172
1327
  functionality: "tool messages"
1173
1328
  });
1174
1329
  }
@@ -1227,7 +1382,7 @@ var OpenAICompletionLanguageModel = class {
1227
1382
  responseFormat,
1228
1383
  seed
1229
1384
  }) {
1230
- var _a;
1385
+ var _a15;
1231
1386
  const type = mode.type;
1232
1387
  const warnings = [];
1233
1388
  if (topK != null) {
@@ -1268,25 +1423,25 @@ var OpenAICompletionLanguageModel = class {
1268
1423
  };
1269
1424
  switch (type) {
1270
1425
  case "regular": {
1271
- if ((_a = mode.tools) == null ? void 0 : _a.length) {
1272
- throw new import_provider5.UnsupportedFunctionalityError({
1426
+ if ((_a15 = mode.tools) == null ? void 0 : _a15.length) {
1427
+ throw new UnsupportedFunctionalityError({
1273
1428
  functionality: "tools"
1274
1429
  });
1275
1430
  }
1276
1431
  if (mode.toolChoice) {
1277
- throw new import_provider5.UnsupportedFunctionalityError({
1432
+ throw new UnsupportedFunctionalityError({
1278
1433
  functionality: "toolChoice"
1279
1434
  });
1280
1435
  }
1281
1436
  return { args: baseArgs, warnings };
1282
1437
  }
1283
1438
  case "object-json": {
1284
- throw new import_provider5.UnsupportedFunctionalityError({
1439
+ throw new UnsupportedFunctionalityError({
1285
1440
  functionality: "object-json mode"
1286
1441
  });
1287
1442
  }
1288
1443
  case "object-tool": {
1289
- throw new import_provider5.UnsupportedFunctionalityError({
1444
+ throw new UnsupportedFunctionalityError({
1290
1445
  functionality: "object-tool mode"
1291
1446
  });
1292
1447
  }
@@ -1472,7 +1627,6 @@ var openaiCompletionChunkSchema = import_zod3.z.union([
1472
1627
  ]);
1473
1628
 
1474
1629
  // src/openai-embedding-model.ts
1475
- var import_provider6 = require("@ai-sdk/provider");
1476
1630
  var import_provider_utils5 = require("@ai-sdk/provider-utils");
1477
1631
  var import_zod4 = require("zod");
1478
1632
  var OpenAIEmbeddingModel = class {
@@ -1486,12 +1640,12 @@ var OpenAIEmbeddingModel = class {
1486
1640
  return this.config.provider;
1487
1641
  }
1488
1642
  get maxEmbeddingsPerCall() {
1489
- var _a;
1490
- return (_a = this.settings.maxEmbeddingsPerCall) != null ? _a : 2048;
1643
+ var _a15;
1644
+ return (_a15 = this.settings.maxEmbeddingsPerCall) != null ? _a15 : 2048;
1491
1645
  }
1492
1646
  get supportsParallelCalls() {
1493
- var _a;
1494
- return (_a = this.settings.supportsParallelCalls) != null ? _a : true;
1647
+ var _a15;
1648
+ return (_a15 = this.settings.supportsParallelCalls) != null ? _a15 : true;
1495
1649
  }
1496
1650
  async doEmbed({
1497
1651
  values,
@@ -1499,7 +1653,7 @@ var OpenAIEmbeddingModel = class {
1499
1653
  abortSignal
1500
1654
  }) {
1501
1655
  if (values.length > this.maxEmbeddingsPerCall) {
1502
- throw new import_provider6.TooManyEmbeddingValuesForCallError({
1656
+ throw new TooManyEmbeddingValuesForCallError({
1503
1657
  provider: this.provider,
1504
1658
  modelId: this.modelId,
1505
1659
  maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
@@ -1559,8 +1713,8 @@ var OpenAIImageModel = class {
1559
1713
  this.specificationVersion = "v1";
1560
1714
  }
1561
1715
  get maxImagesPerCall() {
1562
- var _a, _b;
1563
- return (_b = (_a = this.settings.maxImagesPerCall) != null ? _a : modelMaxImagesPerCall[this.modelId]) != null ? _b : 1;
1716
+ var _a15, _b;
1717
+ return (_b = (_a15 = this.settings.maxImagesPerCall) != null ? _a15 : modelMaxImagesPerCall[this.modelId]) != null ? _b : 1;
1564
1718
  }
1565
1719
  get provider() {
1566
1720
  return this.config.provider;
@@ -1575,7 +1729,7 @@ var OpenAIImageModel = class {
1575
1729
  headers,
1576
1730
  abortSignal
1577
1731
  }) {
1578
- var _a, _b, _c, _d;
1732
+ var _a15, _b, _c, _d;
1579
1733
  const warnings = [];
1580
1734
  if (aspectRatio != null) {
1581
1735
  warnings.push({
@@ -1587,7 +1741,7 @@ var OpenAIImageModel = class {
1587
1741
  if (seed != null) {
1588
1742
  warnings.push({ type: "unsupported-setting", setting: "seed" });
1589
1743
  }
1590
- const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
1744
+ const currentDate = (_c = (_b = (_a15 = this.config._internal) == null ? void 0 : _a15.currentDate) == null ? void 0 : _b.call(_a15)) != null ? _c : /* @__PURE__ */ new Date();
1591
1745
  const { value: response, responseHeaders } = await (0, import_provider_utils6.postJsonToApi)({
1592
1746
  url: this.config.url({
1593
1747
  path: "/images/generations",
@@ -1707,7 +1861,7 @@ var OpenAITranscriptionModel = class {
1707
1861
  mediaType,
1708
1862
  providerOptions
1709
1863
  }) {
1710
- var _a, _b, _c, _d, _e;
1864
+ var _a15, _b, _c, _d, _e;
1711
1865
  const warnings = [];
1712
1866
  const openAIOptions = (0, import_provider_utils7.parseProviderOptions)({
1713
1867
  provider: "openai",
@@ -1720,7 +1874,7 @@ var OpenAITranscriptionModel = class {
1720
1874
  formData.append("file", new File([blob], "audio", { type: mediaType }));
1721
1875
  if (openAIOptions) {
1722
1876
  const transcriptionModelOptions = {
1723
- include: (_a = openAIOptions.include) != null ? _a : void 0,
1877
+ include: (_a15 = openAIOptions.include) != null ? _a15 : void 0,
1724
1878
  language: (_b = openAIOptions.language) != null ? _b : void 0,
1725
1879
  prompt: (_c = openAIOptions.prompt) != null ? _c : void 0,
1726
1880
  temperature: (_d = openAIOptions.temperature) != null ? _d : void 0,
@@ -1739,8 +1893,8 @@ var OpenAITranscriptionModel = class {
1739
1893
  };
1740
1894
  }
1741
1895
  async doGenerate(options) {
1742
- var _a, _b, _c, _d, _e, _f;
1743
- const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
1896
+ var _a15, _b, _c, _d, _e, _f;
1897
+ const currentDate = (_c = (_b = (_a15 = this.config._internal) == null ? void 0 : _a15.currentDate) == null ? void 0 : _b.call(_a15)) != null ? _c : /* @__PURE__ */ new Date();
1744
1898
  const { formData, warnings } = this.getArgs(options);
1745
1899
  const {
1746
1900
  value: response,
@@ -1798,7 +1952,6 @@ var import_provider_utils9 = require("@ai-sdk/provider-utils");
1798
1952
  var import_zod11 = require("zod");
1799
1953
 
1800
1954
  // src/responses/convert-to-openai-responses-messages.ts
1801
- var import_provider7 = require("@ai-sdk/provider");
1802
1955
  var import_provider_utils8 = require("@ai-sdk/provider-utils");
1803
1956
  function convertToOpenAIResponsesMessages({
1804
1957
  prompt,
@@ -1838,7 +1991,7 @@ function convertToOpenAIResponsesMessages({
1838
1991
  messages.push({
1839
1992
  role: "user",
1840
1993
  content: content.map((part, index) => {
1841
- var _a, _b, _c, _d;
1994
+ var _a15, _b, _c, _d;
1842
1995
  switch (part.type) {
1843
1996
  case "text": {
1844
1997
  return { type: "input_text", text: part.text };
@@ -1846,14 +1999,14 @@ function convertToOpenAIResponsesMessages({
1846
1999
  case "image": {
1847
2000
  return {
1848
2001
  type: "input_image",
1849
- image_url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils8.convertUint8ArrayToBase64)(part.image)}`,
2002
+ image_url: part.image instanceof URL ? part.image.toString() : `data:${(_a15 = part.mimeType) != null ? _a15 : "image/jpeg"};base64,${(0, import_provider_utils8.convertUint8ArrayToBase64)(part.image)}`,
1850
2003
  // OpenAI specific extension: image detail
1851
2004
  detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
1852
2005
  };
1853
2006
  }
1854
2007
  case "file": {
1855
2008
  if (part.data instanceof URL) {
1856
- throw new import_provider7.UnsupportedFunctionalityError({
2009
+ throw new UnsupportedFunctionalityError({
1857
2010
  functionality: "File URLs in user messages"
1858
2011
  });
1859
2012
  }
@@ -1866,7 +2019,7 @@ function convertToOpenAIResponsesMessages({
1866
2019
  };
1867
2020
  }
1868
2021
  default: {
1869
- throw new import_provider7.UnsupportedFunctionalityError({
2022
+ throw new UnsupportedFunctionalityError({
1870
2023
  functionality: "Only PDF files are supported in user messages"
1871
2024
  });
1872
2025
  }
@@ -1937,9 +2090,6 @@ function mapOpenAIResponseFinishReason({
1937
2090
  }
1938
2091
  }
1939
2092
 
1940
- // src/responses/openai-responses-prepare-tools.ts
1941
- var import_provider8 = require("@ai-sdk/provider");
1942
-
1943
2093
  // src/tool/code-interpreter.ts
1944
2094
  var import_zod7 = require("zod");
1945
2095
  var codeInterpreterArgsSchema = import_zod7.z.object({
@@ -2040,8 +2190,8 @@ function prepareResponsesTools({
2040
2190
  mode,
2041
2191
  strict
2042
2192
  }) {
2043
- var _a;
2044
- const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
2193
+ var _a15;
2194
+ const tools = ((_a15 = mode.tools) == null ? void 0 : _a15.length) ? mode.tools : void 0;
2045
2195
  const toolWarnings = [];
2046
2196
  if (tools == null) {
2047
2197
  return { tools: void 0, tool_choice: void 0, toolWarnings };
@@ -2127,7 +2277,7 @@ function prepareResponsesTools({
2127
2277
  };
2128
2278
  default: {
2129
2279
  const _exhaustiveCheck = type;
2130
- throw new import_provider8.UnsupportedFunctionalityError({
2280
+ throw new UnsupportedFunctionalityError({
2131
2281
  functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
2132
2282
  });
2133
2283
  }
@@ -2160,7 +2310,7 @@ var OpenAIResponsesLanguageModel = class {
2160
2310
  providerMetadata,
2161
2311
  responseFormat
2162
2312
  }) {
2163
- var _a, _b, _c;
2313
+ var _a15, _b, _c;
2164
2314
  const warnings = [];
2165
2315
  const modelConfig = getResponsesModelConfig(this.modelId);
2166
2316
  const type = mode.type;
@@ -2205,7 +2355,7 @@ var OpenAIResponsesLanguageModel = class {
2205
2355
  providerOptions: providerMetadata,
2206
2356
  schema: openaiResponsesProviderOptionsSchema
2207
2357
  });
2208
- const isStrict = (_a = openaiOptions == null ? void 0 : openaiOptions.strictSchemas) != null ? _a : true;
2358
+ const isStrict = (_a15 = openaiOptions == null ? void 0 : openaiOptions.strictSchemas) != null ? _a15 : true;
2209
2359
  console.log("openaiOptions", JSON.stringify(openaiOptions));
2210
2360
  const baseArgs = {
2211
2361
  model: this.modelId,
@@ -2323,7 +2473,7 @@ var OpenAIResponsesLanguageModel = class {
2323
2473
  }
2324
2474
  }
2325
2475
  async doGenerate(options) {
2326
- var _a, _b, _c, _d, _e, _f, _g;
2476
+ var _a15, _b, _c, _d, _e, _f, _g;
2327
2477
  const { args: body, warnings } = this.getArgs(options);
2328
2478
  const {
2329
2479
  responseHeaders,
@@ -2478,36 +2628,59 @@ var OpenAIResponsesLanguageModel = class {
2478
2628
  });
2479
2629
  }
2480
2630
  }
2481
- const reasoningSummary = (_b = (_a = response.output.find((item) => item.type === "reasoning")) == null ? void 0 : _a.summary) != null ? _b : null;
2482
- console.log(JSON.stringify({
2631
+ const reasoningSummary = (_b = (_a15 = response.output.find((item) => item.type === "reasoning")) == null ? void 0 : _a15.summary) != null ? _b : null;
2632
+ const allAnnotations = outputTextElements.flatMap((content) => content.annotations);
2633
+ console.log("\u{1F4CB} Processing annotations in doGenerate:", JSON.stringify({
2483
2634
  msg: "ai-sdk: content annotations",
2484
- annotations: outputTextElements.flatMap((content) => content.annotations)
2485
- }));
2635
+ count: allAnnotations.length,
2636
+ annotations: allAnnotations
2637
+ }, null, 2));
2486
2638
  return {
2487
2639
  text: outputTextElements.map((content) => content.text).join("\n"),
2488
2640
  sources: outputTextElements.flatMap(
2489
2641
  (content) => content.annotations.map((annotation) => {
2490
- var _a2, _b2, _c2, _d2, _e2, _f2, _g2, _h, _i, _j, _k;
2491
- if (annotation.type === "url_citation") {
2492
- return {
2493
- sourceType: "url",
2494
- id: (_c2 = (_b2 = (_a2 = this.config).generateId) == null ? void 0 : _b2.call(_a2)) != null ? _c2 : (0, import_provider_utils9.generateId)(),
2495
- url: annotation.url,
2496
- title: annotation.title
2497
- };
2498
- } else if (annotation.type === "file_citation") {
2499
- return {
2500
- sourceType: "url",
2501
- id: (_f2 = (_e2 = (_d2 = this.config).generateId) == null ? void 0 : _e2.call(_d2)) != null ? _f2 : (0, import_provider_utils9.generateId)(),
2502
- url: `file://${annotation.file_id}`,
2503
- title: (_h = (_g2 = annotation.quote) != null ? _g2 : annotation.filename) != null ? _h : "Document"
2504
- };
2505
- } else {
2642
+ var _a16, _b2, _c2, _d2, _e2, _f2, _g2, _h, _i, _j, _k, _l;
2643
+ console.log("\u{1F517} Processing annotation for source:", JSON.stringify(annotation, null, 2));
2644
+ try {
2645
+ if (annotation.type === "url_citation") {
2646
+ const urlSource = {
2647
+ sourceType: "url",
2648
+ id: (_c2 = (_b2 = (_a16 = this.config).generateId) == null ? void 0 : _b2.call(_a16)) != null ? _c2 : (0, import_provider_utils9.generateId)(),
2649
+ url: annotation.url,
2650
+ title: annotation.title
2651
+ };
2652
+ console.log("\u2705 Created URL source:", JSON.stringify(urlSource, null, 2));
2653
+ return urlSource;
2654
+ } else if (annotation.type === "file_citation") {
2655
+ const documentSource = {
2656
+ sourceType: "document",
2657
+ id: (_f2 = (_e2 = (_d2 = this.config).generateId) == null ? void 0 : _e2.call(_d2)) != null ? _f2 : (0, import_provider_utils9.generateId)(),
2658
+ mediaType: "text/plain",
2659
+ title: annotation.quote || annotation.filename || "Document",
2660
+ filename: annotation.filename,
2661
+ quote: annotation.quote
2662
+ };
2663
+ console.log("\u{1F4C4} Created document source:", JSON.stringify(documentSource, null, 2));
2664
+ return documentSource;
2665
+ } else {
2666
+ console.log("\u26A0\uFE0F Unknown annotation type in doGenerate:", annotation.type);
2667
+ return {
2668
+ sourceType: "url",
2669
+ id: (_i = (_h = (_g2 = this.config).generateId) == null ? void 0 : _h.call(_g2)) != null ? _i : (0, import_provider_utils9.generateId)(),
2670
+ url: "",
2671
+ title: "Unknown Source"
2672
+ };
2673
+ }
2674
+ } catch (error) {
2675
+ console.error("\u274C Error creating source in doGenerate:", {
2676
+ annotation,
2677
+ error: error instanceof Error ? error.message : String(error)
2678
+ });
2506
2679
  return {
2507
2680
  sourceType: "url",
2508
- id: (_k = (_j = (_i = this.config).generateId) == null ? void 0 : _j.call(_i)) != null ? _k : (0, import_provider_utils9.generateId)(),
2681
+ id: (_l = (_k = (_j = this.config).generateId) == null ? void 0 : _k.call(_j)) != null ? _l : (0, import_provider_utils9.generateId)(),
2509
2682
  url: "",
2510
- title: "Unknown Source"
2683
+ title: "Error Source"
2511
2684
  };
2512
2685
  }
2513
2686
  })
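
Note: in the rewritten source mapping above, url_citation annotations still become "url" sources, while file_citation annotations now become "document" sources (previously they were emitted as synthetic file:// URLs). A rough sketch of that mapping, using a hypothetical helper name and a stand-in generateId; the field names follow the code above:

    // Hypothetical helper mirroring the annotation-to-source mapping in doGenerate/doStream.
    function annotationToSource(annotation, generateId) {
      if (annotation.type === "url_citation") {
        return { sourceType: "url", id: generateId(), url: annotation.url, title: annotation.title };
      }
      if (annotation.type === "file_citation") {
        return {
          sourceType: "document",
          id: generateId(),
          mediaType: "text/plain",
          title: annotation.quote || annotation.filename || "Document",
          filename: annotation.filename,
          quote: annotation.quote
        };
      }
      // unknown annotation types fall back to an empty URL source
      return { sourceType: "url", id: generateId(), url: "", title: "Unknown Source" };
    }
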
@@ -2552,24 +2725,40 @@ var OpenAIResponsesLanguageModel = class {
2552
2725
  };
2553
2726
  }
2554
2727
  async doStream(options) {
2728
+ console.log("\u{1F680} Starting doStream with options:", JSON.stringify({
2729
+ modelId: this.modelId,
2730
+ hasAbortSignal: !!options.abortSignal
2731
+ }, null, 2));
2555
2732
  const { args: body, warnings } = this.getArgs(options);
2556
- const { responseHeaders, value: response } = await (0, import_provider_utils9.postJsonToApi)({
2557
- url: this.config.url({
2558
- path: "/responses",
2559
- modelId: this.modelId
2560
- }),
2561
- headers: (0, import_provider_utils9.combineHeaders)(this.config.headers(), options.headers),
2562
- body: {
2563
- ...body,
2564
- stream: true
2565
- },
2566
- failedResponseHandler: openaiFailedResponseHandler,
2567
- successfulResponseHandler: (0, import_provider_utils9.createEventSourceResponseHandler)(
2568
- openaiResponsesChunkSchema
2569
- ),
2570
- abortSignal: options.abortSignal,
2571
- fetch: this.config.fetch
2572
- });
2733
+ console.log("\u{1F4E4} Request body:", JSON.stringify(body, null, 2));
2734
+ let response;
2735
+ let responseHeaders;
2736
+ try {
2737
+ console.log("\u{1F4E1} Making API request...");
2738
+ const result = await (0, import_provider_utils9.postJsonToApi)({
2739
+ url: this.config.url({
2740
+ path: "/responses",
2741
+ modelId: this.modelId
2742
+ }),
2743
+ headers: (0, import_provider_utils9.combineHeaders)(this.config.headers(), options.headers),
2744
+ body: {
2745
+ ...body,
2746
+ stream: true
2747
+ },
2748
+ failedResponseHandler: openaiFailedResponseHandler,
2749
+ successfulResponseHandler: (0, import_provider_utils9.createEventSourceResponseHandler)(
2750
+ openaiResponsesChunkSchema
2751
+ ),
2752
+ abortSignal: options.abortSignal,
2753
+ fetch: this.config.fetch
2754
+ });
2755
+ response = result.value;
2756
+ responseHeaders = result.responseHeaders;
2757
+ console.log("\u2705 API request successful, starting stream processing");
2758
+ } catch (error) {
2759
+ console.error("\u274C API request failed:", error);
2760
+ throw error;
2761
+ }
2573
2762
  const self = this;
2574
2763
  let finishReason = "unknown";
2575
2764
  let promptTokens = NaN;
@@ -2583,172 +2772,214 @@ var OpenAIResponsesLanguageModel = class {
2583
2772
  stream: response.pipeThrough(
2584
2773
  new TransformStream({
2585
2774
  transform(chunk, controller) {
2586
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
2587
- if (!chunk.success) {
2588
- finishReason = "error";
2589
- controller.enqueue({ type: "error", error: chunk.error });
2590
- return;
2591
- }
2592
- const value = chunk.value;
2593
- if (isResponseOutputItemAddedChunk(value)) {
2594
- if (value.item.type === "function_call") {
2595
- ongoingToolCalls[value.output_index] = {
2596
- toolName: value.item.name,
2597
- toolCallId: value.item.call_id
2598
- };
2599
- controller.enqueue({
2600
- type: "tool-call-delta",
2601
- toolCallType: "function",
2602
- toolCallId: value.item.call_id,
2603
- toolName: value.item.name,
2604
- argsTextDelta: value.item.arguments
2605
- });
2606
- } else if (value.item.type === "web_search_call") {
2607
- ongoingToolCalls[value.output_index] = {
2608
- toolName: "web_search_preview",
2609
- toolCallId: value.item.id
2610
- };
2775
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
2776
+ try {
2777
+ console.log("\u{1F4E6} Processing chunk:", JSON.stringify(chunk, null, 2));
2778
+ if (!chunk.success) {
2779
+ console.error("\u274C Chunk parsing failed:", chunk.error);
2780
+ finishReason = "error";
2781
+ controller.enqueue({ type: "error", error: chunk.error });
2782
+ return;
2783
+ }
2784
+ const value = chunk.value;
2785
+ console.log("\u{1F4E5} Chunk value type:", value.type);
2786
+ if (isResponseOutputItemAddedChunk(value)) {
2787
+ console.log("\u{1F4DD} Output item added:", JSON.stringify(value, null, 2));
2788
+ if (value.item.type === "function_call") {
2789
+ ongoingToolCalls[value.output_index] = {
2790
+ toolName: value.item.name,
2791
+ toolCallId: value.item.call_id
2792
+ };
2793
+ controller.enqueue({
2794
+ type: "tool-call-delta",
2795
+ toolCallType: "function",
2796
+ toolCallId: value.item.call_id,
2797
+ toolName: value.item.name,
2798
+ argsTextDelta: value.item.arguments
2799
+ });
2800
+ } else if (value.item.type === "web_search_call") {
2801
+ ongoingToolCalls[value.output_index] = {
2802
+ toolName: "web_search_preview",
2803
+ toolCallId: value.item.id
2804
+ };
2805
+ controller.enqueue({
2806
+ type: "tool-call-delta",
2807
+ toolCallType: "function",
2808
+ toolCallId: value.item.id,
2809
+ toolName: "web_search_preview",
2810
+ argsTextDelta: JSON.stringify({ action: value.item.action })
2811
+ });
2812
+ } else if (value.item.type === "computer_call") {
2813
+ ongoingToolCalls[value.output_index] = {
2814
+ toolName: "computer_use",
2815
+ toolCallId: value.item.id
2816
+ };
2817
+ controller.enqueue({
2818
+ type: "tool-call-delta",
2819
+ toolCallType: "function",
2820
+ toolCallId: value.item.id,
2821
+ toolName: "computer_use",
2822
+ argsTextDelta: ""
2823
+ });
2824
+ } else if (value.item.type === "file_search_call") {
2825
+ ongoingToolCalls[value.output_index] = {
2826
+ toolName: "file_search",
2827
+ toolCallId: value.item.id
2828
+ };
2829
+ controller.enqueue({
2830
+ type: "tool-call-delta",
2831
+ toolCallType: "function",
2832
+ toolCallId: value.item.id,
2833
+ toolName: "file_search",
2834
+ argsTextDelta: ""
2835
+ });
2836
+ }
2837
+ } else if (isResponseFunctionCallArgumentsDeltaChunk(value)) {
2838
+ console.log("\u{1F527} Function call arguments delta:", JSON.stringify(value, null, 2));
2839
+ const toolCall = ongoingToolCalls[value.output_index];
2840
+ if (toolCall != null) {
2841
+ controller.enqueue({
2842
+ type: "tool-call-delta",
2843
+ toolCallType: "function",
2844
+ toolCallId: toolCall.toolCallId,
2845
+ toolName: toolCall.toolName,
2846
+ argsTextDelta: value.delta
2847
+ });
2848
+ }
2849
+ } else if (isResponseCreatedChunk(value)) {
2850
+ console.log("\u{1F680} Response created:", JSON.stringify(value, null, 2));
2851
+ responseId = value.response.id;
2611
2852
  controller.enqueue({
2612
- type: "tool-call-delta",
2613
- toolCallType: "function",
2614
- toolCallId: value.item.id,
2615
- toolName: "web_search_preview",
2616
- argsTextDelta: JSON.stringify({ action: value.item.action })
2853
+ type: "response-metadata",
2854
+ id: value.response.id,
2855
+ timestamp: new Date(value.response.created_at * 1e3),
2856
+ modelId: value.response.model
2617
2857
  });
2618
- } else if (value.item.type === "computer_call") {
2619
- ongoingToolCalls[value.output_index] = {
2620
- toolName: "computer_use",
2621
- toolCallId: value.item.id
2622
- };
2858
+ } else if (isTextDeltaChunk(value)) {
2859
+ console.log("\u{1F4DD} Text delta chunk:", JSON.stringify(value, null, 2));
2623
2860
  controller.enqueue({
2624
- type: "tool-call-delta",
2625
- toolCallType: "function",
2626
- toolCallId: value.item.id,
2627
- toolName: "computer_use",
2628
- argsTextDelta: ""
2861
+ type: "text-delta",
2862
+ textDelta: value.delta
2629
2863
  });
2630
- } else if (value.item.type === "file_search_call") {
2631
- ongoingToolCalls[value.output_index] = {
2632
- toolName: "file_search",
2633
- toolCallId: value.item.id
2634
- };
2864
+ } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
2865
+ console.log("\u{1F9E0} Reasoning summary delta:", JSON.stringify(value, null, 2));
2635
2866
  controller.enqueue({
2636
- type: "tool-call-delta",
2637
- toolCallType: "function",
2638
- toolCallId: value.item.id,
2639
- toolName: "file_search",
2640
- argsTextDelta: ""
2867
+ type: "reasoning",
2868
+ textDelta: value.delta
2641
2869
  });
2642
- }
2643
- } else if (isResponseFunctionCallArgumentsDeltaChunk(value)) {
2644
- const toolCall = ongoingToolCalls[value.output_index];
2645
- if (toolCall != null) {
2646
- controller.enqueue({
2647
- type: "tool-call-delta",
2648
- toolCallType: "function",
2649
- toolCallId: toolCall.toolCallId,
2650
- toolName: toolCall.toolName,
2651
- argsTextDelta: value.delta
2870
+ } else if (isResponseOutputItemDoneChunk(value)) {
2871
+ console.log("\u2705 Output item done:", JSON.stringify(value, null, 2));
2872
+ if (value.item.type === "function_call") {
2873
+ ongoingToolCalls[value.output_index] = void 0;
2874
+ hasToolCalls = true;
2875
+ controller.enqueue({
2876
+ type: "tool-call",
2877
+ toolCallType: "function",
2878
+ toolCallId: value.item.call_id,
2879
+ toolName: value.item.name,
2880
+ args: value.item.arguments
2881
+ });
2882
+ } else if (value.item.type === "web_search_call") {
2883
+ ongoingToolCalls[value.output_index] = void 0;
2884
+ hasToolCalls = true;
2885
+ controller.enqueue({
2886
+ type: "tool-call",
2887
+ toolCallType: "function",
2888
+ toolCallId: value.item.id,
2889
+ toolName: "web_search_preview",
2890
+ args: JSON.stringify({ action: value.item.action })
2891
+ });
2892
+ } else if (value.item.type === "computer_call") {
2893
+ ongoingToolCalls[value.output_index] = void 0;
2894
+ hasToolCalls = true;
2895
+ controller.enqueue({
2896
+ type: "tool-call",
2897
+ toolCallType: "function",
2898
+ toolCallId: value.item.id,
2899
+ toolName: "computer_use",
2900
+ args: ""
2901
+ });
2902
+ } else if (value.item.type === "file_search_call") {
2903
+ ongoingToolCalls[value.output_index] = void 0;
2904
+ hasToolCalls = true;
2905
+ controller.enqueue({
2906
+ type: "tool-call",
2907
+ toolCallType: "function",
2908
+ toolCallId: value.item.id,
2909
+ toolName: "file_search",
2910
+ args: JSON.stringify({
2911
+ queries: value.item.queries,
2912
+ results: value.item.results
2913
+ })
2914
+ });
2915
+ }
2916
+ } else if (isResponseFinishedChunk(value)) {
2917
+ console.log("\u{1F3C1} Response finished:", JSON.stringify(value, null, 2));
2918
+ finishReason = mapOpenAIResponseFinishReason({
2919
+ finishReason: (_a15 = value.response.incomplete_details) == null ? void 0 : _a15.reason,
2920
+ hasToolCalls
2652
2921
  });
2922
+ promptTokens = value.response.usage.input_tokens;
2923
+ completionTokens = value.response.usage.output_tokens;
2924
+ cachedPromptTokens = (_c = (_b = value.response.usage.input_tokens_details) == null ? void 0 : _b.cached_tokens) != null ? _c : cachedPromptTokens;
2925
+ reasoningTokens = (_e = (_d = value.response.usage.output_tokens_details) == null ? void 0 : _d.reasoning_tokens) != null ? _e : reasoningTokens;
2926
+ } else if (isResponseAnnotationAddedChunk(value)) {
2927
+ console.log("\u{1F50D} Processing annotation chunk:", JSON.stringify({
2928
+ type: value.type,
2929
+ annotation: value.annotation
2930
+ }, null, 2));
2931
+ try {
2932
+ if (value.annotation.type === "url_citation") {
2933
+ const urlSource = {
2934
+ sourceType: "url",
2935
+ id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : (0, import_provider_utils9.generateId)(),
2936
+ url: value.annotation.url,
2937
+ title: value.annotation.title
2938
+ };
2939
+ console.log("\u2705 Creating URL source:", JSON.stringify(urlSource, null, 2));
2940
+ controller.enqueue({
2941
+ type: "source",
2942
+ source: urlSource
2943
+ });
2944
+ console.log("\u2705 URL source enqueued successfully");
2945
+ } else if (value.annotation.type === "file_citation") {
2946
+ const documentSource = {
2947
+ sourceType: "document",
2948
+ id: (_k = (_j = (_i = self.config).generateId) == null ? void 0 : _j.call(_i)) != null ? _k : (0, import_provider_utils9.generateId)(),
2949
+ mediaType: "text/plain",
2950
+ title: value.annotation.quote || value.annotation.filename || "Document",
2951
+ filename: value.annotation.filename,
2952
+ quote: value.annotation.quote
2953
+ };
2954
+ console.log("\u{1F4C4} Creating document source:", JSON.stringify(documentSource, null, 2));
2955
+ controller.enqueue({
2956
+ type: "source",
2957
+ source: documentSource
2958
+ });
2959
+ console.log("\u2705 Document source enqueued successfully");
2960
+ } else {
2961
+ console.log("\u26A0\uFE0F Unknown annotation type:", value.annotation.type);
2962
+ }
2963
+ } catch (error) {
2964
+ console.error("\u274C Error processing annotation:", {
2965
+ annotation: value.annotation,
2966
+ error: error instanceof Error ? error.message : String(error),
2967
+ stack: error instanceof Error ? error.stack : void 0
2968
+ });
2969
+ }
2970
+ } else {
2971
+ console.log("\u2753 Unhandled chunk type:", value.type, JSON.stringify(value, null, 2));
2653
2972
  }
2654
- } else if (isResponseCreatedChunk(value)) {
2655
- responseId = value.response.id;
2656
- controller.enqueue({
2657
- type: "response-metadata",
2658
- id: value.response.id,
2659
- timestamp: new Date(value.response.created_at * 1e3),
2660
- modelId: value.response.model
2661
- });
2662
- } else if (isTextDeltaChunk(value)) {
2663
- controller.enqueue({
2664
- type: "text-delta",
2665
- textDelta: value.delta
2973
+ } catch (error) {
2974
+ console.error("\u{1F4A5} FATAL ERROR in chunk processing:", {
2975
+ error: error instanceof Error ? error.message : String(error),
2976
+ stack: error instanceof Error ? error.stack : void 0
2666
2977
  });
2667
- } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
2978
+ finishReason = "error";
2668
2979
  controller.enqueue({
2669
- type: "reasoning",
2670
- textDelta: value.delta
2980
+ type: "error",
2981
+ error: error instanceof Error ? error : new Error(String(error))
2671
2982
  });
2672
- } else if (isResponseOutputItemDoneChunk(value)) {
2673
- if (value.item.type === "function_call") {
2674
- ongoingToolCalls[value.output_index] = void 0;
2675
- hasToolCalls = true;
2676
- controller.enqueue({
2677
- type: "tool-call",
2678
- toolCallType: "function",
2679
- toolCallId: value.item.call_id,
2680
- toolName: value.item.name,
2681
- args: value.item.arguments
2682
- });
2683
- } else if (value.item.type === "web_search_call") {
2684
- ongoingToolCalls[value.output_index] = void 0;
2685
- hasToolCalls = true;
2686
- controller.enqueue({
2687
- type: "tool-call",
2688
- toolCallType: "function",
2689
- toolCallId: value.item.id,
2690
- toolName: "web_search_preview",
2691
- args: JSON.stringify({ action: value.item.action })
2692
- });
2693
- } else if (value.item.type === "computer_call") {
2694
- ongoingToolCalls[value.output_index] = void 0;
2695
- hasToolCalls = true;
2696
- controller.enqueue({
2697
- type: "tool-call",
2698
- toolCallType: "function",
2699
- toolCallId: value.item.id,
2700
- toolName: "computer_use",
2701
- args: ""
2702
- });
2703
- } else if (value.item.type === "file_search_call") {
2704
- ongoingToolCalls[value.output_index] = void 0;
2705
- hasToolCalls = true;
2706
- controller.enqueue({
2707
- type: "tool-call",
2708
- toolCallType: "function",
2709
- toolCallId: value.item.id,
2710
- toolName: "file_search",
2711
- args: JSON.stringify({
2712
- queries: value.item.queries,
2713
- results: value.item.results
2714
- })
2715
- });
2716
- }
2717
- } else if (isResponseFinishedChunk(value)) {
2718
- finishReason = mapOpenAIResponseFinishReason({
2719
- finishReason: (_a = value.response.incomplete_details) == null ? void 0 : _a.reason,
2720
- hasToolCalls
2721
- });
2722
- promptTokens = value.response.usage.input_tokens;
2723
- completionTokens = value.response.usage.output_tokens;
2724
- cachedPromptTokens = (_c = (_b = value.response.usage.input_tokens_details) == null ? void 0 : _b.cached_tokens) != null ? _c : cachedPromptTokens;
2725
- reasoningTokens = (_e = (_d = value.response.usage.output_tokens_details) == null ? void 0 : _d.reasoning_tokens) != null ? _e : reasoningTokens;
2726
- } else if (isResponseAnnotationAddedChunk(value)) {
2727
- console.log(JSON.stringify({
2728
- msg: "ai-sdk: source (stream)",
2729
- source: value.annotation
2730
- }));
2731
- if (value.annotation.type === "url_citation") {
2732
- controller.enqueue({
2733
- type: "source",
2734
- source: {
2735
- sourceType: "url",
2736
- id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : (0, import_provider_utils9.generateId)(),
2737
- url: value.annotation.url,
2738
- title: value.annotation.title
2739
- }
2740
- });
2741
- } else if (value.annotation.type === "file_citation") {
2742
- controller.enqueue({
2743
- type: "source",
2744
- source: {
2745
- sourceType: "url",
2746
- id: (_k = (_j = (_i = self.config).generateId) == null ? void 0 : _j.call(_i)) != null ? _k : (0, import_provider_utils9.generateId)(),
2747
- url: `file://${value.annotation.file_id}`,
2748
- title: (_m = (_l = value.annotation.quote) != null ? _l : value.annotation.filename) != null ? _m : "Document"
2749
- }
2750
- });
2751
- }
2752
2983
  }
2753
2984
  },
2754
2985
  flush(controller) {
@@ -3125,8 +3356,8 @@ var OpenAISpeechModel = class {
3125
3356
  };
3126
3357
  }
3127
3358
  async doGenerate(options) {
3128
- var _a, _b, _c;
3129
- const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
3359
+ var _a15, _b, _c;
3360
+ const currentDate = (_c = (_b = (_a15 = this.config._internal) == null ? void 0 : _a15.currentDate) == null ? void 0 : _b.call(_a15)) != null ? _c : /* @__PURE__ */ new Date();
3130
3361
  const { requestBody, warnings } = this.getArgs(options);
3131
3362
  const {
3132
3363
  value: audio,
@@ -3162,8 +3393,8 @@ var OpenAISpeechModel = class {
3162
3393
 
3163
3394
  // src/openai-provider.ts
3164
3395
  function createOpenAI(options = {}) {
3165
- var _a, _b, _c;
3166
- const baseURL = (_a = (0, import_provider_utils11.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
3396
+ var _a15, _b, _c;
3397
+ const baseURL = (_a15 = (0, import_provider_utils11.withoutTrailingSlash)(options.baseURL)) != null ? _a15 : "https://api.openai.com/v1";
3167
3398
  const compatibility = (_b = options.compatibility) != null ? _b : "compatible";
3168
3399
  const providerName = (_c = options.name) != null ? _c : "openai";
3169
3400
  const getHeaders = () => ({