@ai-sdk/openai 3.0.6 → 3.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -160,6 +160,352 @@ type OpenAIResponsesFileSearchToolCompoundFilter = {
  */
  filters: Array<OpenAIResponsesFileSearchToolComparisonFilter | OpenAIResponsesFileSearchToolCompoundFilter>;
  };
+ declare const openaiResponsesChunkSchema: _ai_sdk_provider_utils.LazySchema<{
+ type: "unknown_chunk";
+ message: string;
+ } | {
+ type: "response.output_text.delta";
+ item_id: string;
+ delta: string;
+ logprobs?: {
+ token: string;
+ logprob: number;
+ top_logprobs: {
+ token: string;
+ logprob: number;
+ }[];
+ }[] | null | undefined;
+ } | {
+ type: "response.completed" | "response.incomplete";
+ response: {
+ usage: {
+ input_tokens: number;
+ output_tokens: number;
+ input_tokens_details?: {
+ cached_tokens?: number | null | undefined;
+ } | null | undefined;
+ output_tokens_details?: {
+ reasoning_tokens?: number | null | undefined;
+ } | null | undefined;
+ };
+ incomplete_details?: {
+ reason: string;
+ } | null | undefined;
+ service_tier?: string | null | undefined;
+ };
+ } | {
+ type: "response.created";
+ response: {
+ id: string;
+ created_at: number;
+ model: string;
+ service_tier?: string | null | undefined;
+ };
+ } | {
+ type: "response.output_item.added";
+ output_index: number;
+ item: {
+ type: "message";
+ id: string;
+ } | {
+ type: "reasoning";
+ id: string;
+ encrypted_content?: string | null | undefined;
+ } | {
+ type: "function_call";
+ id: string;
+ call_id: string;
+ name: string;
+ arguments: string;
+ } | {
+ type: "web_search_call";
+ id: string;
+ status: string;
+ } | {
+ type: "computer_call";
+ id: string;
+ status: string;
+ } | {
+ type: "file_search_call";
+ id: string;
+ } | {
+ type: "image_generation_call";
+ id: string;
+ } | {
+ type: "code_interpreter_call";
+ id: string;
+ container_id: string;
+ code: string | null;
+ outputs: ({
+ type: "logs";
+ logs: string;
+ } | {
+ type: "image";
+ url: string;
+ })[] | null;
+ status: string;
+ } | {
+ type: "mcp_call";
+ id: string;
+ status: string;
+ approval_request_id?: string | null | undefined;
+ } | {
+ type: "mcp_list_tools";
+ id: string;
+ } | {
+ type: "mcp_approval_request";
+ id: string;
+ } | {
+ type: "apply_patch_call";
+ id: string;
+ call_id: string;
+ status: "completed" | "in_progress";
+ operation: {
+ type: "create_file";
+ path: string;
+ diff: string;
+ } | {
+ type: "delete_file";
+ path: string;
+ } | {
+ type: "update_file";
+ path: string;
+ diff: string;
+ };
+ } | {
+ type: "shell_call";
+ id: string;
+ call_id: string;
+ status: "completed" | "in_progress" | "incomplete";
+ action: {
+ commands: string[];
+ };
+ };
+ } | {
+ type: "response.output_item.done";
+ output_index: number;
+ item: {
+ type: "message";
+ id: string;
+ } | {
+ type: "reasoning";
+ id: string;
+ encrypted_content?: string | null | undefined;
+ } | {
+ type: "function_call";
+ id: string;
+ call_id: string;
+ name: string;
+ arguments: string;
+ status: "completed";
+ } | {
+ type: "code_interpreter_call";
+ id: string;
+ code: string | null;
+ container_id: string;
+ outputs: ({
+ type: "logs";
+ logs: string;
+ } | {
+ type: "image";
+ url: string;
+ })[] | null;
+ } | {
+ type: "image_generation_call";
+ id: string;
+ result: string;
+ } | {
+ type: "web_search_call";
+ id: string;
+ status: string;
+ action: {
+ type: "search";
+ query?: string | null | undefined;
+ sources?: ({
+ type: "url";
+ url: string;
+ } | {
+ type: "api";
+ name: string;
+ })[] | null | undefined;
+ } | {
+ type: "open_page";
+ url?: string | null | undefined;
+ } | {
+ type: "find_in_page";
+ url?: string | null | undefined;
+ pattern?: string | null | undefined;
+ };
+ } | {
+ type: "file_search_call";
+ id: string;
+ queries: string[];
+ results?: {
+ attributes: Record<string, string | number | boolean>;
+ file_id: string;
+ filename: string;
+ score: number;
+ text: string;
+ }[] | null | undefined;
+ } | {
+ type: "local_shell_call";
+ id: string;
+ call_id: string;
+ action: {
+ type: "exec";
+ command: string[];
+ timeout_ms?: number | undefined;
+ user?: string | undefined;
+ working_directory?: string | undefined;
+ env?: Record<string, string> | undefined;
+ };
+ } | {
+ type: "computer_call";
+ id: string;
+ status: "completed";
+ } | {
+ type: "mcp_call";
+ id: string;
+ status: string;
+ arguments: string;
+ name: string;
+ server_label: string;
+ output?: string | null | undefined;
+ error?: string | {
+ [x: string]: unknown;
+ type?: string | undefined;
+ code?: string | number | undefined;
+ message?: string | undefined;
+ } | null | undefined;
+ approval_request_id?: string | null | undefined;
+ } | {
+ type: "mcp_list_tools";
+ id: string;
+ server_label: string;
+ tools: {
+ name: string;
+ input_schema: any;
+ description?: string | undefined;
+ annotations?: Record<string, unknown> | undefined;
+ }[];
+ error?: string | {
+ [x: string]: unknown;
+ type?: string | undefined;
+ code?: string | number | undefined;
+ message?: string | undefined;
+ } | undefined;
+ } | {
+ type: "mcp_approval_request";
+ id: string;
+ server_label: string;
+ name: string;
+ arguments: string;
+ approval_request_id?: string | undefined;
+ } | {
+ type: "apply_patch_call";
+ id: string;
+ call_id: string;
+ status: "completed" | "in_progress";
+ operation: {
+ type: "create_file";
+ path: string;
+ diff: string;
+ } | {
+ type: "delete_file";
+ path: string;
+ } | {
+ type: "update_file";
+ path: string;
+ diff: string;
+ };
+ } | {
+ type: "shell_call";
+ id: string;
+ call_id: string;
+ status: "completed" | "in_progress" | "incomplete";
+ action: {
+ commands: string[];
+ };
+ };
+ } | {
+ type: "response.function_call_arguments.delta";
+ item_id: string;
+ output_index: number;
+ delta: string;
+ } | {
+ type: "response.image_generation_call.partial_image";
+ item_id: string;
+ output_index: number;
+ partial_image_b64: string;
+ } | {
+ type: "response.code_interpreter_call_code.delta";
+ item_id: string;
+ output_index: number;
+ delta: string;
+ } | {
+ type: "response.code_interpreter_call_code.done";
+ item_id: string;
+ output_index: number;
+ code: string;
+ } | {
+ type: "response.output_text.annotation.added";
+ annotation: {
+ type: "url_citation";
+ start_index: number;
+ end_index: number;
+ url: string;
+ title: string;
+ } | {
+ type: "file_citation";
+ file_id: string;
+ filename: string;
+ index: number;
+ } | {
+ type: "container_file_citation";
+ container_id: string;
+ file_id: string;
+ filename: string;
+ start_index: number;
+ end_index: number;
+ } | {
+ type: "file_path";
+ file_id: string;
+ index: number;
+ };
+ } | {
+ type: "response.reasoning_summary_part.added";
+ item_id: string;
+ summary_index: number;
+ } | {
+ type: "response.reasoning_summary_text.delta";
+ item_id: string;
+ summary_index: number;
+ delta: string;
+ } | {
+ type: "response.reasoning_summary_part.done";
+ item_id: string;
+ summary_index: number;
+ } | {
+ type: "response.apply_patch_call_operation_diff.delta";
+ item_id: string;
+ output_index: number;
+ delta: string;
+ obfuscation?: string | null | undefined;
+ } | {
+ type: "response.apply_patch_call_operation_diff.done";
+ item_id: string;
+ output_index: number;
+ diff: string;
+ } | {
+ type: "error";
+ sequence_number: number;
+ error: {
+ type: string;
+ code: string;
+ message: string;
+ param?: string | null | undefined;
+ };
+ }>;

  /**
  * Type definitions for the apply_patch operations.
@@ -559,6 +905,34 @@ Default OpenAI provider instance.
  */
  declare const openai: OpenAIProvider;

+ type OpenaiResponsesChunk = InferSchema<typeof openaiResponsesChunkSchema>;
+ type ResponsesOutputTextAnnotationProviderMetadata = Extract<OpenaiResponsesChunk, {
+ type: 'response.output_text.annotation.added';
+ }>['annotation'];
+ type ResponsesTextProviderMetadata = {
+ itemId: string;
+ annotations?: Array<ResponsesOutputTextAnnotationProviderMetadata>;
+ };
+ type OpenaiResponsesTextProviderMetadata = {
+ openai: ResponsesTextProviderMetadata;
+ };
+ type ResponsesSourceDocumentProviderMetadata = {
+ type: 'file_citation';
+ fileId: string;
+ index: number;
+ } | {
+ type: 'container_file_citation';
+ fileId: string;
+ containerId: string;
+ } | {
+ type: 'file_path';
+ fileId: string;
+ index: number;
+ };
+ type OpenaiResponsesSourceDocumentProviderMetadata = {
+ openai: ResponsesSourceDocumentProviderMetadata;
+ };
+
  declare const VERSION: string;

- export { type OpenAIChatLanguageModelOptions, type OpenAIProvider, type OpenAIProviderSettings, type OpenAIResponsesProviderOptions, VERSION, createOpenAI, openai };
+ export { type OpenAIChatLanguageModelOptions, type OpenAIProvider, type OpenAIProviderSettings, type OpenAIResponsesProviderOptions, type OpenaiResponsesSourceDocumentProviderMetadata, type OpenaiResponsesTextProviderMetadata, VERSION, createOpenAI, openai };
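
The two newly exported metadata types describe what the Responses models attach to text and document-source parts under the provider key (openai for the default provider). A minimal consumer-side sketch, assuming only the declarations exported above; the describeSource and countAnnotations helpers are illustrative and not part of the package:

import type {
  OpenaiResponsesSourceDocumentProviderMetadata,
  OpenaiResponsesTextProviderMetadata,
} from '@ai-sdk/openai';

// Narrow the document-source metadata by its annotation type.
function describeSource(
  metadata: OpenaiResponsesSourceDocumentProviderMetadata,
): string {
  const openai = metadata.openai;
  switch (openai.type) {
    case 'file_citation':
      return `file ${openai.fileId}, annotation index ${openai.index}`;
    case 'container_file_citation':
      return `file ${openai.fileId} in container ${openai.containerId}`;
    case 'file_path':
      return `file path ${openai.fileId}, annotation index ${openai.index}`;
  }
}

// Text parts carry the originating item id plus any output_text annotations.
function countAnnotations(
  metadata: OpenaiResponsesTextProviderMetadata,
): number {
  return metadata.openai.annotations?.length ?? 0;
}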
package/dist/index.js CHANGED
@@ -1751,11 +1751,16 @@ var modelMaxImagesPerCall = {
  "gpt-image-1-mini": 10,
  "gpt-image-1.5": 10
  };
- var hasDefaultResponseFormat = /* @__PURE__ */ new Set([
- "gpt-image-1",
+ var defaultResponseFormatPrefixes = [
  "gpt-image-1-mini",
- "gpt-image-1.5"
- ]);
+ "gpt-image-1.5",
+ "gpt-image-1"
+ ];
+ function hasDefaultResponseFormat(modelId) {
+ return defaultResponseFormatPrefixes.some(
+ (prefix) => modelId.startsWith(prefix)
+ );
+ }

  // src/image/openai-image-model.ts
  var OpenAIImageModel = class {
@@ -1874,7 +1879,7 @@ var OpenAIImageModel = class {
  n,
  size,
  ...(_h = providerOptions.openai) != null ? _h : {},
- ...!hasDefaultResponseFormat.has(this.modelId) ? { response_format: "b64_json" } : {}
+ ...!hasDefaultResponseFormat(this.modelId) ? { response_format: "b64_json" } : {}
  },
  failedResponseHandler: openaiFailedResponseHandler,
  successfulResponseHandler: (0, import_provider_utils13.createJsonResponseHandler)(
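
The exact-match Set is replaced by a prefix check, so dated or suffixed gpt-image model ids also skip the explicit response_format parameter. A short TypeScript sketch of the behavioral difference (the dated model id is invented purely for illustration):

const defaultResponseFormatPrefixes = ['gpt-image-1-mini', 'gpt-image-1.5', 'gpt-image-1'];

function hasDefaultResponseFormat(modelId: string): boolean {
  return defaultResponseFormatPrefixes.some((prefix) => modelId.startsWith(prefix));
}

hasDefaultResponseFormat('gpt-image-1');            // true in 3.0.6 (Set lookup) and 3.0.8 (prefix match)
hasDefaultResponseFormat('gpt-image-1-2025-06-01'); // false in 3.0.6, true in 3.0.8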
@@ -3231,25 +3236,21 @@ var openaiResponsesChunkSchema = (0, import_provider_utils24.lazySchema)(
  import_v419.z.object({
  type: import_v419.z.literal("file_citation"),
  file_id: import_v419.z.string(),
- filename: import_v419.z.string().nullish(),
- index: import_v419.z.number().nullish(),
- start_index: import_v419.z.number().nullish(),
- end_index: import_v419.z.number().nullish(),
- quote: import_v419.z.string().nullish()
+ filename: import_v419.z.string(),
+ index: import_v419.z.number()
  }),
  import_v419.z.object({
  type: import_v419.z.literal("container_file_citation"),
  container_id: import_v419.z.string(),
  file_id: import_v419.z.string(),
- filename: import_v419.z.string().nullish(),
- start_index: import_v419.z.number().nullish(),
- end_index: import_v419.z.number().nullish(),
- index: import_v419.z.number().nullish()
+ filename: import_v419.z.string(),
+ start_index: import_v419.z.number(),
+ end_index: import_v419.z.number()
  }),
  import_v419.z.object({
  type: import_v419.z.literal("file_path"),
  file_id: import_v419.z.string(),
- index: import_v419.z.number().nullish()
+ index: import_v419.z.number()
  })
  ])
  }),
@@ -3346,25 +3347,21 @@ var openaiResponsesResponseSchema = (0, import_provider_utils24.lazySchema)(
  import_v419.z.object({
  type: import_v419.z.literal("file_citation"),
  file_id: import_v419.z.string(),
- filename: import_v419.z.string().nullish(),
- index: import_v419.z.number().nullish(),
- start_index: import_v419.z.number().nullish(),
- end_index: import_v419.z.number().nullish(),
- quote: import_v419.z.string().nullish()
+ filename: import_v419.z.string(),
+ index: import_v419.z.number()
  }),
  import_v419.z.object({
  type: import_v419.z.literal("container_file_citation"),
  container_id: import_v419.z.string(),
  file_id: import_v419.z.string(),
- filename: import_v419.z.string().nullish(),
- start_index: import_v419.z.number().nullish(),
- end_index: import_v419.z.number().nullish(),
- index: import_v419.z.number().nullish()
+ filename: import_v419.z.string(),
+ start_index: import_v419.z.number(),
+ end_index: import_v419.z.number()
  }),
  import_v419.z.object({
  type: import_v419.z.literal("file_path"),
  file_id: import_v419.z.string(),
- index: import_v419.z.number().nullish()
+ index: import_v419.z.number()
  })
  ])
  )
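
Both the chunk schema and the response schema tighten the citation annotations: filename, index, start_index and end_index are now required rather than nullish, and the unused quote field is dropped. Restated as plain TypeScript shapes (a sketch derived from the Zod objects above, not code shipped in the package):

type FileCitationAnnotation = {
  type: 'file_citation';
  file_id: string;
  filename: string; // required in 3.0.8; was .nullish() in 3.0.6
  index: number;    // required in 3.0.8
};

type ContainerFileCitationAnnotation = {
  type: 'container_file_citation';
  container_id: string;
  file_id: string;
  filename: string;
  start_index: number; // required in 3.0.8
  end_index: number;   // required in 3.0.8
};

type FilePathAnnotation = {
  type: 'file_path';
  file_id: string;
  index: number; // required in 3.0.8
};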
@@ -4230,7 +4227,7 @@ var OpenAIResponsesLanguageModel = class {
  };
  }
  async doGenerate(options) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y;
  const {
  args: body,
  warnings,
@@ -4377,29 +4374,29 @@ var OpenAIResponsesLanguageModel = class {
  sourceType: "document",
  id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils27.generateId)(),
  mediaType: "text/plain",
- title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
- filename: (_l = annotation.filename) != null ? _l : annotation.file_id,
- ...annotation.file_id ? {
- providerMetadata: {
- [providerOptionsName]: {
- fileId: annotation.file_id
- }
+ title: annotation.filename,
+ filename: annotation.filename,
+ providerMetadata: {
+ [providerOptionsName]: {
+ type: annotation.type,
+ fileId: annotation.file_id,
+ index: annotation.index
  }
- } : {}
+ }
  });
  } else if (annotation.type === "container_file_citation") {
  content.push({
  type: "source",
  sourceType: "document",
- id: (_o = (_n = (_m = this.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : (0, import_provider_utils27.generateId)(),
+ id: (_l = (_k = (_j = this.config).generateId) == null ? void 0 : _k.call(_j)) != null ? _l : (0, import_provider_utils27.generateId)(),
  mediaType: "text/plain",
- title: (_q = (_p = annotation.filename) != null ? _p : annotation.file_id) != null ? _q : "Document",
- filename: (_r = annotation.filename) != null ? _r : annotation.file_id,
+ title: annotation.filename,
+ filename: annotation.filename,
  providerMetadata: {
  [providerOptionsName]: {
+ type: annotation.type,
  fileId: annotation.file_id,
- containerId: annotation.container_id,
- ...annotation.index != null ? { index: annotation.index } : {}
+ containerId: annotation.container_id
  }
  }
  });
@@ -4407,14 +4404,15 @@ var OpenAIResponsesLanguageModel = class {
  content.push({
  type: "source",
  sourceType: "document",
- id: (_u = (_t = (_s = this.config).generateId) == null ? void 0 : _t.call(_s)) != null ? _u : (0, import_provider_utils27.generateId)(),
+ id: (_o = (_n = (_m = this.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : (0, import_provider_utils27.generateId)(),
  mediaType: "application/octet-stream",
  title: annotation.file_id,
  filename: annotation.file_id,
  providerMetadata: {
  [providerOptionsName]: {
+ type: annotation.type,
  fileId: annotation.file_id,
- ...annotation.index != null ? { index: annotation.index } : {}
+ index: annotation.index
  }
  }
  });
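
With the required annotation fields in place, doGenerate now always attaches providerMetadata to the document sources it emits, and the metadata carries the annotation type alongside fileId and index or containerId. A hypothetical example of a resulting source part for the default provider (field names follow the code above; the literal values are invented):

const exampleFileCitationSource = {
  type: 'source',
  sourceType: 'document',
  mediaType: 'text/plain',
  title: 'report.pdf',       // now taken directly from annotation.filename
  filename: 'report.pdf',
  providerMetadata: {
    openai: {
      type: 'file_citation', // new in 3.0.8
      fileId: 'file-abc123',
      index: 0,
    },
  },
} as const;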
@@ -4459,7 +4457,7 @@ var OpenAIResponsesLanguageModel = class {
  break;
  }
  case "mcp_call": {
- const toolCallId = part.approval_request_id != null ? (_v = approvalRequestIdToDummyToolCallIdFromPrompt[part.approval_request_id]) != null ? _v : part.id : part.id;
+ const toolCallId = part.approval_request_id != null ? (_p = approvalRequestIdToDummyToolCallIdFromPrompt[part.approval_request_id]) != null ? _p : part.id : part.id;
  const toolName = `mcp.${part.name}`;
  content.push({
  type: "tool-call",
@@ -4493,8 +4491,8 @@
  break;
  }
  case "mcp_approval_request": {
- const approvalRequestId = (_w = part.approval_request_id) != null ? _w : part.id;
- const dummyToolCallId = (_z = (_y = (_x = this.config).generateId) == null ? void 0 : _y.call(_x)) != null ? _z : (0, import_provider_utils27.generateId)();
+ const approvalRequestId = (_q = part.approval_request_id) != null ? _q : part.id;
+ const dummyToolCallId = (_t = (_s = (_r = this.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : (0, import_provider_utils27.generateId)();
  const toolName = `mcp.${part.name}`;
  content.push({
  type: "tool-call",
@@ -4544,13 +4542,13 @@
  toolName: toolNameMapping.toCustomToolName("file_search"),
  result: {
  queries: part.queries,
- results: (_B = (_A = part.results) == null ? void 0 : _A.map((result) => ({
+ results: (_v = (_u = part.results) == null ? void 0 : _u.map((result) => ({
  attributes: result.attributes,
  fileId: result.file_id,
  filename: result.filename,
  score: result.score,
  text: result.text
- }))) != null ? _B : null
+ }))) != null ? _v : null
  }
  });
  break;
@@ -4609,10 +4607,10 @@
  content,
  finishReason: {
  unified: mapOpenAIResponseFinishReason({
- finishReason: (_C = response.incomplete_details) == null ? void 0 : _C.reason,
+ finishReason: (_w = response.incomplete_details) == null ? void 0 : _w.reason,
  hasFunctionCall
  }),
- raw: (_E = (_D = response.incomplete_details) == null ? void 0 : _D.reason) != null ? _E : void 0
+ raw: (_y = (_x = response.incomplete_details) == null ? void 0 : _x.reason) != null ? _y : void 0
  },
  usage: convertOpenAIResponsesUsage(usage),
  request: { body },
@@ -4675,7 +4673,7 @@
  controller.enqueue({ type: "stream-start", warnings });
  },
  transform(chunk, controller) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E, _F, _G, _H, _I, _J;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D;
  if (options.includeRawChunks) {
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
  }
@@ -5272,29 +5270,29 @@
  sourceType: "document",
  id: (_x = (_w = (_v = self.config).generateId) == null ? void 0 : _w.call(_v)) != null ? _x : (0, import_provider_utils27.generateId)(),
  mediaType: "text/plain",
- title: (_z = (_y = value.annotation.quote) != null ? _y : value.annotation.filename) != null ? _z : "Document",
- filename: (_A = value.annotation.filename) != null ? _A : value.annotation.file_id,
- ...value.annotation.file_id ? {
- providerMetadata: {
- [providerOptionsName]: {
- fileId: value.annotation.file_id
- }
+ title: value.annotation.filename,
+ filename: value.annotation.filename,
+ providerMetadata: {
+ [providerOptionsName]: {
+ type: value.annotation.type,
+ fileId: value.annotation.file_id,
+ index: value.annotation.index
  }
- } : {}
+ }
  });
  } else if (value.annotation.type === "container_file_citation") {
  controller.enqueue({
  type: "source",
  sourceType: "document",
- id: (_D = (_C = (_B = self.config).generateId) == null ? void 0 : _C.call(_B)) != null ? _D : (0, import_provider_utils27.generateId)(),
+ id: (_A = (_z = (_y = self.config).generateId) == null ? void 0 : _z.call(_y)) != null ? _A : (0, import_provider_utils27.generateId)(),
  mediaType: "text/plain",
- title: (_F = (_E = value.annotation.filename) != null ? _E : value.annotation.file_id) != null ? _F : "Document",
- filename: (_G = value.annotation.filename) != null ? _G : value.annotation.file_id,
+ title: value.annotation.filename,
+ filename: value.annotation.filename,
  providerMetadata: {
  [providerOptionsName]: {
+ type: value.annotation.type,
  fileId: value.annotation.file_id,
- containerId: value.annotation.container_id,
- ...value.annotation.index != null ? { index: value.annotation.index } : {}
+ containerId: value.annotation.container_id
  }
  }
  });
@@ -5302,14 +5300,15 @@
  controller.enqueue({
  type: "source",
  sourceType: "document",
- id: (_J = (_I = (_H = self.config).generateId) == null ? void 0 : _I.call(_H)) != null ? _J : (0, import_provider_utils27.generateId)(),
+ id: (_D = (_C = (_B = self.config).generateId) == null ? void 0 : _C.call(_B)) != null ? _D : (0, import_provider_utils27.generateId)(),
  mediaType: "application/octet-stream",
  title: value.annotation.file_id,
  filename: value.annotation.file_id,
  providerMetadata: {
  [providerOptionsName]: {
+ type: value.annotation.type,
  fileId: value.annotation.file_id,
- ...value.annotation.index != null ? { index: value.annotation.index } : {}
+ index: value.annotation.index
  }
  }
  });
@@ -5761,7 +5760,7 @@ var OpenAITranscriptionModel = class {
  };

  // src/version.ts
- var VERSION = true ? "3.0.6" : "0.0.0-test";
+ var VERSION = true ? "3.0.8" : "0.0.0-test";

  // src/openai-provider.ts
  function createOpenAI(options = {}) {