@ai-sdk/openai 2.0.0-alpha.9 → 2.0.0-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +83 -0
- package/dist/index.d.mts +77 -75
- package/dist/index.d.ts +77 -75
- package/dist/index.js +745 -389
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +690 -334
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +20 -8
- package/dist/internal/index.d.ts +20 -8
- package/dist/internal/index.js +739 -365
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +687 -313
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/internal/index.mjs
CHANGED
@@ -11,7 +11,7 @@ import {
   parseProviderOptions,
   postJsonToApi
 } from "@ai-sdk/provider-utils";
-import { z as
+import { z as z5 } from "zod";
 
 // src/convert-to-openai-chat-messages.ts
 import {
@@ -147,7 +147,7 @@ function convertToOpenAIChatMessages({
           type: "function",
           function: {
             name: part.toolName,
-            arguments: JSON.stringify(part.
+            arguments: JSON.stringify(part.input)
           }
         });
         break;
@@ -163,10 +163,23 @@ function convertToOpenAIChatMessages({
     }
     case "tool": {
       for (const toolResponse of content) {
+        const output = toolResponse.output;
+        let contentValue;
+        switch (output.type) {
+          case "text":
+          case "error-text":
+            contentValue = output.value;
+            break;
+          case "content":
+          case "json":
+          case "error-json":
+            contentValue = JSON.stringify(output.value);
+            break;
+        }
         messages.push({
           role: "tool",
           tool_call_id: toolResponse.toolCallId,
-          content:
+          content: contentValue
         });
       }
       break;
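Note on the hunk above: tool results now arrive as structured `output` objects and are flattened into the string `content` of the OpenAI tool message. The following sketch is illustrative only (it is not part of the package) and shows how a JSON tool output would end up serialized; the `toolResponse` value is made up.

// Illustrative only: a structured tool output flattened to a string, as in the switch added above.
const toolResponse = {
  toolCallId: "call_123",
  output: { type: "json", value: { temperature: 21, unit: "C" } }
};
let contentValue;
switch (toolResponse.output.type) {
  case "text":
  case "error-text":
    contentValue = toolResponse.output.value;
    break;
  case "content":
  case "json":
  case "error-json":
    contentValue = JSON.stringify(toolResponse.output.value);
    break;
}
// -> { role: "tool", tool_call_id: "call_123", content: '{"temperature":21,"unit":"C"}' }
console.log({ role: "tool", tool_call_id: toolResponse.toolCallId, content: contentValue });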
@@ -264,7 +277,14 @@ var openaiProviderOptions = z.object({
    *
    * @default true
    */
-  structuredOutputs: z.boolean().optional()
+  structuredOutputs: z.boolean().optional(),
+  /**
+   * Service tier for the request. Set to 'flex' for 50% cheaper processing
+   * at the cost of increased latency. Only available for o3 and o4-mini models.
+   *
+   * @default 'auto'
+   */
+  serviceTier: z.enum(["auto", "flex"]).optional()
 });
 
 // src/openai-error.ts
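Note: the new `serviceTier` chat provider option accepts `'auto'` or `'flex'`; later hunks send it as `service_tier` and drop it with a warning on models other than o3 / o4-mini. A minimal usage sketch, assuming the standard AI SDK call shape (`generateText` and `providerOptions.openai` are not shown in this diff):

// Usage sketch only; assumes the option is read from providerOptions.openai.
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const { text } = await generateText({
  model: openai("o4-mini"),
  prompt: "Summarize this changelog.",
  providerOptions: {
    openai: { serviceTier: "flex" } // 'auto' (default) or 'flex'
  }
});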
@@ -290,6 +310,76 @@ var openaiFailedResponseHandler = createJsonErrorResponseHandler({
 import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError2
 } from "@ai-sdk/provider";
+
+// src/tool/file-search.ts
+import { createProviderDefinedToolFactory } from "@ai-sdk/provider-utils";
+import { z as z3 } from "zod";
+var fileSearchArgsSchema = z3.object({
+  /**
+   * List of vector store IDs to search through. If not provided, searches all available vector stores.
+   */
+  vectorStoreIds: z3.array(z3.string()).optional(),
+  /**
+   * Maximum number of search results to return. Defaults to 10.
+   */
+  maxResults: z3.number().optional(),
+  /**
+   * Type of search to perform. Defaults to 'auto'.
+   */
+  searchType: z3.enum(["auto", "keyword", "semantic"]).optional()
+});
+var fileSearch = createProviderDefinedToolFactory({
+  id: "openai.file_search",
+  name: "file_search",
+  inputSchema: z3.object({
+    query: z3.string()
+  })
+});
+
+// src/tool/web-search-preview.ts
+import { createProviderDefinedToolFactory as createProviderDefinedToolFactory2 } from "@ai-sdk/provider-utils";
+import { z as z4 } from "zod";
+var webSearchPreviewArgsSchema = z4.object({
+  /**
+   * Search context size to use for the web search.
+   * - high: Most comprehensive context, highest cost, slower response
+   * - medium: Balanced context, cost, and latency (default)
+   * - low: Least context, lowest cost, fastest response
+   */
+  searchContextSize: z4.enum(["low", "medium", "high"]).optional(),
+  /**
+   * User location information to provide geographically relevant search results.
+   */
+  userLocation: z4.object({
+    /**
+     * Type of location (always 'approximate')
+     */
+    type: z4.literal("approximate"),
+    /**
+     * Two-letter ISO country code (e.g., 'US', 'GB')
+     */
+    country: z4.string().optional(),
+    /**
+     * City name (free text, e.g., 'Minneapolis')
+     */
+    city: z4.string().optional(),
+    /**
+     * Region name (free text, e.g., 'Minnesota')
+     */
+    region: z4.string().optional(),
+    /**
+     * IANA timezone (e.g., 'America/Chicago')
+     */
+    timezone: z4.string().optional()
+  }).optional()
+});
+var webSearchPreview = createProviderDefinedToolFactory2({
+  id: "openai.web_search_preview",
+  name: "web_search_preview",
+  inputSchema: z4.object({})
+});
+
+// src/openai-prepare-tools.ts
 function prepareTools({
   tools,
   toolChoice,
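Note: `file_search` and `web_search_preview` are registered as provider-defined tools (ids `openai.file_search` and `openai.web_search_preview`). How the factories are re-exported publicly is not visible in this internal bundle, so the `openai.tools.*` accessors below are an assumption; the argument shapes follow the schemas above:

// Hypothetical usage sketch; openai.tools.fileSearch / webSearchPreview are assumptions.
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const result = await generateText({
  model: openai("gpt-4o-mini"),
  prompt: "What changed in the pricing docs this quarter?",
  tools: {
    file_search: openai.tools.fileSearch({
      vectorStoreIds: ["vs_123"], // searches all vector stores when omitted
      maxResults: 5,
      searchType: "semantic"      // 'auto' | 'keyword' | 'semantic'
    }),
    web_search_preview: openai.tools.webSearchPreview({
      searchContextSize: "medium",
      userLocation: { type: "approximate", country: "US", city: "Minneapolis" }
    })
  }
});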
@@ -302,18 +392,47 @@ function prepareTools({
   }
   const openaiTools = [];
   for (const tool of tools) {
-
-
-
-
-
-
-
-
-
-
+    switch (tool.type) {
+      case "function":
+        openaiTools.push({
+          type: "function",
+          function: {
+            name: tool.name,
+            description: tool.description,
+            parameters: tool.inputSchema,
+            strict: structuredOutputs ? true : void 0
+          }
+        });
+        break;
+      case "provider-defined":
+        switch (tool.id) {
+          case "openai.file_search": {
+            const args = fileSearchArgsSchema.parse(tool.args);
+            openaiTools.push({
+              type: "file_search",
+              vector_store_ids: args.vectorStoreIds,
+              max_results: args.maxResults,
+              search_type: args.searchType
+            });
+            break;
+          }
+          case "openai.web_search_preview": {
+            const args = webSearchPreviewArgsSchema.parse(tool.args);
+            openaiTools.push({
+              type: "web_search_preview",
+              search_context_size: args.searchContextSize,
+              user_location: args.userLocation
+            });
+            break;
+          }
+          default:
+            toolWarnings.push({ type: "unsupported-tool", tool });
+            break;
         }
-
+        break;
+      default:
+        toolWarnings.push({ type: "unsupported-tool", tool });
+        break;
     }
   }
   if (toolChoice == null) {
@@ -437,6 +556,7 @@ var OpenAIChatLanguageModel = class {
       metadata: openaiOptions.metadata,
       prediction: openaiOptions.prediction,
       reasoning_effort: openaiOptions.reasoningEffort,
+      service_tier: openaiOptions.serviceTier,
       // messages:
       messages
     };
@@ -510,6 +630,14 @@ var OpenAIChatLanguageModel = class {
         });
       }
     }
+    if (openaiOptions.serviceTier === "flex" && !supportsFlexProcessing(this.modelId)) {
+      warnings.push({
+        type: "unsupported-setting",
+        setting: "serviceTier",
+        details: "flex processing is only available for o3 and o4-mini models"
+      });
+      baseArgs.service_tier = void 0;
+    }
     const {
       tools: openaiTools,
       toolChoice: openaiToolChoice,
@@ -558,10 +686,9 @@ var OpenAIChatLanguageModel = class {
     for (const toolCall of (_a = choice.message.tool_calls) != null ? _a : []) {
       content.push({
         type: "tool-call",
-        toolCallType: "function",
         toolCallId: (_b = toolCall.id) != null ? _b : generateId(),
         toolName: toolCall.function.name,
-
+        input: toolCall.function.arguments
       });
     }
     const completionTokenDetails = (_c = response.usage) == null ? void 0 : _c.completion_tokens_details;
@@ -627,6 +754,7 @@ var OpenAIChatLanguageModel = class {
       totalTokens: void 0
     };
     let isFirstChunk = true;
+    let isActiveText = false;
    const providerMetadata = { openai: {} };
    return {
      stream: response.pipeThrough(
@@ -636,6 +764,9 @@ var OpenAIChatLanguageModel = class {
          },
          transform(chunk, controller) {
            var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x;
+            if (options.includeRawChunks) {
+              controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+            }
            if (!chunk.success) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: chunk.error });
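Note: when `options.includeRawChunks` is set, the chat, completion, and responses stream transforms now forward every provider chunk as a `{ type: "raw", rawValue }` part before normal processing. A sketch of consuming these parts follows; how the flag is surfaced by the calling SDK is an assumption here.

// Sketch only; the includeRawChunks flag on streamText is an assumption.
import { streamText } from "ai";
import { openai } from "@ai-sdk/openai";

const result = streamText({
  model: openai("gpt-4o-mini"),
  prompt: "Hello!",
  includeRawChunks: true
});

for await (const part of result.fullStream) {
  if (part.type === "raw") {
    console.log("raw OpenAI chunk:", part.rawValue);
  }
}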
@@ -679,9 +810,14 @@ var OpenAIChatLanguageModel = class {
            }
            const delta = choice.delta;
            if (delta.content != null) {
+              if (!isActiveText) {
+                controller.enqueue({ type: "text-start", id: "0" });
+                isActiveText = true;
+              }
              controller.enqueue({
-                type: "text",
-
+                type: "text-delta",
+                id: "0",
+                delta: delta.content
              });
            }
            if (delta.tool_calls != null) {
@@ -706,6 +842,11 @@ var OpenAIChatLanguageModel = class {
                    message: `Expected 'function.name' to be a string.`
                  });
                }
+                controller.enqueue({
+                  type: "tool-input-start",
+                  id: toolCallDelta.id,
+                  toolName: toolCallDelta.function.name
+                });
                toolCalls[index] = {
                  id: toolCallDelta.id,
                  type: "function",
@@ -719,20 +860,21 @@ var OpenAIChatLanguageModel = class {
                if (((_o = toolCall2.function) == null ? void 0 : _o.name) != null && ((_p = toolCall2.function) == null ? void 0 : _p.arguments) != null) {
                  if (toolCall2.function.arguments.length > 0) {
                    controller.enqueue({
-                      type: "tool-
-
-
-                      toolName: toolCall2.function.name,
-                      argsTextDelta: toolCall2.function.arguments
+                      type: "tool-input-delta",
+                      id: toolCall2.id,
+                      delta: toolCall2.function.arguments
                    });
                  }
                  if (isParsableJson(toolCall2.function.arguments)) {
+                    controller.enqueue({
+                      type: "tool-input-end",
+                      id: toolCall2.id
+                    });
                    controller.enqueue({
                      type: "tool-call",
-                      toolCallType: "function",
                      toolCallId: (_q = toolCall2.id) != null ? _q : generateId(),
                      toolName: toolCall2.function.name,
-
+                      input: toolCall2.function.arguments
                    });
                    toolCall2.hasFinished = true;
                  }
@@ -747,19 +889,20 @@ var OpenAIChatLanguageModel = class {
                  toolCall.function.arguments += (_t = (_s = toolCallDelta.function) == null ? void 0 : _s.arguments) != null ? _t : "";
                }
                controller.enqueue({
-                  type: "tool-
-
-
-                  toolName: toolCall.function.name,
-                  argsTextDelta: (_u = toolCallDelta.function.arguments) != null ? _u : ""
+                  type: "tool-input-delta",
+                  id: toolCall.id,
+                  delta: (_u = toolCallDelta.function.arguments) != null ? _u : ""
                });
                if (((_v = toolCall.function) == null ? void 0 : _v.name) != null && ((_w = toolCall.function) == null ? void 0 : _w.arguments) != null && isParsableJson(toolCall.function.arguments)) {
+                  controller.enqueue({
+                    type: "tool-input-end",
+                    id: toolCall.id
+                  });
                  controller.enqueue({
                    type: "tool-call",
-                    toolCallType: "function",
                    toolCallId: (_x = toolCall.id) != null ? _x : generateId(),
                    toolName: toolCall.function.name,
-
+                    input: toolCall.function.arguments
                  });
                  toolCall.hasFinished = true;
                }
@@ -767,6 +910,9 @@ var OpenAIChatLanguageModel = class {
            }
          },
          flush(controller) {
+            if (isActiveText) {
+              controller.enqueue({ type: "text-end", id: "0" });
+            }
            controller.enqueue({
              type: "finish",
              finishReason,
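Note on the streaming hunks above: text and tool-argument streaming now use explicit start/delta/end parts, and the final tool call carries `input` instead of `args`. An illustrative event order for one streamed answer followed by one tool call; values are made up, part shapes are taken from the hunks above, and `text-end` is emitted in `flush`, so it arrives just before `finish`.

// Illustrative ordering only:
//   { type: "stream-start", warnings: [] }
//   { type: "text-start",  id: "0" }
//   { type: "text-delta",  id: "0", delta: "Let me check the weather." }
//   { type: "tool-input-start", id: "call_abc", toolName: "get_weather" }
//   { type: "tool-input-delta", id: "call_abc", delta: '{"city":"Berlin"}' }
//   { type: "tool-input-end",   id: "call_abc" }
//   { type: "tool-call", toolCallId: "call_abc", toolName: "get_weather", input: '{"city":"Berlin"}' }
//   { type: "text-end", id: "0" }
//   { type: "finish", finishReason: "tool-calls", usage: { ... } }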
@@ -781,97 +927,97 @@ var OpenAIChatLanguageModel = class {
    };
  }
 };
-var openaiTokenUsageSchema =
-  prompt_tokens:
-  completion_tokens:
-  total_tokens:
-  prompt_tokens_details:
-    cached_tokens:
+var openaiTokenUsageSchema = z5.object({
+  prompt_tokens: z5.number().nullish(),
+  completion_tokens: z5.number().nullish(),
+  total_tokens: z5.number().nullish(),
+  prompt_tokens_details: z5.object({
+    cached_tokens: z5.number().nullish()
   }).nullish(),
-  completion_tokens_details:
-    reasoning_tokens:
-    accepted_prediction_tokens:
-    rejected_prediction_tokens:
+  completion_tokens_details: z5.object({
+    reasoning_tokens: z5.number().nullish(),
+    accepted_prediction_tokens: z5.number().nullish(),
+    rejected_prediction_tokens: z5.number().nullish()
   }).nullish()
 }).nullish();
-var openaiChatResponseSchema =
-  id:
-  created:
-  model:
-  choices:
-
-    message:
-      role:
-      content:
-      tool_calls:
-
-        id:
-        type:
-        function:
-          name:
-          arguments:
+var openaiChatResponseSchema = z5.object({
+  id: z5.string().nullish(),
+  created: z5.number().nullish(),
+  model: z5.string().nullish(),
+  choices: z5.array(
+    z5.object({
+      message: z5.object({
+        role: z5.literal("assistant").nullish(),
+        content: z5.string().nullish(),
+        tool_calls: z5.array(
+          z5.object({
+            id: z5.string().nullish(),
+            type: z5.literal("function"),
+            function: z5.object({
+              name: z5.string(),
+              arguments: z5.string()
            })
          })
        ).nullish()
      }),
-      index:
-      logprobs:
-        content:
-
-          token:
-          logprob:
-          top_logprobs:
-
-            token:
-            logprob:
+      index: z5.number(),
+      logprobs: z5.object({
+        content: z5.array(
+          z5.object({
+            token: z5.string(),
+            logprob: z5.number(),
+            top_logprobs: z5.array(
+              z5.object({
+                token: z5.string(),
+                logprob: z5.number()
              })
            )
          })
        ).nullish()
      }).nullish(),
-      finish_reason:
+      finish_reason: z5.string().nullish()
    })
  ),
  usage: openaiTokenUsageSchema
 });
-var openaiChatChunkSchema =
-
-  id:
-  created:
-  model:
-  choices:
-
-    delta:
-      role:
-      content:
-      tool_calls:
-
-        index:
-        id:
-        type:
-        function:
-          name:
-          arguments:
+var openaiChatChunkSchema = z5.union([
+  z5.object({
+    id: z5.string().nullish(),
+    created: z5.number().nullish(),
+    model: z5.string().nullish(),
+    choices: z5.array(
+      z5.object({
+        delta: z5.object({
+          role: z5.enum(["assistant"]).nullish(),
+          content: z5.string().nullish(),
+          tool_calls: z5.array(
+            z5.object({
+              index: z5.number(),
+              id: z5.string().nullish(),
+              type: z5.literal("function").nullish(),
+              function: z5.object({
+                name: z5.string().nullish(),
+                arguments: z5.string().nullish()
              })
            })
          ).nullish()
        }).nullish(),
-        logprobs:
-          content:
-
-            token:
-            logprob:
-            top_logprobs:
-
-              token:
-              logprob:
+        logprobs: z5.object({
+          content: z5.array(
+            z5.object({
+              token: z5.string(),
+              logprob: z5.number(),
+              top_logprobs: z5.array(
+                z5.object({
+                  token: z5.string(),
+                  logprob: z5.number()
                })
              )
            })
          ).nullish()
        }).nullish(),
-        finish_reason:
-        index:
+        finish_reason: z5.string().nullish(),
+        index: z5.number()
      })
    ),
    usage: openaiTokenUsageSchema
@@ -881,6 +1027,9 @@ var openaiChatChunkSchema = z3.union([
 function isReasoningModel(modelId) {
   return modelId.startsWith("o");
 }
+function supportsFlexProcessing(modelId) {
+  return modelId.startsWith("o3") || modelId.startsWith("o4-mini");
+}
 function getSystemMessageMode(modelId) {
   var _a, _b;
   if (!isReasoningModel(modelId)) {
@@ -929,7 +1078,7 @@ import {
   parseProviderOptions as parseProviderOptions2,
   postJsonToApi as postJsonToApi2
 } from "@ai-sdk/provider-utils";
-import { z as
+import { z as z7 } from "zod";
 
 // src/convert-to-openai-completion-prompt.ts
 import {
@@ -1010,12 +1159,12 @@ ${user}:`]
 }
 
 // src/openai-completion-options.ts
-import { z as
-var openaiCompletionProviderOptions =
+import { z as z6 } from "zod";
+var openaiCompletionProviderOptions = z6.object({
   /**
   Echo back the prompt in addition to the completion.
      */
-  echo:
+  echo: z6.boolean().optional(),
   /**
   Modify the likelihood of specified tokens appearing in the completion.
 
@@ -1030,16 +1179,16 @@ var openaiCompletionProviderOptions = z4.object({
   As an example, you can pass {"50256": -100} to prevent the <|endoftext|>
   token from being generated.
      */
-  logitBias:
+  logitBias: z6.record(z6.string(), z6.number()).optional(),
   /**
   The suffix that comes after a completion of inserted text.
      */
-  suffix:
+  suffix: z6.string().optional(),
   /**
   A unique identifier representing your end-user, which can help OpenAI to
   monitor and detect abuse. Learn more.
      */
-  user:
+  user: z6.string().optional(),
   /**
   Return the log probabilities of the tokens. Including logprobs will increase
   the response size and can slow down response times. However, it can
@@ -1049,7 +1198,7 @@ var openaiCompletionProviderOptions = z4.object({
   Setting to a number will return the log probabilities of the top n
   tokens that were generated.
      */
-  logprobs:
+  logprobs: z6.union([z6.boolean(), z6.number()]).optional()
 });
 
 // src/openai-completion-language-model.ts
@@ -1221,6 +1370,9 @@ var OpenAICompletionLanguageModel = class {
          controller.enqueue({ type: "stream-start", warnings });
        },
        transform(chunk, controller) {
+          if (options.includeRawChunks) {
+            controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+          }
          if (!chunk.success) {
            finishReason = "error";
            controller.enqueue({ type: "error", error: chunk.error });
@@ -1238,6 +1390,7 @@ var OpenAICompletionLanguageModel = class {
              type: "response-metadata",
              ...getResponseMetadata(value)
            });
+            controller.enqueue({ type: "text-start", id: "0" });
          }
          if (value.usage != null) {
            usage.inputTokens = value.usage.prompt_tokens;
@@ -1251,14 +1404,18 @@ var OpenAICompletionLanguageModel = class {
          if ((choice == null ? void 0 : choice.logprobs) != null) {
            providerMetadata.openai.logprobs = choice.logprobs;
          }
-          if ((choice == null ? void 0 : choice.text) != null) {
+          if ((choice == null ? void 0 : choice.text) != null && choice.text.length > 0) {
            controller.enqueue({
-              type: "text",
-
+              type: "text-delta",
+              id: "0",
+              delta: choice.text
            });
          }
        },
        flush(controller) {
+          if (!isFirstChunk) {
+            controller.enqueue({ type: "text-end", id: "0" });
+          }
          controller.enqueue({
            type: "finish",
            finishReason,
@@ -1273,42 +1430,42 @@ var OpenAICompletionLanguageModel = class {
    };
  }
 };
-var usageSchema =
-  prompt_tokens:
-  completion_tokens:
-  total_tokens:
+var usageSchema = z7.object({
+  prompt_tokens: z7.number(),
+  completion_tokens: z7.number(),
+  total_tokens: z7.number()
 });
-var openaiCompletionResponseSchema =
-  id:
-  created:
-  model:
-  choices:
-
-    text:
-    finish_reason:
-    logprobs:
-      tokens:
-      token_logprobs:
-      top_logprobs:
+var openaiCompletionResponseSchema = z7.object({
+  id: z7.string().nullish(),
+  created: z7.number().nullish(),
+  model: z7.string().nullish(),
+  choices: z7.array(
+    z7.object({
+      text: z7.string(),
+      finish_reason: z7.string(),
+      logprobs: z7.object({
+        tokens: z7.array(z7.string()),
+        token_logprobs: z7.array(z7.number()),
+        top_logprobs: z7.array(z7.record(z7.string(), z7.number())).nullish()
      }).nullish()
    })
  ),
  usage: usageSchema.nullish()
 });
-var openaiCompletionChunkSchema =
-
-  id:
-  created:
-  model:
-  choices:
-
-    text:
-    finish_reason:
-    index:
-    logprobs:
-      tokens:
-      token_logprobs:
-      top_logprobs:
+var openaiCompletionChunkSchema = z7.union([
+  z7.object({
+    id: z7.string().nullish(),
+    created: z7.number().nullish(),
+    model: z7.string().nullish(),
+    choices: z7.array(
+      z7.object({
+        text: z7.string(),
+        finish_reason: z7.string().nullish(),
+        index: z7.number(),
+        logprobs: z7.object({
+          tokens: z7.array(z7.string()),
+          token_logprobs: z7.array(z7.number()),
+          top_logprobs: z7.array(z7.record(z7.string(), z7.number())).nullish()
        }).nullish()
      })
    ),
@@ -1327,21 +1484,21 @@ import {
   parseProviderOptions as parseProviderOptions3,
   postJsonToApi as postJsonToApi3
 } from "@ai-sdk/provider-utils";
-import { z as
+import { z as z9 } from "zod";
 
 // src/openai-embedding-options.ts
-import { z as
-var openaiEmbeddingProviderOptions =
+import { z as z8 } from "zod";
+var openaiEmbeddingProviderOptions = z8.object({
   /**
   The number of dimensions the resulting output embeddings should have.
   Only supported in text-embedding-3 and later models.
      */
-  dimensions:
+  dimensions: z8.number().optional(),
   /**
   A unique identifier representing your end-user, which can help OpenAI to
   monitor and detect abuse. Learn more.
      */
-  user:
+  user: z8.string().optional()
 });
 
 // src/openai-embedding-model.ts
@@ -1407,9 +1564,9 @@ var OpenAIEmbeddingModel = class {
    };
  }
 };
-var openaiTextEmbeddingResponseSchema =
-  data:
-  usage:
+var openaiTextEmbeddingResponseSchema = z9.object({
+  data: z9.array(z9.object({ embedding: z9.array(z9.number()) })),
+  usage: z9.object({ prompt_tokens: z9.number() }).nullish()
 });
 
 // src/openai-image-model.ts
@@ -1418,7 +1575,7 @@ import {
   createJsonResponseHandler as createJsonResponseHandler4,
   postJsonToApi as postJsonToApi4
 } from "@ai-sdk/provider-utils";
-import { z as
+import { z as z10 } from "zod";
 
 // src/openai-image-settings.ts
 var modelMaxImagesPerCall = {
@@ -1506,9 +1663,9 @@ var OpenAIImageModel = class {
    };
  }
 };
-var openaiImageResponseSchema =
-  data:
-
+var openaiImageResponseSchema = z10.object({
+  data: z10.array(
+    z10.object({ b64_json: z10.string(), revised_prompt: z10.string().optional() })
  )
 });
 
@@ -1520,33 +1677,33 @@ import {
   parseProviderOptions as parseProviderOptions4,
   postFormDataToApi
 } from "@ai-sdk/provider-utils";
-import { z as
+import { z as z12 } from "zod";
 
 // src/openai-transcription-options.ts
-import { z as
-var openAITranscriptionProviderOptions =
+import { z as z11 } from "zod";
+var openAITranscriptionProviderOptions = z11.object({
   /**
    * Additional information to include in the transcription response.
   */
-  include:
+  include: z11.array(z11.string()).optional(),
   /**
   * The language of the input audio in ISO-639-1 format.
  */
-  language:
+  language: z11.string().optional(),
   /**
   * An optional text to guide the model's style or continue a previous audio segment.
  */
-  prompt:
+  prompt: z11.string().optional(),
   /**
  * The sampling temperature, between 0 and 1.
  * @default 0
  */
-  temperature:
+  temperature: z11.number().min(0).max(1).default(0).optional(),
   /**
  * The timestamp granularities to populate for this transcription.
  * @default ['segment']
  */
-  timestampGranularities:
+  timestampGranularities: z11.array(z11.enum(["word", "segment"])).default(["segment"]).optional()
 });
 
 // src/openai-transcription-model.ts
@@ -1613,7 +1770,7 @@ var OpenAITranscriptionModel = class {
   constructor(modelId, config) {
     this.modelId = modelId;
     this.config = config;
-    this.specificationVersion = "
+    this.specificationVersion = "v2";
   }
   get provider() {
     return this.config.provider;
@@ -1694,15 +1851,15 @@ var OpenAITranscriptionModel = class {
    };
  }
 };
-var openaiTranscriptionResponseSchema =
-  text:
-  language:
-  duration:
-  words:
-
-    word:
-    start:
-    end:
+var openaiTranscriptionResponseSchema = z12.object({
+  text: z12.string(),
+  language: z12.string().nullish(),
+  duration: z12.number().nullish(),
+  words: z12.array(
+    z12.object({
+      word: z12.string(),
+      start: z12.number(),
+      end: z12.number()
    })
  ).nullish()
 });
@@ -1714,16 +1871,16 @@ import {
   parseProviderOptions as parseProviderOptions5,
   postJsonToApi as postJsonToApi5
 } from "@ai-sdk/provider-utils";
-import { z as
-var OpenAIProviderOptionsSchema =
-  instructions:
-  speed:
+import { z as z13 } from "zod";
+var OpenAIProviderOptionsSchema = z13.object({
+  instructions: z13.string().nullish(),
+  speed: z13.number().min(0.25).max(4).default(1).nullish()
 });
 var OpenAISpeechModel = class {
   constructor(modelId, config) {
     this.modelId = modelId;
     this.config = config;
-    this.specificationVersion = "
+    this.specificationVersion = "v2";
   }
   get provider() {
     return this.config.provider;
@@ -1734,6 +1891,7 @@ var OpenAISpeechModel = class {
     outputFormat = "mp3",
     speed,
     instructions,
+    language,
     providerOptions
   }) {
     const warnings = [];
@@ -1770,6 +1928,13 @@ var OpenAISpeechModel = class {
        }
      }
    }
+    if (language) {
+      warnings.push({
+        type: "unsupported-setting",
+        setting: "language",
+        details: `OpenAI speech models do not support language selection. Language parameter "${language}" was ignored.`
+      });
+    }
    return {
      requestBody,
      warnings
@@ -1820,7 +1985,7 @@ import {
   parseProviderOptions as parseProviderOptions6,
   postJsonToApi as postJsonToApi6
 } from "@ai-sdk/provider-utils";
-import { z as
+import { z as z14 } from "zod";
 
 // src/responses/convert-to-openai-responses-messages.ts
 import {
@@ -1911,11 +2076,21 @@ function convertToOpenAIResponsesMessages({
          break;
        }
        case "tool-call": {
+          if (part.providerExecuted) {
+            break;
+          }
          messages.push({
            type: "function_call",
            call_id: part.toolCallId,
            name: part.toolName,
-            arguments: JSON.stringify(part.
+            arguments: JSON.stringify(part.input)
+          });
+          break;
+        }
+        case "tool-result": {
+          warnings.push({
+            type: "other",
+            message: `tool result parts in assistant messages are not supported for OpenAI responses`
          });
          break;
        }
@@ -1925,10 +2100,23 @@ function convertToOpenAIResponsesMessages({
    }
    case "tool": {
      for (const part of content) {
+        const output = part.output;
+        let contentValue;
+        switch (output.type) {
+          case "text":
+          case "error-text":
+            contentValue = output.value;
+            break;
+          case "content":
+          case "json":
+          case "error-json":
+            contentValue = JSON.stringify(output.value);
+            break;
+        }
        messages.push({
          type: "function_call_output",
          call_id: part.toolCallId,
-          output:
+          output: contentValue
        });
      }
      break;
@@ -1982,7 +2170,7 @@ function prepareResponsesTools({
        type: "function",
        name: tool.name,
        description: tool.description,
-        parameters: tool.
+        parameters: tool.inputSchema,
        strict: strict ? true : void 0
      });
      break;
@@ -2116,6 +2304,7 @@ var OpenAIResponsesLanguageModel = class {
      store: openaiOptions == null ? void 0 : openaiOptions.store,
      user: openaiOptions == null ? void 0 : openaiOptions.user,
      instructions: openaiOptions == null ? void 0 : openaiOptions.instructions,
+      service_tier: openaiOptions == null ? void 0 : openaiOptions.serviceTier,
      // model-specific settings:
      ...modelConfig.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
        reasoning: {
@@ -2149,6 +2338,14 @@ var OpenAIResponsesLanguageModel = class {
        });
      }
    }
+    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "flex" && !supportsFlexProcessing2(this.modelId)) {
+      warnings.push({
+        type: "unsupported-setting",
+        setting: "serviceTier",
+        details: "flex processing is only available for o3 and o4-mini models"
+      });
+      delete baseArgs.service_tier;
+    }
    const {
      tools: openaiTools,
      toolChoice: openaiToolChoice,
@@ -2183,55 +2380,59 @@ var OpenAIResponsesLanguageModel = class {
      body,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler6(
-
-        id:
-        created_at:
-        model:
-        output:
-
-
-          type:
-          role:
-          content:
-
-            type:
-            text:
-            annotations:
-
-              type:
-              start_index:
-              end_index:
-              url:
-              title:
+        z14.object({
+          id: z14.string(),
+          created_at: z14.number(),
+          model: z14.string(),
+          output: z14.array(
+            z14.discriminatedUnion("type", [
+              z14.object({
+                type: z14.literal("message"),
+                role: z14.literal("assistant"),
+                content: z14.array(
+                  z14.object({
+                    type: z14.literal("output_text"),
+                    text: z14.string(),
+                    annotations: z14.array(
+                      z14.object({
+                        type: z14.literal("url_citation"),
+                        start_index: z14.number(),
+                        end_index: z14.number(),
+                        url: z14.string(),
+                        title: z14.string()
                      })
                    )
                  })
                )
              }),
-
-          type:
-          call_id:
-          name:
-          arguments:
+              z14.object({
+                type: z14.literal("function_call"),
+                call_id: z14.string(),
+                name: z14.string(),
+                arguments: z14.string()
              }),
-
-          type:
+              z14.object({
+                type: z14.literal("web_search_call"),
+                id: z14.string(),
+                status: z14.string().optional()
              }),
-
-          type:
+              z14.object({
+                type: z14.literal("computer_call"),
+                id: z14.string(),
+                status: z14.string().optional()
              }),
-
-          type:
-          summary:
-
-            type:
-            text:
+              z14.object({
+                type: z14.literal("reasoning"),
+                summary: z14.array(
+                  z14.object({
+                    type: z14.literal("summary_text"),
+                    text: z14.string()
                  })
                )
              })
            ])
          ),
-        incomplete_details:
+          incomplete_details: z14.object({ reason: z14.string() }).nullable(),
          usage: usageSchema2
        })
      ),
@@ -2269,10 +2470,46 @@ var OpenAIResponsesLanguageModel = class {
      case "function_call": {
        content.push({
          type: "tool-call",
-          toolCallType: "function",
          toolCallId: part.call_id,
          toolName: part.name,
-
+          input: part.arguments
+        });
+        break;
+      }
+      case "web_search_call": {
+        content.push({
+          type: "tool-call",
+          toolCallId: part.id,
+          toolName: "web_search_preview",
+          input: "",
+          providerExecuted: true
+        });
+        content.push({
+          type: "tool-result",
+          toolCallId: part.id,
+          toolName: "web_search_preview",
+          result: { status: part.status || "completed" },
+          providerExecuted: true
+        });
+        break;
+      }
+      case "computer_call": {
+        content.push({
+          type: "tool-call",
+          toolCallId: part.id,
+          toolName: "computer_use",
+          input: "",
+          providerExecuted: true
+        });
+        content.push({
+          type: "tool-result",
+          toolCallId: part.id,
+          toolName: "computer_use",
+          result: {
+            type: "computer_use_tool_result",
+            status: part.status || "completed"
+          },
+          providerExecuted: true
        });
        break;
      }
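Note: `web_search_call` and `computer_call` output items are now mapped to a provider-executed tool call plus a matching tool result. Illustrative content produced for one web search item; ids and status values are made up, shapes follow the hunk above.

// Illustrative only:
const content = [
  {
    type: "tool-call",
    toolCallId: "ws_001",
    toolName: "web_search_preview",
    input: "",
    providerExecuted: true
  },
  {
    type: "tool-result",
    toolCallId: "ws_001",
    toolName: "web_search_preview",
    result: { status: "completed" },
    providerExecuted: true
  }
];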
@@ -2344,6 +2581,9 @@ var OpenAIResponsesLanguageModel = class {
          },
          transform(chunk, controller) {
            var _a, _b, _c, _d, _e, _f, _g, _h;
+            if (options.includeRawChunks) {
+              controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+            }
            if (!chunk.success) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: chunk.error });
@@ -2357,22 +2597,121 @@ var OpenAIResponsesLanguageModel = class {
                toolCallId: value.item.call_id
              };
              controller.enqueue({
-                type: "tool-
-
+                type: "tool-input-start",
+                id: value.item.call_id,
+                toolName: value.item.name
+              });
+            } else if (value.item.type === "web_search_call") {
+              ongoingToolCalls[value.output_index] = {
+                toolName: "web_search_preview",
+                toolCallId: value.item.id
+              };
+              controller.enqueue({
+                type: "tool-input-start",
+                id: value.item.id,
+                toolName: "web_search_preview"
+              });
+            } else if (value.item.type === "computer_call") {
+              ongoingToolCalls[value.output_index] = {
+                toolName: "computer_use",
+                toolCallId: value.item.id
+              };
+              controller.enqueue({
+                type: "tool-input-start",
+                id: value.item.id,
+                toolName: "computer_use"
+              });
+            } else if (value.item.type === "message") {
+              controller.enqueue({
+                type: "text-start",
+                id: value.item.id
+              });
+            } else if (value.item.type === "reasoning") {
+              controller.enqueue({
+                type: "reasoning-start",
+                id: value.item.id
+              });
+            }
+          } else if (isResponseOutputItemDoneChunk(value)) {
+            if (value.item.type === "function_call") {
+              ongoingToolCalls[value.output_index] = void 0;
+              hasToolCalls = true;
+              controller.enqueue({
+                type: "tool-input-end",
+                id: value.item.call_id
+              });
+              controller.enqueue({
+                type: "tool-call",
                toolCallId: value.item.call_id,
                toolName: value.item.name,
-
+                input: value.item.arguments
+              });
+            } else if (value.item.type === "web_search_call") {
+              ongoingToolCalls[value.output_index] = void 0;
+              hasToolCalls = true;
+              controller.enqueue({
+                type: "tool-input-end",
+                id: value.item.id
+              });
+              controller.enqueue({
+                type: "tool-call",
+                toolCallId: value.item.id,
+                toolName: "web_search_preview",
+                input: "",
+                providerExecuted: true
+              });
+              controller.enqueue({
+                type: "tool-result",
+                toolCallId: value.item.id,
+                toolName: "web_search_preview",
+                result: {
+                  type: "web_search_tool_result",
+                  status: value.item.status || "completed"
+                },
+                providerExecuted: true
+              });
+            } else if (value.item.type === "computer_call") {
+              ongoingToolCalls[value.output_index] = void 0;
+              hasToolCalls = true;
+              controller.enqueue({
+                type: "tool-input-end",
+                id: value.item.id
+              });
+              controller.enqueue({
+                type: "tool-call",
+                toolCallId: value.item.id,
+                toolName: "computer_use",
+                input: "",
+                providerExecuted: true
+              });
+              controller.enqueue({
+                type: "tool-result",
+                toolCallId: value.item.id,
+                toolName: "computer_use",
+                result: {
+                  type: "computer_use_tool_result",
+                  status: value.item.status || "completed"
+                },
+                providerExecuted: true
+              });
+            } else if (value.item.type === "message") {
+              controller.enqueue({
+                type: "text-end",
+                id: value.item.id
+              });
+            } else if (value.item.type === "reasoning") {
+              controller.enqueue({
+                type: "reasoning-end",
+                id: value.item.id
              });
            }
          } else if (isResponseFunctionCallArgumentsDeltaChunk(value)) {
            const toolCall = ongoingToolCalls[value.output_index];
            if (toolCall != null) {
              controller.enqueue({
-                type: "tool-
-
-
-                toolName: toolCall.toolName,
-                argsTextDelta: value.delta
+                type: "tool-input-delta",
+                id: toolCall.toolCallId,
+                delta: value.delta
              });
            }
          } else if (isResponseCreatedChunk(value)) {
@@ -2385,23 +2724,15 @@ var OpenAIResponsesLanguageModel = class {
            });
          } else if (isTextDeltaChunk(value)) {
            controller.enqueue({
-              type: "text",
-
+              type: "text-delta",
+              id: value.item_id,
+              delta: value.delta
            });
          } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
            controller.enqueue({
-              type: "reasoning",
-
-
-          } else if (isResponseOutputItemDoneChunk(value) && value.item.type === "function_call") {
-            ongoingToolCalls[value.output_index] = void 0;
-            hasToolCalls = true;
-            controller.enqueue({
-              type: "tool-call",
-              toolCallType: "function",
-              toolCallId: value.item.call_id,
-              toolName: value.item.name,
-              args: value.item.arguments
+              type: "reasoning-delta",
+              delta: value.delta,
+              id: value.item_id
            });
          } else if (isResponseFinishedChunk(value)) {
            finishReason = mapOpenAIResponseFinishReason({
@@ -2442,95 +2773,134 @@ var OpenAIResponsesLanguageModel = class {
    };
  }
 };
-var usageSchema2 =
-  input_tokens:
-  input_tokens_details:
-  output_tokens:
-  output_tokens_details:
+var usageSchema2 = z14.object({
+  input_tokens: z14.number(),
+  input_tokens_details: z14.object({ cached_tokens: z14.number().nullish() }).nullish(),
+  output_tokens: z14.number(),
+  output_tokens_details: z14.object({ reasoning_tokens: z14.number().nullish() }).nullish()
 });
-var textDeltaChunkSchema =
-  type:
-
+var textDeltaChunkSchema = z14.object({
+  type: z14.literal("response.output_text.delta"),
+  item_id: z14.string(),
+  delta: z14.string()
 });
-var responseFinishedChunkSchema =
-  type:
-  response:
-    incomplete_details:
+var responseFinishedChunkSchema = z14.object({
+  type: z14.enum(["response.completed", "response.incomplete"]),
+  response: z14.object({
+    incomplete_details: z14.object({ reason: z14.string() }).nullish(),
    usage: usageSchema2
  })
 });
-var responseCreatedChunkSchema =
-  type:
-  response:
-    id:
-    created_at:
-    model:
+var responseCreatedChunkSchema = z14.object({
+  type: z14.literal("response.created"),
+  response: z14.object({
+    id: z14.string(),
+    created_at: z14.number(),
+    model: z14.string()
  })
 });
-var
-  type:
-  output_index:
-  item:
-
-    type:
+var responseOutputItemAddedSchema = z14.object({
+  type: z14.literal("response.output_item.added"),
+  output_index: z14.number(),
+  item: z14.discriminatedUnion("type", [
+    z14.object({
+      type: z14.literal("message"),
+      id: z14.string()
    }),
-
-    type:
-    id:
-
-
-
-
+    z14.object({
+      type: z14.literal("reasoning"),
+      id: z14.string()
+    }),
+    z14.object({
+      type: z14.literal("function_call"),
+      id: z14.string(),
+      call_id: z14.string(),
+      name: z14.string(),
+      arguments: z14.string()
+    }),
+    z14.object({
+      type: z14.literal("web_search_call"),
+      id: z14.string(),
+      status: z14.string()
+    }),
+    z14.object({
+      type: z14.literal("computer_call"),
+      id: z14.string(),
+      status: z14.string()
    })
  ])
 });
-var
-  type:
-
-
-
-
-
-  type: z12.literal("response.output_item.added"),
-  output_index: z12.number(),
-  item: z12.discriminatedUnion("type", [
-    z12.object({
-      type: z12.literal("message")
+var responseOutputItemDoneSchema = z14.object({
+  type: z14.literal("response.output_item.done"),
+  output_index: z14.number(),
+  item: z14.discriminatedUnion("type", [
+    z14.object({
+      type: z14.literal("message"),
+      id: z14.string()
    }),
-
-    type:
-    id:
-
-
-
+    z14.object({
+      type: z14.literal("reasoning"),
+      id: z14.string()
+    }),
+    z14.object({
+      type: z14.literal("function_call"),
+      id: z14.string(),
+      call_id: z14.string(),
+      name: z14.string(),
+      arguments: z14.string(),
+      status: z14.literal("completed")
+    }),
+    z14.object({
+      type: z14.literal("web_search_call"),
+      id: z14.string(),
+      status: z14.literal("completed")
+    }),
+    z14.object({
+      type: z14.literal("computer_call"),
+      id: z14.string(),
+      status: z14.literal("completed")
    })
  ])
 });
-var
-  type:
-
-
-
-
+var responseFunctionCallArgumentsDeltaSchema = z14.object({
+  type: z14.literal("response.function_call_arguments.delta"),
+  item_id: z14.string(),
+  output_index: z14.number(),
+  delta: z14.string()
+});
+var responseAnnotationAddedSchema = z14.object({
+  type: z14.literal("response.output_text.annotation.added"),
+  annotation: z14.object({
+    type: z14.literal("url_citation"),
+    url: z14.string(),
+    title: z14.string()
  })
 });
-var responseReasoningSummaryTextDeltaSchema =
-  type:
-  item_id:
-  output_index:
-  summary_index:
-  delta:
+var responseReasoningSummaryTextDeltaSchema = z14.object({
+  type: z14.literal("response.reasoning_summary_text.delta"),
+  item_id: z14.string(),
+  output_index: z14.number(),
+  summary_index: z14.number(),
+  delta: z14.string()
+});
+var responseReasoningSummaryPartDoneSchema = z14.object({
+  type: z14.literal("response.reasoning_summary_part.done"),
+  item_id: z14.string(),
+  output_index: z14.number(),
+  summary_index: z14.number(),
+  part: z14.unknown().nullish()
 });
-var openaiResponsesChunkSchema =
+var openaiResponsesChunkSchema = z14.union([
  textDeltaChunkSchema,
  responseFinishedChunkSchema,
  responseCreatedChunkSchema,
+  responseOutputItemAddedSchema,
  responseOutputItemDoneSchema,
  responseFunctionCallArgumentsDeltaSchema,
-  responseOutputItemAddedSchema,
  responseAnnotationAddedSchema,
  responseReasoningSummaryTextDeltaSchema,
-
+  responseReasoningSummaryPartDoneSchema,
+  z14.object({ type: z14.string() }).passthrough()
  // fallback for unknown chunks
 ]);
 function isTextDeltaChunk(chunk) {
@@ -2578,16 +2948,20 @@ function getResponsesModelConfig(modelId) {
    requiredAutoTruncation: false
  };
 }
-
-
-
-
-
-
-
-
-
-
+function supportsFlexProcessing2(modelId) {
+  return modelId.startsWith("o3") || modelId.startsWith("o4-mini");
+}
+var openaiResponsesProviderOptionsSchema = z14.object({
+  metadata: z14.any().nullish(),
+  parallelToolCalls: z14.boolean().nullish(),
+  previousResponseId: z14.string().nullish(),
+  store: z14.boolean().nullish(),
+  user: z14.string().nullish(),
+  reasoningEffort: z14.string().nullish(),
+  strictSchemas: z14.boolean().nullish(),
+  instructions: z14.string().nullish(),
+  reasoningSummary: z14.string().nullish(),
+  serviceTier: z14.enum(["auto", "flex"]).nullish()
 });
 export {
   OpenAIChatLanguageModel,