@ai-sdk/openai 3.0.0-beta.74 → 3.0.0-beta.75
This diff compares the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in the public registry.
- package/CHANGELOG.md +6 -0
- package/dist/index.d.mts +0 -1
- package/dist/index.d.ts +0 -1
- package/dist/index.js +8 -24
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +8 -24
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +0 -1
- package/dist/internal/index.d.ts +0 -1
- package/dist/internal/index.js +7 -23
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +7 -23
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -452,12 +452,6 @@ var openaiChatLanguageModelOptions = lazySchema2(
      * Parameters for prediction mode.
      */
     prediction: z3.record(z3.string(), z3.any()).optional(),
-    /**
-     * Whether to use structured outputs.
-     *
-     * @default true
-     */
-    structuredOutputs: z3.boolean().optional(),
     /**
      * Service tier for the request.
      * - 'auto': Default service tier. The request will be processed with the service tier configured in the
@@ -472,7 +466,7 @@ var openaiChatLanguageModelOptions = lazySchema2(
     /**
      * Whether to use strict JSON schema validation.
      *
-     * @default
+     * @default true
      */
     strictJsonSchema: z3.boolean().optional(),
     /**
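
Read together with the previous hunk, these two schema changes remove the `structuredOutputs` provider option and document `strictJsonSchema` as defaulting to `true`. The sketch below shows how a caller could opt back out of strict validation through provider options; it is a minimal illustration assuming the AI SDK's `generateText` call shape, and the model id and prompt are placeholders.

```ts
import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

// Minimal sketch: as of beta.75, strict JSON schema validation is the default,
// so this provider option is only needed to turn it off.
const { text } = await generateText({
  model: openai('gpt-4o'), // placeholder model id
  prompt: 'Return a JSON object with three facts about TypeScript.',
  providerOptions: {
    openai: {
      strictJsonSchema: false, // overrides the new default of true
    },
  },
});
console.log(text);
```
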
@@ -513,7 +507,6 @@ import {
 function prepareChatTools({
   tools,
   toolChoice,
-  structuredOutputs,
   strictJsonSchema
 }) {
   tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
@@ -531,7 +524,7 @@ function prepareChatTools({
           name: tool.name,
           description: tool.description,
           parameters: tool.inputSchema,
-          strict:
+          strict: strictJsonSchema
         }
       });
       break;
@@ -600,24 +593,16 @@ var OpenAIChatLanguageModel = class {
     toolChoice,
     providerOptions
   }) {
-    var _a, _b, _c
+    var _a, _b, _c;
     const warnings = [];
     const openaiOptions = (_a = await parseProviderOptions({
       provider: "openai",
       providerOptions,
       schema: openaiChatLanguageModelOptions
     })) != null ? _a : {};
-    const structuredOutputs = (_b = openaiOptions.structuredOutputs) != null ? _b : true;
     if (topK != null) {
       warnings.push({ type: "unsupported", feature: "topK" });
     }
-    if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !structuredOutputs) {
-      warnings.push({
-        type: "unsupported",
-        feature: "responseFormat",
-        details: "JSON response format schema is only supported with structuredOutputs"
-      });
-    }
     const { messages, warnings: messageWarnings } = convertToOpenAIChatMessages(
       {
         prompt,
@@ -625,7 +610,7 @@ var OpenAIChatLanguageModel = class {
       }
     );
     warnings.push(...messageWarnings);
-    const strictJsonSchema = (
+    const strictJsonSchema = (_b = openaiOptions.strictJsonSchema) != null ? _b : true;
     const baseArgs = {
       // model id:
       model: this.modelId,
@@ -641,12 +626,12 @@ var OpenAIChatLanguageModel = class {
       top_p: topP,
       frequency_penalty: frequencyPenalty,
       presence_penalty: presencePenalty,
-      response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ?
+      response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? responseFormat.schema != null ? {
         type: "json_schema",
         json_schema: {
           schema: responseFormat.schema,
           strict: strictJsonSchema,
-          name: (
+          name: (_c = responseFormat.name) != null ? _c : "response",
           description: responseFormat.description
         }
       } : { type: "json_object" } : void 0,
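
For reference, the rewritten `response_format` expression in the hunk above resolves to one of two payload shapes, depending on whether the caller supplied a schema. The object literals below are a hedged sketch of that mapping; the example schema, name, and description values are invented for illustration.

```ts
// responseFormat = { type: 'json', schema, name?, description? } -> json_schema mode
const withSchema = {
  response_format: {
    type: 'json_schema',
    json_schema: {
      schema: { type: 'object', properties: { city: { type: 'string' } } }, // invented example schema
      strict: true,       // strictJsonSchema, now defaulting to true
      name: 'response',   // falls back to "response" when responseFormat.name is absent
      description: undefined,
    },
  },
};

// responseFormat = { type: 'json' } with no schema -> plain JSON object mode
const withoutSchema = {
  response_format: { type: 'json_object' },
};
```
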
@@ -760,7 +745,6 @@ var OpenAIChatLanguageModel = class {
     } = prepareChatTools({
       tools,
       toolChoice,
-      structuredOutputs,
       strictJsonSchema
     });
     return {
@@ -3648,7 +3632,7 @@ var OpenAIResponsesLanguageModel = class {
       hasApplyPatchTool: hasOpenAITool("openai.apply_patch")
     });
     warnings.push(...inputWarnings);
-    const strictJsonSchema = (_b = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _b :
+    const strictJsonSchema = (_b = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _b : true;
     let include = openaiOptions == null ? void 0 : openaiOptions.include;
     function addInclude(key) {
       if (include == null) {
@@ -5259,7 +5243,7 @@ var OpenAITranscriptionModel = class {
 };
 
 // src/version.ts
-var VERSION = true ? "3.0.0-beta.74" : "0.0.0-test";
+var VERSION = true ? "3.0.0-beta.75" : "0.0.0-test";
 
 // src/openai-provider.ts
 function createOpenAI(options = {}) {