@ai-sdk/openai 2.0.73 → 2.0.75
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -0
- package/dist/index.js +64 -29
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +64 -29
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +63 -28
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +63 -28
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +2 -2
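
Most of the index.mjs changes below swap a hardcoded provider-metadata key for one derived from the configured provider id, so providers that wrap the responses model surface metadata under their own name. The following is an illustrative sketch only, not code from the package; the helper name and the "azure.responses" id are assumptions.

// Sketch (assumed helper): how the key derivation seen in the diff behaves.
// "openai.responses" yields "openai"; a wrapping provider id such as
// "azure.responses" (assumed here) would yield "azure".
function providerKeyFor(provider) {
  return provider.replace(".responses", "");
}

const providerKey = providerKeyFor("openai.responses");
// Metadata is then attached under the derived key instead of a fixed one:
const providerMetadata = { [providerKey]: { itemId: "item_123" } };
console.log(providerMetadata);                  // { openai: { itemId: "item_123" } }
console.log(providerKeyFor("azure.responses")); // "azure"
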
package/dist/index.mjs
CHANGED
@@ -3413,7 +3413,7 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B;
     const {
       args: body,
       warnings,
@@ -3423,6 +3423,7 @@ var OpenAIResponsesLanguageModel = class {
       path: "/responses",
       modelId: this.modelId
     });
+    const providerKey = this.config.provider.replace(".responses", "");
     const {
       responseHeaders,
       value: response,
@@ -3463,7 +3464,7 @@ var OpenAIResponsesLanguageModel = class {
           type: "reasoning",
           text: summary.text,
           providerMetadata: {
-            openai: {
+            [providerKey]: {
               itemId: part.id,
               reasoningEncryptedContent: (_a = part.encrypted_content) != null ? _a : null
             }
@@ -3500,7 +3501,7 @@ var OpenAIResponsesLanguageModel = class {
             action: part.action
           }),
           providerMetadata: {
-            openai: {
+            [providerKey]: {
               itemId: part.id
             }
           }
@@ -3516,7 +3517,7 @@ var OpenAIResponsesLanguageModel = class {
             type: "text",
             text: contentPart.text,
             providerMetadata: {
-              openai: {
+              [providerKey]: {
                 itemId: part.id
               }
             }
@@ -3540,12 +3541,43 @@ var OpenAIResponsesLanguageModel = class {
              filename: (_l = annotation.filename) != null ? _l : annotation.file_id,
              ...annotation.file_id ? {
                providerMetadata: {
-                  openai: {
+                  [providerKey]: {
                    fileId: annotation.file_id
                  }
                }
              } : {}
            });
+          } else if (annotation.type === "container_file_citation") {
+            content.push({
+              type: "source",
+              sourceType: "document",
+              id: (_o = (_n = (_m = this.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : generateId2(),
+              mediaType: "text/plain",
+              title: (_q = (_p = annotation.filename) != null ? _p : annotation.file_id) != null ? _q : "Document",
+              filename: (_r = annotation.filename) != null ? _r : annotation.file_id,
+              providerMetadata: {
+                [providerKey]: {
+                  fileId: annotation.file_id,
+                  containerId: annotation.container_id,
+                  ...annotation.index != null ? { index: annotation.index } : {}
+                }
+              }
+            });
+          } else if (annotation.type === "file_path") {
+            content.push({
+              type: "source",
+              sourceType: "document",
+              id: (_u = (_t = (_s = this.config).generateId) == null ? void 0 : _t.call(_s)) != null ? _u : generateId2(),
+              mediaType: "application/octet-stream",
+              title: annotation.file_id,
+              filename: annotation.file_id,
+              providerMetadata: {
+                [providerKey]: {
+                  fileId: annotation.file_id,
+                  ...annotation.index != null ? { index: annotation.index } : {}
+                }
+              }
+            });
           }
         }
       }
@@ -3559,7 +3591,7 @@ var OpenAIResponsesLanguageModel = class {
           toolName: part.name,
           input: part.arguments,
           providerMetadata: {
-            openai: {
+            [providerKey]: {
               itemId: part.id
             }
           }
@@ -3617,13 +3649,13 @@ var OpenAIResponsesLanguageModel = class {
           toolName: "file_search",
           result: {
             queries: part.queries,
-            results: (
+            results: (_w = (_v = part.results) == null ? void 0 : _v.map((result) => ({
               attributes: result.attributes,
               fileId: result.file_id,
               filename: result.filename,
               score: result.score,
               text: result.text
-            }))) != null ?
+            }))) != null ? _w : null
           },
           providerExecuted: true
         });
@@ -3654,29 +3686,29 @@ var OpenAIResponsesLanguageModel = class {
       }
     }
     const providerMetadata = {
-      openai: {
+      [providerKey]: {
        ...response.id != null ? { responseId: response.id } : {}
      }
    };
    if (logprobs.length > 0) {
-      providerMetadata.openai.logprobs = logprobs;
+      providerMetadata[providerKey].logprobs = logprobs;
    }
    if (typeof response.service_tier === "string") {
-      providerMetadata.openai.serviceTier = response.service_tier;
+      providerMetadata[providerKey].serviceTier = response.service_tier;
    }
    const usage = response.usage;
    return {
      content,
      finishReason: mapOpenAIResponseFinishReason({
-        finishReason: (
+        finishReason: (_x = response.incomplete_details) == null ? void 0 : _x.reason,
        hasFunctionCall
      }),
      usage: {
        inputTokens: usage.input_tokens,
        outputTokens: usage.output_tokens,
        totalTokens: usage.input_tokens + usage.output_tokens,
-        reasoningTokens: (
-        cachedInputTokens: (
+        reasoningTokens: (_z = (_y = usage.output_tokens_details) == null ? void 0 : _y.reasoning_tokens) != null ? _z : void 0,
+        cachedInputTokens: (_B = (_A = usage.input_tokens_details) == null ? void 0 : _A.cached_tokens) != null ? _B : void 0
      },
      request: { body },
      response: {
@@ -3715,6 +3747,7 @@ var OpenAIResponsesLanguageModel = class {
      fetch: this.config.fetch
    });
    const self = this;
+    const providerKey = this.config.provider.replace(".responses", "");
    let finishReason = "unknown";
    const usage = {
      inputTokens: void 0,
@@ -3830,7 +3863,7 @@ var OpenAIResponsesLanguageModel = class {
              type: "text-start",
              id: value.item.id,
              providerMetadata: {
-                openai: {
+                [providerKey]: {
                  itemId: value.item.id
                }
              }
@@ -3844,7 +3877,7 @@ var OpenAIResponsesLanguageModel = class {
              type: "reasoning-start",
              id: `${value.item.id}:0`,
              providerMetadata: {
-                openai: {
+                [providerKey]: {
                  itemId: value.item.id,
                  reasoningEncryptedContent: (_a = value.item.encrypted_content) != null ? _a : null
                }
@@ -3857,7 +3890,7 @@ var OpenAIResponsesLanguageModel = class {
              type: "text-end",
              id: value.item.id,
              providerMetadata: {
-                openai: {
+                [providerKey]: {
                  itemId: value.item.id,
                  ...ongoingAnnotations.length > 0 && {
                    annotations: ongoingAnnotations
@@ -3878,7 +3911,7 @@ var OpenAIResponsesLanguageModel = class {
              toolName: value.item.name,
              input: value.item.arguments,
              providerMetadata: {
-                openai: {
+                [providerKey]: {
                  itemId: value.item.id
                }
              }
@@ -3971,7 +4004,7 @@ var OpenAIResponsesLanguageModel = class {
                }
              }),
              providerMetadata: {
-                openai: { itemId: value.item.id }
+                [providerKey]: { itemId: value.item.id }
              }
            });
          } else if (value.item.type === "reasoning") {
@@ -3986,7 +4019,7 @@ var OpenAIResponsesLanguageModel = class {
                type: "reasoning-end",
                id: `${value.item.id}:${summaryIndex}`,
                providerMetadata: {
-                  openai: {
+                  [providerKey]: {
                    itemId: value.item.id,
                    reasoningEncryptedContent: (_d = value.item.encrypted_content) != null ? _d : null
                  }
@@ -4066,7 +4099,9 @@ var OpenAIResponsesLanguageModel = class {
            controller.enqueue({
              type: "reasoning-end",
              id: `${value.item_id}:${summaryIndex}`,
-              providerMetadata: {
+              providerMetadata: {
+                [providerKey]: { itemId: value.item_id }
+              }
            });
            activeReasoningPart.summaryParts[summaryIndex] = "concluded";
          }
@@ -4075,7 +4110,7 @@ var OpenAIResponsesLanguageModel = class {
            type: "reasoning-start",
            id: `${value.item_id}:${value.summary_index}`,
            providerMetadata: {
-              openai: {
+              [providerKey]: {
                itemId: value.item_id,
                reasoningEncryptedContent: (_h = (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.encryptedContent) != null ? _h : null
              }
@@ -4088,7 +4123,7 @@ var OpenAIResponsesLanguageModel = class {
            id: `${value.item_id}:${value.summary_index}`,
            delta: value.delta,
            providerMetadata: {
-              openai: {
+              [providerKey]: {
                itemId: value.item_id
              }
            }
@@ -4099,7 +4134,7 @@ var OpenAIResponsesLanguageModel = class {
            type: "reasoning-end",
            id: `${value.item_id}:${value.summary_index}`,
            providerMetadata: {
-              openai: { itemId: value.item_id }
+              [providerKey]: { itemId: value.item_id }
            }
          });
          activeReasoning[value.item_id].summaryParts[value.summary_index] = "concluded";
@@ -4139,7 +4174,7 @@ var OpenAIResponsesLanguageModel = class {
                  filename: (_v = value.annotation.filename) != null ? _v : value.annotation.file_id,
                  ...value.annotation.file_id ? {
                    providerMetadata: {
-                      openai: {
+                      [providerKey]: {
                        fileId: value.annotation.file_id
                      }
                    }
@@ -4152,15 +4187,15 @@ var OpenAIResponsesLanguageModel = class {
      },
      flush(controller) {
        const providerMetadata = {
-          openai: {
+          [providerKey]: {
            responseId
          }
        };
        if (logprobs.length > 0) {
-          providerMetadata.openai.logprobs = logprobs;
+          providerMetadata[providerKey].logprobs = logprobs;
        }
        if (serviceTier !== void 0) {
-          providerMetadata.openai.serviceTier = serviceTier;
+          providerMetadata[providerKey].serviceTier = serviceTier;
        }
        controller.enqueue({
          type: "finish",
@@ -4621,7 +4656,7 @@ var OpenAITranscriptionModel = class {
 };

 // src/version.ts
-var VERSION = true ? "2.0.73" : "0.0.0-test";
+var VERSION = true ? "2.0.75" : "0.0.0-test";

 // src/openai-provider.ts
 function createOpenAI(options = {}) {