@ai-sdk/openai 2.0.73 → 2.0.75
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -0
- package/dist/index.js +64 -29
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +64 -29
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +63 -28
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +63 -28
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +2 -2
package/dist/internal/index.js
CHANGED
@@ -3644,7 +3644,7 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B;
     const {
       args: body,
       warnings,
@@ -3654,6 +3654,7 @@ var OpenAIResponsesLanguageModel = class {
       path: "/responses",
       modelId: this.modelId
     });
+    const providerKey = this.config.provider.replace(".responses", "");
     const {
       responseHeaders,
       value: response,
@@ -3694,7 +3695,7 @@ var OpenAIResponsesLanguageModel = class {
           type: "reasoning",
           text: summary.text,
           providerMetadata: {
-            openai: {
+            [providerKey]: {
               itemId: part.id,
               reasoningEncryptedContent: (_a = part.encrypted_content) != null ? _a : null
             }
@@ -3731,7 +3732,7 @@ var OpenAIResponsesLanguageModel = class {
           action: part.action
         }),
         providerMetadata: {
-          openai: {
+          [providerKey]: {
             itemId: part.id
           }
         }
@@ -3747,7 +3748,7 @@ var OpenAIResponsesLanguageModel = class {
             type: "text",
             text: contentPart.text,
             providerMetadata: {
-              openai: {
+              [providerKey]: {
                 itemId: part.id
               }
             }
@@ -3771,12 +3772,43 @@ var OpenAIResponsesLanguageModel = class {
                 filename: (_l = annotation.filename) != null ? _l : annotation.file_id,
                 ...annotation.file_id ? {
                   providerMetadata: {
-                    openai: {
+                    [providerKey]: {
                       fileId: annotation.file_id
                     }
                   }
                 } : {}
               });
+            } else if (annotation.type === "container_file_citation") {
+              content.push({
+                type: "source",
+                sourceType: "document",
+                id: (_o = (_n = (_m = this.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : (0, import_provider_utils29.generateId)(),
+                mediaType: "text/plain",
+                title: (_q = (_p = annotation.filename) != null ? _p : annotation.file_id) != null ? _q : "Document",
+                filename: (_r = annotation.filename) != null ? _r : annotation.file_id,
+                providerMetadata: {
+                  [providerKey]: {
+                    fileId: annotation.file_id,
+                    containerId: annotation.container_id,
+                    ...annotation.index != null ? { index: annotation.index } : {}
+                  }
+                }
+              });
+            } else if (annotation.type === "file_path") {
+              content.push({
+                type: "source",
+                sourceType: "document",
+                id: (_u = (_t = (_s = this.config).generateId) == null ? void 0 : _t.call(_s)) != null ? _u : (0, import_provider_utils29.generateId)(),
+                mediaType: "application/octet-stream",
+                title: annotation.file_id,
+                filename: annotation.file_id,
+                providerMetadata: {
+                  [providerKey]: {
+                    fileId: annotation.file_id,
+                    ...annotation.index != null ? { index: annotation.index } : {}
+                  }
+                }
+              });
             }
           }
         }
@@ -3790,7 +3822,7 @@ var OpenAIResponsesLanguageModel = class {
         toolName: part.name,
         input: part.arguments,
         providerMetadata: {
-          openai: {
+          [providerKey]: {
             itemId: part.id
           }
         }
@@ -3848,13 +3880,13 @@ var OpenAIResponsesLanguageModel = class {
         toolName: "file_search",
         result: {
           queries: part.queries,
-          results: (
+          results: (_w = (_v = part.results) == null ? void 0 : _v.map((result) => ({
             attributes: result.attributes,
             fileId: result.file_id,
             filename: result.filename,
             score: result.score,
             text: result.text
-          }))) != null ?
+          }))) != null ? _w : null
         },
         providerExecuted: true
       });
@@ -3885,29 +3917,29 @@ var OpenAIResponsesLanguageModel = class {
       }
     }
     const providerMetadata = {
-      openai: {
+      [providerKey]: {
         ...response.id != null ? { responseId: response.id } : {}
       }
     };
     if (logprobs.length > 0) {
-      providerMetadata.openai.logprobs = logprobs;
+      providerMetadata[providerKey].logprobs = logprobs;
     }
     if (typeof response.service_tier === "string") {
-      providerMetadata.openai.serviceTier = response.service_tier;
+      providerMetadata[providerKey].serviceTier = response.service_tier;
     }
     const usage = response.usage;
     return {
       content,
       finishReason: mapOpenAIResponseFinishReason({
-        finishReason: (
+        finishReason: (_x = response.incomplete_details) == null ? void 0 : _x.reason,
         hasFunctionCall
       }),
       usage: {
         inputTokens: usage.input_tokens,
         outputTokens: usage.output_tokens,
         totalTokens: usage.input_tokens + usage.output_tokens,
-        reasoningTokens: (
-        cachedInputTokens: (
+        reasoningTokens: (_z = (_y = usage.output_tokens_details) == null ? void 0 : _y.reasoning_tokens) != null ? _z : void 0,
+        cachedInputTokens: (_B = (_A = usage.input_tokens_details) == null ? void 0 : _A.cached_tokens) != null ? _B : void 0
       },
       request: { body },
       response: {
@@ -3946,6 +3978,7 @@ var OpenAIResponsesLanguageModel = class {
       fetch: this.config.fetch
     });
     const self = this;
+    const providerKey = this.config.provider.replace(".responses", "");
     let finishReason = "unknown";
     const usage = {
       inputTokens: void 0,
@@ -4061,7 +4094,7 @@ var OpenAIResponsesLanguageModel = class {
               type: "text-start",
               id: value.item.id,
               providerMetadata: {
-                openai: {
+                [providerKey]: {
                   itemId: value.item.id
                 }
               }
@@ -4075,7 +4108,7 @@ var OpenAIResponsesLanguageModel = class {
               type: "reasoning-start",
               id: `${value.item.id}:0`,
               providerMetadata: {
-                openai: {
+                [providerKey]: {
                   itemId: value.item.id,
                   reasoningEncryptedContent: (_a = value.item.encrypted_content) != null ? _a : null
                 }
@@ -4088,7 +4121,7 @@ var OpenAIResponsesLanguageModel = class {
               type: "text-end",
               id: value.item.id,
               providerMetadata: {
-                openai: {
+                [providerKey]: {
                   itemId: value.item.id,
                   ...ongoingAnnotations.length > 0 && {
                     annotations: ongoingAnnotations
@@ -4109,7 +4142,7 @@ var OpenAIResponsesLanguageModel = class {
               toolName: value.item.name,
               input: value.item.arguments,
               providerMetadata: {
-                openai: {
+                [providerKey]: {
                   itemId: value.item.id
                 }
               }
@@ -4202,7 +4235,7 @@ var OpenAIResponsesLanguageModel = class {
                 }
               }),
               providerMetadata: {
-                openai: { itemId: value.item.id }
+                [providerKey]: { itemId: value.item.id }
               }
             });
           } else if (value.item.type === "reasoning") {
@@ -4217,7 +4250,7 @@ var OpenAIResponsesLanguageModel = class {
               type: "reasoning-end",
               id: `${value.item.id}:${summaryIndex}`,
               providerMetadata: {
-                openai: {
+                [providerKey]: {
                   itemId: value.item.id,
                   reasoningEncryptedContent: (_d = value.item.encrypted_content) != null ? _d : null
                 }
@@ -4297,7 +4330,9 @@ var OpenAIResponsesLanguageModel = class {
             controller.enqueue({
               type: "reasoning-end",
               id: `${value.item_id}:${summaryIndex}`,
-              providerMetadata: { openai: { itemId: value.item_id } }
+              providerMetadata: {
+                [providerKey]: { itemId: value.item_id }
+              }
             });
             activeReasoningPart.summaryParts[summaryIndex] = "concluded";
           }
@@ -4306,7 +4341,7 @@ var OpenAIResponsesLanguageModel = class {
             type: "reasoning-start",
             id: `${value.item_id}:${value.summary_index}`,
             providerMetadata: {
-              openai: {
+              [providerKey]: {
                 itemId: value.item_id,
                 reasoningEncryptedContent: (_h = (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.encryptedContent) != null ? _h : null
               }
@@ -4319,7 +4354,7 @@ var OpenAIResponsesLanguageModel = class {
             id: `${value.item_id}:${value.summary_index}`,
             delta: value.delta,
             providerMetadata: {
-              openai: {
+              [providerKey]: {
                 itemId: value.item_id
               }
             }
@@ -4330,7 +4365,7 @@ var OpenAIResponsesLanguageModel = class {
             type: "reasoning-end",
             id: `${value.item_id}:${value.summary_index}`,
             providerMetadata: {
-              openai: { itemId: value.item_id }
+              [providerKey]: { itemId: value.item_id }
             }
           });
           activeReasoning[value.item_id].summaryParts[value.summary_index] = "concluded";
@@ -4370,7 +4405,7 @@ var OpenAIResponsesLanguageModel = class {
               filename: (_v = value.annotation.filename) != null ? _v : value.annotation.file_id,
               ...value.annotation.file_id ? {
                 providerMetadata: {
-                  openai: {
+                  [providerKey]: {
                     fileId: value.annotation.file_id
                   }
                 }
@@ -4383,15 +4418,15 @@ var OpenAIResponsesLanguageModel = class {
       },
       flush(controller) {
         const providerMetadata = {
-          openai: {
+          [providerKey]: {
             responseId
           }
         };
        if (logprobs.length > 0) {
-          providerMetadata.openai.logprobs = logprobs;
+          providerMetadata[providerKey].logprobs = logprobs;
        }
        if (serviceTier !== void 0) {
-          providerMetadata.openai.serviceTier = serviceTier;
+          providerMetadata[providerKey].serviceTier = serviceTier;
        }
        controller.enqueue({
          type: "finish",