@ai-sdk/openai 2.0.51 → 2.0.53
This diff compares publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +12 -0
- package/dist/index.d.mts +1 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +62 -37
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +62 -37
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +61 -36
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +61 -36
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/internal/index.mjs
CHANGED
@@ -2392,6 +2392,9 @@ async function convertToOpenAIResponsesInput({
 input.push(reasoningMessages[reasoningId]);
 } else {
 reasoningMessage.summary.push(...summaryParts);
+if ((providerOptions == null ? void 0 : providerOptions.reasoningEncryptedContent) != null) {
+reasoningMessage.encrypted_content = providerOptions.reasoningEncryptedContent;
+}
 }
 }
 } else {
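The three added lines make `convertToOpenAIResponsesInput` copy `reasoningEncryptedContent` from a reasoning part's provider options onto the `encrypted_content` field of the reasoning item it replays to the Responses API. A minimal sketch of the scenario this serves (stateless requests that carry their own encrypted reasoning), assuming the standard `generateText` entry point; the model id and prompt are illustrative:

```ts
import { openai } from '@ai-sdk/openai';
import { generateText } from 'ai';

// Stateless reasoning: with store disabled, ask OpenAI to return encrypted
// reasoning content so that reasoning parts fed back in a later request can
// carry it (the converter above now copies it onto encrypted_content).
const result = await generateText({
  model: openai.responses('o4-mini'), // illustrative reasoning model id
  prompt: 'Outline a migration plan for this package.',
  providerOptions: {
    openai: {
      store: false,
      include: ['reasoning.encrypted_content'],
    },
  },
});
```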
@@ -2737,6 +2740,11 @@ var openaiResponsesChunkSchema = lazyValidator11(
 summary_index: z14.number(),
 delta: z14.string()
 }),
+z14.object({
+type: z14.literal("response.reasoning_summary_part.done"),
+item_id: z14.string(),
+summary_index: z14.number()
+}),
 z14.object({
 type: z14.literal("error"),
 code: z14.string(),
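The new schema entry lets the chunk parser accept the `response.reasoning_summary_part.done` stream event, validating only its type and identifiers. A representative payload (field names from the schema above, values invented):

```ts
// Illustrative only: values are made up, field names come from the schema above.
const reasoningSummaryPartDone = {
  type: 'response.reasoning_summary_part.done',
  item_id: 'rs_123', // id of the reasoning output item
  summary_index: 0,  // which summary part of that item finished
} as const;
```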
@@ -3017,6 +3025,7 @@ var openaiResponsesProviderOptionsSchema = lazyValidator12(
 store: z15.boolean().nullish(),
 strictJsonSchema: z15.boolean().nullish(),
 textVerbosity: z15.enum(["low", "medium", "high"]).nullish(),
+truncation: z15.enum(["auto", "disabled"]).nullish(),
 user: z15.string().nullish()
 })
 )
@@ -3544,6 +3553,7 @@ var OpenAIResponsesLanguageModel = class {
 prompt_cache_key: openaiOptions == null ? void 0 : openaiOptions.promptCacheKey,
 safety_identifier: openaiOptions == null ? void 0 : openaiOptions.safetyIdentifier,
 top_logprobs: topLogprobs,
+truncation: openaiOptions == null ? void 0 : openaiOptions.truncation,
 // model-specific settings:
 ...modelConfig.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
 reasoning: {
@@ -3554,9 +3564,6 @@ var OpenAIResponsesLanguageModel = class {
 summary: openaiOptions.reasoningSummary
 }
 }
-},
-...modelConfig.requiredAutoTruncation && {
-truncation: "auto"
 }
 };
 if (modelConfig.isReasoningModel) {
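With the hard-coded `requiredAutoTruncation` branch removed here (and its default dropped from `getResponsesModelConfig` further below), truncation is now driven entirely by the new `truncation` provider option. A minimal sketch of opting in, assuming the standard `generateText` entry point; the model id is illustrative:

```ts
import { openai } from '@ai-sdk/openai';
import { generateText } from 'ai';

// Let OpenAI truncate the context automatically instead of erroring when the
// input exceeds the context window ('auto' | 'disabled' per the schema above).
const { text } = await generateText({
  model: openai.responses('gpt-5-mini'), // illustrative model id
  prompt: 'Summarize the conversation so far.',
  providerOptions: {
    openai: { truncation: 'auto' },
  },
});
```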
@@ -3624,7 +3631,8 @@ var OpenAIResponsesLanguageModel = class {
 tools: openaiTools,
 tool_choice: openaiToolChoice
 },
-warnings: [...warnings, ...toolWarnings]
+warnings: [...warnings, ...toolWarnings],
+store
 };
 }
 async doGenerate(options) {
@@ -3899,7 +3907,8 @@ var OpenAIResponsesLanguageModel = class {
 const {
 args: body,
 warnings,
-webSearchToolName
+webSearchToolName,
+store
 } = await this.getArgs(options);
 const { responseHeaders, value: response } = await postJsonToApi6({
 url: this.config.url({
@@ -3938,7 +3947,7 @@ var OpenAIResponsesLanguageModel = class {
 controller.enqueue({ type: "stream-start", warnings });
 },
 transform(chunk, controller) {
-var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v
+var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v;
 if (options.includeRawChunks) {
 controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
 }
@@ -4037,10 +4046,10 @@ var OpenAIResponsesLanguageModel = class {
 }
 }
 });
-} else if (
+} else if (isResponseOutputItemAddedChunk(value) && value.item.type === "reasoning") {
 activeReasoning[value.item.id] = {
 encryptedContent: value.item.encrypted_content,
-summaryParts:
+summaryParts: { 0: "active" }
 };
 controller.enqueue({
 type: "reasoning-start",
@@ -4168,9 +4177,14 @@ var OpenAIResponsesLanguageModel = class {
 type: "text-end",
 id: value.item.id
 });
-} else if (
+} else if (value.item.type === "reasoning") {
 const activeReasoningPart = activeReasoning[value.item.id];
-
+const summaryPartIndices = Object.entries(
+activeReasoningPart.summaryParts
+).filter(
+([_, status]) => status === "active" || status === "can-conclude"
+).map(([summaryIndex]) => summaryIndex);
+for (const summaryIndex of summaryPartIndices) {
 controller.enqueue({
 type: "reasoning-end",
 id: `${value.item.id}:${summaryIndex}`,
@@ -4244,23 +4258,34 @@ var OpenAIResponsesLanguageModel = class {
 if (((_f = (_e = options.providerOptions) == null ? void 0 : _e.openai) == null ? void 0 : _f.logprobs) && value.logprobs) {
 logprobs.push(value.logprobs);
 }
-} else if (
+} else if (value.type === "response.reasoning_summary_part.added") {
 if (value.summary_index > 0) {
-
-
-
+const activeReasoningPart = activeReasoning[value.item_id];
+activeReasoningPart.summaryParts[value.summary_index] = "active";
+for (const summaryIndex of Object.keys(
+activeReasoningPart.summaryParts
+)) {
+if (activeReasoningPart.summaryParts[summaryIndex] === "can-conclude") {
+controller.enqueue({
+type: "reasoning-end",
+id: `${value.item_id}:${summaryIndex}`,
+providerMetadata: { openai: { itemId: value.item_id } }
+});
+activeReasoningPart.summaryParts[summaryIndex] = "concluded";
+}
+}
 controller.enqueue({
 type: "reasoning-start",
 id: `${value.item_id}:${value.summary_index}`,
 providerMetadata: {
 openai: {
 itemId: value.item_id,
-reasoningEncryptedContent: (
+reasoningEncryptedContent: (_h = (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.encryptedContent) != null ? _h : null
 }
 }
 });
 }
-} else if (
+} else if (value.type === "response.reasoning_summary_text.delta") {
 controller.enqueue({
 type: "reasoning-delta",
 id: `${value.item_id}:${value.summary_index}`,
@@ -4271,16 +4296,29 @@ var OpenAIResponsesLanguageModel = class {
 }
 }
 });
+} else if (value.type === "response.reasoning_summary_part.done") {
+if (store) {
+controller.enqueue({
+type: "reasoning-end",
+id: `${value.item_id}:${value.summary_index}`,
+providerMetadata: {
+openai: { itemId: value.item_id }
+}
+});
+activeReasoning[value.item_id].summaryParts[value.summary_index] = "concluded";
+} else {
+activeReasoning[value.item_id].summaryParts[value.summary_index] = "can-conclude";
+}
 } else if (isResponseFinishedChunk(value)) {
 finishReason = mapOpenAIResponseFinishReason({
-finishReason: (
+finishReason: (_i = value.response.incomplete_details) == null ? void 0 : _i.reason,
 hasFunctionCall
 });
 usage.inputTokens = value.response.usage.input_tokens;
 usage.outputTokens = value.response.usage.output_tokens;
 usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
-usage.reasoningTokens = (
-usage.cachedInputTokens = (
+usage.reasoningTokens = (_k = (_j = value.response.usage.output_tokens_details) == null ? void 0 : _j.reasoning_tokens) != null ? _k : void 0;
+usage.cachedInputTokens = (_m = (_l = value.response.usage.input_tokens_details) == null ? void 0 : _l.cached_tokens) != null ? _m : void 0;
 if (typeof value.response.service_tier === "string") {
 serviceTier = value.response.service_tier;
 }
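Together with the handlers in the previous hunks, this gives each reasoning summary part a small lifecycle: it starts as `active`, and on `response.reasoning_summary_part.done` it is either concluded immediately (when `store` is true) or parked as `can-conclude` until the next summary part starts or the reasoning output item completes. A condensed sketch of that "done" branch; names are simplified and the real transform also attaches provider metadata:

```ts
// Simplified sketch of the summary-part bookkeeping added above.
type SummaryPartStatus = 'active' | 'can-conclude' | 'concluded';

interface ReasoningState {
  encryptedContent?: string | null;
  summaryParts: Record<number, SummaryPartStatus>;
}

function onSummaryPartDone(
  state: ReasoningState,
  summaryIndex: number,
  store: boolean,
  emitReasoningEnd: (index: number) => void,
): void {
  if (store) {
    // Stored responses: close the part as soon as OpenAI reports it done.
    emitReasoningEnd(summaryIndex);
    state.summaryParts[summaryIndex] = 'concluded';
  } else {
    // store === false: defer the reasoning-end until the next summary part
    // starts or the reasoning output item itself completes.
    state.summaryParts[summaryIndex] = 'can-conclude';
  }
}
```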
@@ -4289,7 +4327,7 @@ var OpenAIResponsesLanguageModel = class {
 controller.enqueue({
 type: "source",
 sourceType: "url",
-id: (
+id: (_p = (_o = (_n = self.config).generateId) == null ? void 0 : _o.call(_n)) != null ? _p : generateId2(),
 url: value.annotation.url,
 title: value.annotation.title
 });
@@ -4297,10 +4335,10 @@ var OpenAIResponsesLanguageModel = class {
 controller.enqueue({
 type: "source",
 sourceType: "document",
-id: (
+id: (_s = (_r = (_q = self.config).generateId) == null ? void 0 : _r.call(_q)) != null ? _s : generateId2(),
 mediaType: "text/plain",
-title: (
-filename: (
+title: (_u = (_t = value.annotation.quote) != null ? _t : value.annotation.filename) != null ? _u : "Document",
+filename: (_v = value.annotation.filename) != null ? _v : value.annotation.file_id
 });
 }
 } else if (isErrorChunk(value)) {
@@ -4339,9 +4377,6 @@ function isTextDeltaChunk(chunk) {
 function isResponseOutputItemDoneChunk(chunk) {
 return chunk.type === "response.output_item.done";
 }
-function isResponseOutputItemDoneReasoningChunk(chunk) {
-return isResponseOutputItemDoneChunk(chunk) && chunk.item.type === "reasoning";
-}
 function isResponseFinishedChunk(chunk) {
 return chunk.type === "response.completed" || chunk.type === "response.incomplete";
 }
@@ -4360,18 +4395,9 @@ function isResponseCodeInterpreterCallCodeDoneChunk(chunk) {
 function isResponseOutputItemAddedChunk(chunk) {
 return chunk.type === "response.output_item.added";
 }
-function isResponseOutputItemAddedReasoningChunk(chunk) {
-return isResponseOutputItemAddedChunk(chunk) && chunk.item.type === "reasoning";
-}
 function isResponseAnnotationAddedChunk(chunk) {
 return chunk.type === "response.output_text.annotation.added";
 }
-function isResponseReasoningSummaryPartAddedChunk(chunk) {
-return chunk.type === "response.reasoning_summary_part.added";
-}
-function isResponseReasoningSummaryTextDeltaChunk(chunk) {
-return chunk.type === "response.reasoning_summary_text.delta";
-}
 function isErrorChunk(chunk) {
 return chunk.type === "error";
 }
@@ -4379,7 +4405,6 @@ function getResponsesModelConfig(modelId) {
 const supportsFlexProcessing2 = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
 const supportsPriorityProcessing2 = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
 const defaults = {
-requiredAutoTruncation: false,
 systemMessageMode: "system",
 supportsFlexProcessing: supportsFlexProcessing2,
 supportsPriorityProcessing: supportsPriorityProcessing2