@ai-sdk/openai 3.0.0 → 3.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +51 -303
- package/dist/index.js +157 -46
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +157 -46
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +156 -45
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +156 -45
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +2 -2
package/dist/internal/index.js
CHANGED
@@ -2469,9 +2469,10 @@ async function convertToOpenAIResponsesInput({
  hasShellTool = false,
  hasApplyPatchTool = false
  }) {
- var _a, _b, _c, _d, _e;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
  const input = [];
  const warnings = [];
+ const processedApprovalIds = /* @__PURE__ */ new Set();
  for (const { role, content } of prompt) {
  switch (role) {
  case "system": {
@@ -2562,10 +2563,13 @@ async function convertToOpenAIResponsesInput({
  break;
  }
  case "tool-call": {
+ const id = (_g = (_d = (_c = part.providerOptions) == null ? void 0 : _c.openai) == null ? void 0 : _d.itemId) != null ? _g : (_f = (_e = part.providerMetadata) == null ? void 0 : _e.openai) == null ? void 0 : _f.itemId;
  if (part.providerExecuted) {
+ if (store && id != null) {
+ input.push({ type: "item_reference", id });
+ }
  break;
  }
- const id = (_d = (_c = part.providerOptions) == null ? void 0 : _c.openai) == null ? void 0 : _d.itemId;
  if (store && id != null) {
  input.push({ type: "item_reference", id });
  break;
@@ -2622,8 +2626,12 @@ async function convertToOpenAIResponsesInput({
  }
  // assistant tool result parts are from provider-executed tools:
  case "tool-result": {
+ if (part.output.type === "execution-denied" || part.output.type === "json" && typeof part.output.value === "object" && part.output.value != null && "type" in part.output.value && part.output.value.type === "execution-denied") {
+ break;
+ }
  if (store) {
-
+ const itemId = (_j = (_i = (_h = part.providerMetadata) == null ? void 0 : _h.openai) == null ? void 0 : _i.itemId) != null ? _j : part.toolCallId;
+ input.push({ type: "item_reference", id: itemId });
  } else {
  warnings.push({
  type: "other",
@@ -2693,9 +2701,31 @@ async function convertToOpenAIResponsesInput({
  case "tool": {
  for (const part of content) {
  if (part.type === "tool-approval-response") {
+ const approvalResponse = part;
+ if (processedApprovalIds.has(approvalResponse.approvalId)) {
+ continue;
+ }
+ processedApprovalIds.add(approvalResponse.approvalId);
+ if (store) {
+ input.push({
+ type: "item_reference",
+ id: approvalResponse.approvalId
+ });
+ }
+ input.push({
+ type: "mcp_approval_response",
+ approval_request_id: approvalResponse.approvalId,
+ approve: approvalResponse.approved
+ });
  continue;
  }
  const output = part.output;
+ if (output.type === "execution-denied") {
+ const approvalId = (_l = (_k = output.providerOptions) == null ? void 0 : _k.openai) == null ? void 0 : _l.approvalId;
+ if (approvalId) {
+ continue;
+ }
+ }
  const resolvedToolName = toolNameMapping.toProviderToolName(
  part.toolName
  );
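The approval-handling branch added above is easier to follow once the bundler's `_a`/`_b` temporaries are unwound. The sketch below restates how a `tool-approval-response` prompt part becomes an `mcp_approval_response` input item; the type alias and helper name are illustrative assumptions, not exports of the package.

```ts
// Readable sketch of the added branch: forward each approval exactly once,
// optionally reference the stored approval-request item, then emit the
// mcp_approval_response input item.
type ToolApprovalResponsePart = {
  type: 'tool-approval-response';
  approvalId: string;
  approved: boolean;
};

function pushApprovalResponse(
  part: ToolApprovalResponsePart,
  input: Array<Record<string, unknown>>,
  processedApprovalIds: Set<string>,
  store: boolean,
): void {
  if (processedApprovalIds.has(part.approvalId)) {
    return; // the same approval may appear in several prompt messages; forward it once
  }
  processedApprovalIds.add(part.approvalId);
  if (store) {
    // with stored responses, also reference the server-side approval request item
    input.push({ type: 'item_reference', id: part.approvalId });
  }
  input.push({
    type: 'mcp_approval_response',
    approval_request_id: part.approvalId,
    approve: part.approved,
  });
}
```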
@@ -2750,7 +2780,7 @@ async function convertToOpenAIResponsesInput({
  contentValue = output.value;
  break;
  case "execution-denied":
- contentValue = (
+ contentValue = (_m = output.reason) != null ? _m : "Tool execution denied.";
  break;
  case "json":
  case "error-json":
@@ -2925,7 +2955,8 @@ var openaiResponsesChunkSchema = (0, import_provider_utils23.lazySchema)(
  import_v416.z.object({
  type: import_v416.z.literal("mcp_call"),
  id: import_v416.z.string(),
- status: import_v416.z.string()
+ status: import_v416.z.string(),
+ approval_request_id: import_v416.z.string().nullish()
  }),
  import_v416.z.object({
  type: import_v416.z.literal("mcp_list_tools"),
@@ -3082,7 +3113,8 @@ var openaiResponsesChunkSchema = (0, import_provider_utils23.lazySchema)(
  code: import_v416.z.union([import_v416.z.number(), import_v416.z.string()]).optional(),
  message: import_v416.z.string().optional()
  }).loose()
- ]).nullish()
+ ]).nullish(),
+ approval_request_id: import_v416.z.string().nullish()
  }),
  import_v416.z.object({
  type: import_v416.z.literal("mcp_list_tools"),
@@ -3111,7 +3143,7 @@ var openaiResponsesChunkSchema = (0, import_provider_utils23.lazySchema)(
  server_label: import_v416.z.string(),
  name: import_v416.z.string(),
  arguments: import_v416.z.string(),
- approval_request_id: import_v416.z.string()
+ approval_request_id: import_v416.z.string().optional()
  }),
  import_v416.z.object({
  type: import_v416.z.literal("apply_patch_call"),
@@ -3434,7 +3466,8 @@ var openaiResponsesResponseSchema = (0, import_provider_utils23.lazySchema)(
  code: import_v416.z.union([import_v416.z.number(), import_v416.z.string()]).optional(),
  message: import_v416.z.string().optional()
  }).loose()
- ]).nullish()
+ ]).nullish(),
+ approval_request_id: import_v416.z.string().nullish()
  }),
  import_v416.z.object({
  type: import_v416.z.literal("mcp_list_tools"),
@@ -3463,7 +3496,7 @@ var openaiResponsesResponseSchema = (0, import_provider_utils23.lazySchema)(
  server_label: import_v416.z.string(),
  name: import_v416.z.string(),
  arguments: import_v416.z.string(),
- approval_request_id: import_v416.z.string()
+ approval_request_id: import_v416.z.string().optional()
  }),
  import_v416.z.object({
  type: import_v416.z.literal("apply_patch_call"),
@@ -3921,7 +3954,7 @@ var mcpOutputSchema = (0, import_provider_utils28.lazySchema)(
  serverLabel: import_v421.z.string(),
  name: import_v421.z.string(),
  arguments: import_v421.z.string(),
- output: import_v421.z.string().
+ output: import_v421.z.string().nullish(),
  error: import_v421.z.union([import_v421.z.string(), jsonValueSchema]).optional()
  })
  )
@@ -4156,6 +4189,11 @@ async function prepareResponsesTools({
  value: tool.args,
  schema: mcpArgsSchema
  });
+ const mapApprovalFilter = (filter) => ({
+ tool_names: filter.toolNames
+ });
+ const requireApproval = args.requireApproval;
+ const requireApprovalParam = requireApproval == null ? void 0 : typeof requireApproval === "string" ? requireApproval : requireApproval.never != null ? { never: mapApprovalFilter(requireApproval.never) } : void 0;
  openaiTools.push({
  type: "mcp",
  server_label: args.serverLabel,
@@ -4166,7 +4204,7 @@ async function prepareResponsesTools({
  authorization: args.authorization,
  connector_id: args.connectorId,
  headers: args.headers,
- require_approval: "never",
+ require_approval: requireApprovalParam != null ? requireApprovalParam : "never",
  server_description: args.serverDescription,
  server_url: args.serverUrl
  });
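The new `requireApproval` mapping in `prepareResponsesTools` is shown minified above; a readable equivalent follows. The option shape is inferred from the bundled code (a string passed through verbatim, or an object with a `never` filter of `toolNames`); the type names are assumptions for illustration only.

```ts
// Readable equivalent of the added requireApproval → require_approval mapping.
type McpApprovalFilter = { toolNames: string[] };
type McpRequireApproval = string | { never?: McpApprovalFilter };

function mapRequireApproval(
  requireApproval: McpRequireApproval | undefined,
): string | { never: { tool_names: string[] } } | undefined {
  if (requireApproval == null) return undefined;
  if (typeof requireApproval === 'string') {
    return requireApproval; // e.g. "never", passed through verbatim
  }
  if (requireApproval.never != null) {
    return { never: { tool_names: requireApproval.never.toolNames } };
  }
  return undefined;
}

// The MCP tool entry then falls back to the previous hard-coded behavior:
// require_approval: mapRequireApproval(args.requireApproval) ?? 'never'
```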
@@ -4208,6 +4246,21 @@ async function prepareResponsesTools({
  }

  // src/responses/openai-responses-language-model.ts
+ function extractApprovalRequestIdToToolCallIdMapping(prompt) {
+ var _a, _b;
+ const mapping = {};
+ for (const message of prompt) {
+ if (message.role !== "assistant") continue;
+ for (const part of message.content) {
+ if (part.type !== "tool-call") continue;
+ const approvalRequestId = (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.approvalRequestId;
+ if (approvalRequestId != null) {
+ mapping[approvalRequestId] = part.toolCallId;
+ }
+ }
+ }
+ return mapping;
+ }
  var OpenAIResponsesLanguageModel = class {
  constructor(modelId, config) {
  this.specificationVersion = "v3";
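For reference, the helper added above reads as follows once the minifier's nullish-coalescing temporaries are unwound; the prompt and part types here are simplified assumptions rather than the SDK's real types.

```ts
// De-minified reading of extractApprovalRequestIdToToolCallIdMapping:
// scan assistant tool-call parts for an openai.approvalRequestId in
// providerOptions and remember which synthetic tool call id it belongs to.
type SimplifiedPromptMessage = {
  role: string;
  content: Array<{
    type: string;
    toolCallId: string;
    providerOptions?: { openai?: { approvalRequestId?: string } };
  }>;
};

function extractApprovalRequestIdToToolCallIdMapping(
  prompt: SimplifiedPromptMessage[],
): Record<string, string> {
  const mapping: Record<string, string> = {};
  for (const message of prompt) {
    if (message.role !== 'assistant') continue;
    for (const part of message.content) {
      if (part.type !== 'tool-call') continue;
      const approvalRequestId = part.providerOptions?.openai?.approvalRequestId;
      if (approvalRequestId != null) {
        mapping[approvalRequestId] = part.toolCallId;
      }
    }
  }
  return mapping;
}
```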
@@ -4443,7 +4496,7 @@ var OpenAIResponsesLanguageModel = class {
  };
  }
  async doGenerate(options) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E;
  const {
  args: body,
  warnings,
@@ -4455,6 +4508,7 @@ var OpenAIResponsesLanguageModel = class {
  modelId: this.modelId
  });
  const providerKey = this.config.provider.replace(".responses", "");
+ const approvalRequestIdToDummyToolCallIdFromPrompt = extractApprovalRequestIdToToolCallIdMapping(options.prompt);
  const {
  responseHeaders,
  value: response,
@@ -4671,17 +4725,20 @@ var OpenAIResponsesLanguageModel = class {
  break;
  }
  case "mcp_call": {
+ const toolCallId = part.approval_request_id != null ? (_v = approvalRequestIdToDummyToolCallIdFromPrompt[part.approval_request_id]) != null ? _v : part.id : part.id;
+ const toolName = `mcp.${part.name}`;
  content.push({
  type: "tool-call",
- toolCallId
- toolName
- input:
- providerExecuted: true
+ toolCallId,
+ toolName,
+ input: part.arguments,
+ providerExecuted: true,
+ dynamic: true
  });
  content.push({
  type: "tool-result",
- toolCallId
- toolName
+ toolCallId,
+ toolName,
  result: {
  type: "call",
  serverLabel: part.server_label,
@@ -4689,6 +4746,11 @@ var OpenAIResponsesLanguageModel = class {
  arguments: part.arguments,
  ...part.output != null ? { output: part.output } : {},
  ...part.error != null ? { error: part.error } : {}
+ },
+ providerMetadata: {
+ [providerKey]: {
+ itemId: part.id
+ }
  }
  });
  break;
@@ -4697,6 +4759,22 @@ var OpenAIResponsesLanguageModel = class {
  break;
  }
  case "mcp_approval_request": {
+ const approvalRequestId = (_w = part.approval_request_id) != null ? _w : part.id;
+ const dummyToolCallId = (_z = (_y = (_x = this.config).generateId) == null ? void 0 : _y.call(_x)) != null ? _z : (0, import_provider_utils32.generateId)();
+ const toolName = `mcp.${part.name}`;
+ content.push({
+ type: "tool-call",
+ toolCallId: dummyToolCallId,
+ toolName,
+ input: part.arguments,
+ providerExecuted: true,
+ dynamic: true
+ });
+ content.push({
+ type: "tool-approval-request",
+ approvalId: approvalRequestId,
+ toolCallId: dummyToolCallId
+ });
  break;
  }
  case "computer_call": {
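The `mcp_approval_request` branch added to `doGenerate` above pairs a provider-executed `tool-call` with a `tool-approval-request` content part. A readable sketch of that conversion, with simplified shapes and a caller-supplied `generateId` standing in for `config.generateId ?? generateId()`:

```ts
// Sketch of the mcp_approval_request → content conversion shown above.
// Only the fields visible in the diff are used; the function name is illustrative.
function convertMcpApprovalRequest(
  part: {
    id: string;
    name: string;
    arguments: string;
    approval_request_id?: string | null;
  },
  generateId: () => string,
) {
  const approvalRequestId = part.approval_request_id ?? part.id;
  const dummyToolCallId = generateId();
  const toolName = `mcp.${part.name}`;
  return [
    {
      type: 'tool-call',
      toolCallId: dummyToolCallId,
      toolName,
      input: part.arguments,
      providerExecuted: true,
      dynamic: true,
    },
    {
      type: 'tool-approval-request',
      approvalId: approvalRequestId,
      toolCallId: dummyToolCallId,
    },
  ];
}
```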
@@ -4732,13 +4810,13 @@ var OpenAIResponsesLanguageModel = class {
  toolName: toolNameMapping.toCustomToolName("file_search"),
  result: {
  queries: part.queries,
- results: (
+ results: (_B = (_A = part.results) == null ? void 0 : _A.map((result) => ({
  attributes: result.attributes,
  fileId: result.file_id,
  filename: result.filename,
  score: result.score,
  text: result.text
- }))) != null ?
+ }))) != null ? _B : null
  }
  });
  break;
@@ -4797,10 +4875,10 @@ var OpenAIResponsesLanguageModel = class {
  content,
  finishReason: {
  unified: mapOpenAIResponseFinishReason({
- finishReason: (
+ finishReason: (_C = response.incomplete_details) == null ? void 0 : _C.reason,
  hasFunctionCall
  }),
- raw: (
+ raw: (_E = (_D = response.incomplete_details) == null ? void 0 : _D.reason) != null ? _E : void 0
  },
  usage: convertOpenAIResponsesUsage(usage),
  request: { body },
@@ -4842,6 +4920,8 @@ var OpenAIResponsesLanguageModel = class {
  });
  const self = this;
  const providerKey = this.config.provider.replace(".responses", "");
+ const approvalRequestIdToDummyToolCallIdFromPrompt = extractApprovalRequestIdToToolCallIdMapping(options.prompt);
+ const approvalRequestIdToDummyToolCallIdFromStream = /* @__PURE__ */ new Map();
  let finishReason = {
  unified: "other",
  raw: void 0
@@ -4861,7 +4941,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({ type: "stream-start", warnings });
  },
  transform(chunk, controller) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E, _F, _G, _H, _I, _J;
  if (options.includeRawChunks) {
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
  }
@@ -4957,13 +5037,6 @@ var OpenAIResponsesLanguageModel = class {
  providerExecuted: true
  });
  } else if (value.item.type === "mcp_call" || value.item.type === "mcp_list_tools" || value.item.type === "mcp_approval_request") {
- controller.enqueue({
- type: "tool-call",
- toolCallId: value.item.id,
- toolName: toolNameMapping.toCustomToolName("mcp"),
- input: "{}",
- providerExecuted: true
- });
  } else if (value.item.type === "apply_patch_call") {
  const { call_id: callId, operation } = value.item;
  ongoingToolCalls[value.output_index] = {
@@ -5135,10 +5208,23 @@ var OpenAIResponsesLanguageModel = class {
  });
  } else if (value.item.type === "mcp_call") {
  ongoingToolCalls[value.output_index] = void 0;
+ const approvalRequestId = (_d = value.item.approval_request_id) != null ? _d : void 0;
+ const aliasedToolCallId = approvalRequestId != null ? (_f = (_e = approvalRequestIdToDummyToolCallIdFromStream.get(
+ approvalRequestId
+ )) != null ? _e : approvalRequestIdToDummyToolCallIdFromPrompt[approvalRequestId]) != null ? _f : value.item.id : value.item.id;
+ const toolName = `mcp.${value.item.name}`;
+ controller.enqueue({
+ type: "tool-call",
+ toolCallId: aliasedToolCallId,
+ toolName,
+ input: value.item.arguments,
+ providerExecuted: true,
+ dynamic: true
+ });
  controller.enqueue({
  type: "tool-result",
- toolCallId:
- toolName
+ toolCallId: aliasedToolCallId,
+ toolName,
  result: {
  type: "call",
  serverLabel: value.item.server_label,
@@ -5146,6 +5232,11 @@ var OpenAIResponsesLanguageModel = class {
  arguments: value.item.arguments,
  ...value.item.output != null ? { output: value.item.output } : {},
  ...value.item.error != null ? { error: value.item.error } : {}
+ },
+ providerMetadata: {
+ [providerKey]: {
+ itemId: value.item.id
+ }
  }
  });
  } else if (value.item.type === "mcp_list_tools") {
@@ -5190,6 +5281,26 @@ var OpenAIResponsesLanguageModel = class {
  ongoingToolCalls[value.output_index] = void 0;
  } else if (value.item.type === "mcp_approval_request") {
  ongoingToolCalls[value.output_index] = void 0;
+ const dummyToolCallId = (_i = (_h = (_g = self.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils32.generateId)();
+ const approvalRequestId = (_j = value.item.approval_request_id) != null ? _j : value.item.id;
+ approvalRequestIdToDummyToolCallIdFromStream.set(
+ approvalRequestId,
+ dummyToolCallId
+ );
+ const toolName = `mcp.${value.item.name}`;
+ controller.enqueue({
+ type: "tool-call",
+ toolCallId: dummyToolCallId,
+ toolName,
+ input: value.item.arguments,
+ providerExecuted: true,
+ dynamic: true
+ });
+ controller.enqueue({
+ type: "tool-approval-request",
+ approvalId: approvalRequestId,
+ toolCallId: dummyToolCallId
+ });
  } else if (value.item.type === "local_shell_call") {
  ongoingToolCalls[value.output_index] = void 0;
  controller.enqueue({
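In streaming mode a completed `mcp_call` item is aliased back to an earlier tool call id so that approval request, tool call, and tool result line up. A minimal sketch of that lookup order, mirroring the bundled logic above (the function name and parameter types are assumptions):

```ts
// Resolve the tool call id for a finished mcp_call stream item:
// 1. the dummy id minted for its approval request earlier in this stream,
// 2. the id recovered from the prompt via providerOptions.openai.approvalRequestId,
// 3. the raw item id as a fallback.
function resolveAliasedToolCallId(
  item: { id: string; approval_request_id?: string | null },
  fromStream: Map<string, string>,
  fromPrompt: Record<string, string>,
): string {
  const approvalRequestId = item.approval_request_id ?? undefined;
  if (approvalRequestId == null) {
    return item.id;
  }
  return (
    fromStream.get(approvalRequestId) ??
    fromPrompt[approvalRequestId] ??
    item.id
  );
}
```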
@@ -5239,7 +5350,7 @@ var OpenAIResponsesLanguageModel = class {
  providerMetadata: {
  [providerKey]: {
  itemId: value.item.id,
- reasoningEncryptedContent: (
+ reasoningEncryptedContent: (_k = value.item.encrypted_content) != null ? _k : null
  }
  }
  });
@@ -5343,7 +5454,7 @@ var OpenAIResponsesLanguageModel = class {
  id: value.item_id,
  delta: value.delta
  });
- if (((
+ if (((_m = (_l = options.providerOptions) == null ? void 0 : _l.openai) == null ? void 0 : _m.logprobs) && value.logprobs) {
  logprobs.push(value.logprobs);
  }
  } else if (value.type === "response.reasoning_summary_part.added") {
@@ -5370,7 +5481,7 @@ var OpenAIResponsesLanguageModel = class {
  providerMetadata: {
  [providerKey]: {
  itemId: value.item_id,
- reasoningEncryptedContent: (
+ reasoningEncryptedContent: (_o = (_n = activeReasoning[value.item_id]) == null ? void 0 : _n.encryptedContent) != null ? _o : null
  }
  }
  });
@@ -5402,10 +5513,10 @@ var OpenAIResponsesLanguageModel = class {
  } else if (isResponseFinishedChunk(value)) {
  finishReason = {
  unified: mapOpenAIResponseFinishReason({
- finishReason: (
+ finishReason: (_p = value.response.incomplete_details) == null ? void 0 : _p.reason,
  hasFunctionCall
  }),
- raw: (
+ raw: (_r = (_q = value.response.incomplete_details) == null ? void 0 : _q.reason) != null ? _r : void 0
  };
  usage = value.response.usage;
  if (typeof value.response.service_tier === "string") {
@@ -5417,7 +5528,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "url",
- id: (
+ id: (_u = (_t = (_s = self.config).generateId) == null ? void 0 : _t.call(_s)) != null ? _u : (0, import_provider_utils32.generateId)(),
  url: value.annotation.url,
  title: value.annotation.title
  });
@@ -5425,10 +5536,10 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "document",
- id: (
+ id: (_x = (_w = (_v = self.config).generateId) == null ? void 0 : _w.call(_v)) != null ? _x : (0, import_provider_utils32.generateId)(),
  mediaType: "text/plain",
- title: (
- filename: (
+ title: (_z = (_y = value.annotation.quote) != null ? _y : value.annotation.filename) != null ? _z : "Document",
+ filename: (_A = value.annotation.filename) != null ? _A : value.annotation.file_id,
  ...value.annotation.file_id ? {
  providerMetadata: {
  [providerKey]: {
@@ -5441,10 +5552,10 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "document",
- id: (
+ id: (_D = (_C = (_B = self.config).generateId) == null ? void 0 : _C.call(_B)) != null ? _D : (0, import_provider_utils32.generateId)(),
  mediaType: "text/plain",
- title: (
- filename: (
+ title: (_F = (_E = value.annotation.filename) != null ? _E : value.annotation.file_id) != null ? _F : "Document",
+ filename: (_G = value.annotation.filename) != null ? _G : value.annotation.file_id,
  providerMetadata: {
  [providerKey]: {
  fileId: value.annotation.file_id,
@@ -5457,7 +5568,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "document",
- id: (
+ id: (_J = (_I = (_H = self.config).generateId) == null ? void 0 : _I.call(_H)) != null ? _J : (0, import_provider_utils32.generateId)(),
  mediaType: "application/octet-stream",
  title: value.annotation.file_id,
  filename: value.annotation.file_id,