@ai-sdk/openai 0.0.67 → 0.0.70
This diff shows the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- package/CHANGELOG.md +28 -0
- package/dist/index.d.mts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +181 -118
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +177 -112
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.d.mts +1 -1
- package/internal/dist/index.d.ts +1 -1
- package/internal/dist/index.js +181 -118
- package/internal/dist/index.js.map +1 -1
- package/internal/dist/index.mjs +177 -112
- package/internal/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.mjs
CHANGED
@@ -4,7 +4,7 @@ import { loadApiKey, withoutTrailingSlash } from "@ai-sdk/provider-utils";
 // src/openai-chat-language-model.ts
 import {
   InvalidResponseDataError,
-  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError3
 } from "@ai-sdk/provider";
 import {
   combineHeaders,
@@ -56,9 +56,31 @@ function convertToOpenAIChatMessages({
                };
              }
              case "file": {
-                throw new UnsupportedFunctionalityError({
-                  functionality: "File content parts in user messages"
-                });
+                if (part.data instanceof URL) {
+                  throw new UnsupportedFunctionalityError({
+                    functionality: "'File content parts with URL data' functionality not supported."
+                  });
+                }
+                switch (part.mimeType) {
+                  case "audio/wav": {
+                    return {
+                      type: "input_audio",
+                      input_audio: { data: part.data, format: "wav" }
+                    };
+                  }
+                  case "audio/mp3":
+                  case "audio/mpeg": {
+                    return {
+                      type: "input_audio",
+                      input_audio: { data: part.data, format: "mp3" }
+                    };
+                  }
+                  default: {
+                    throw new UnsupportedFunctionalityError({
+                      functionality: `File content part type ${part.mimeType} in user messages`
+                    });
+                  }
+                }
              }
            }
          })
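Note: the rewritten "file" branch above maps user-message file parts with audio/wav, audio/mp3, or audio/mpeg data onto OpenAI `input_audio` content parts and rejects URL-backed file data. A minimal usage sketch, assuming the calling `ai` package version accepts file content parts in user messages; the model id, file path, and prompt are illustrative:

import { readFile } from "node:fs/promises";
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

// Sketch: send a local WAV recording as a user-message file part.
// The provider converts it into an OpenAI `input_audio` part with format "wav";
// passing a URL instead of raw data would throw UnsupportedFunctionalityError.
const result = await generateText({
  model: openai("gpt-4o-audio-preview"),
  messages: [
    {
      role: "user",
      content: [
        { type: "text", text: "Transcribe this recording." },
        { type: "file", data: await readFile("./recording.wav"), mimeType: "audio/wav" }
      ]
    }
  ]
});
console.log(result.text);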
@@ -200,6 +222,107 @@ function getResponseMetadata({
   };
 }
 
+// src/openai-prepare-tools.ts
+import {
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
+} from "@ai-sdk/provider";
+function prepareTools({
+  mode,
+  useLegacyFunctionCalling = false,
+  structuredOutputs = false
+}) {
+  var _a;
+  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+  const toolWarnings = [];
+  if (tools == null) {
+    return { tools: void 0, tool_choice: void 0, toolWarnings };
+  }
+  const toolChoice = mode.toolChoice;
+  if (useLegacyFunctionCalling) {
+    const openaiFunctions = [];
+    for (const tool of tools) {
+      if (tool.type === "provider-defined") {
+        toolWarnings.push({ type: "unsupported-tool", tool });
+      } else {
+        openaiFunctions.push({
+          name: tool.name,
+          description: tool.description,
+          parameters: tool.parameters
+        });
+      }
+    }
+    if (toolChoice == null) {
+      return {
+        functions: openaiFunctions,
+        function_call: void 0,
+        toolWarnings
+      };
+    }
+    const type2 = toolChoice.type;
+    switch (type2) {
+      case "auto":
+      case "none":
+      case void 0:
+        return {
+          functions: openaiFunctions,
+          function_call: void 0,
+          toolWarnings
+        };
+      case "required":
+        throw new UnsupportedFunctionalityError2({
+          functionality: "useLegacyFunctionCalling and toolChoice: required"
+        });
+      default:
+        return {
+          functions: openaiFunctions,
+          function_call: { name: toolChoice.toolName },
+          toolWarnings
+        };
+    }
+  }
+  const openaiTools = [];
+  for (const tool of tools) {
+    if (tool.type === "provider-defined") {
+      toolWarnings.push({ type: "unsupported-tool", tool });
+    } else {
+      openaiTools.push({
+        type: "function",
+        function: {
+          name: tool.name,
+          description: tool.description,
+          parameters: tool.parameters,
+          strict: structuredOutputs === true ? true : void 0
+        }
+      });
+    }
+  }
+  if (toolChoice == null) {
+    return { tools: openaiTools, tool_choice: void 0, toolWarnings };
+  }
+  const type = toolChoice.type;
+  switch (type) {
+    case "auto":
+    case "none":
+    case "required":
+      return { tools: openaiTools, tool_choice: type, toolWarnings };
+    case "tool":
+      return {
+        tools: openaiTools,
+        tool_choice: {
+          type: "function",
+          function: {
+            name: toolChoice.toolName
+          }
+        },
+        toolWarnings
+      };
+    default: {
+      const _exhaustiveCheck = type;
+      throw new Error(`Unsupported tool choice type: ${_exhaustiveCheck}`);
+    }
+  }
+}
+
 // src/openai-chat-language-model.ts
 var OpenAIChatLanguageModel = class {
   constructor(modelId, settings, config) {
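The new `prepareTools` helper (extracted into src/openai-prepare-tools.ts) centralizes the tool and tool-choice mapping that was previously inlined in the chat model, and it now collects `toolWarnings` for provider-defined tools it cannot translate. An illustrative trace of the non-legacy path; the tool definition below is made up for the example:

// Hypothetical input, following the shapes handled above (non-legacy path):
const { tools, tool_choice, toolWarnings } = prepareTools({
  mode: {
    type: "regular",
    tools: [
      {
        type: "function",
        name: "getWeather",
        description: "Look up the current weather for a city",
        parameters: {
          type: "object",
          properties: { city: { type: "string" } },
          required: ["city"]
        }
      }
    ],
    toolChoice: { type: "tool", toolName: "getWeather" }
  },
  structuredOutputs: true
});
// tools        -> [{ type: "function", function: { name: "getWeather", ..., strict: true } }]
// tool_choice  -> { type: "function", function: { name: "getWeather" } }
// toolWarnings -> []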
@@ -212,6 +335,9 @@ var OpenAIChatLanguageModel = class {
     return this.settings.structuredOutputs === true;
   }
   get defaultObjectGenerationMode() {
+    if (isAudioModel(this.modelId)) {
+      return "tool";
+    }
     return this.supportsStructuredOutputs ? "json" : "tool";
   }
   get provider() {
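The new guard makes object generation default to tool mode for the audio preview model instead of JSON mode. A hedged sketch of the effect, assuming `generateObject` from the `ai` package; the schema and prompt are illustrative:

import { generateObject } from "ai";
import { openai } from "@ai-sdk/openai";
import { z } from "zod";

// Sketch: with the guard above, generateObject on gpt-4o-audio-preview
// falls back to tool-based extraction rather than JSON mode.
const { object } = await generateObject({
  model: openai("gpt-4o-audio-preview"),
  schema: z.object({ summary: z.string() }),
  prompt: "Summarize the recording in one sentence."
});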
@@ -252,12 +378,12 @@ var OpenAIChatLanguageModel = class {
     }
     const useLegacyFunctionCalling = this.settings.useLegacyFunctionCalling;
     if (useLegacyFunctionCalling && this.settings.parallelToolCalls === true) {
-      throw new UnsupportedFunctionalityError2({
+      throw new UnsupportedFunctionalityError3({
         functionality: "useLegacyFunctionCalling with parallelToolCalls"
       });
     }
     if (useLegacyFunctionCalling && this.settings.structuredOutputs === true) {
-      throw new UnsupportedFunctionalityError2({
+      throw new UnsupportedFunctionalityError3({
         functionality: "structuredOutputs with useLegacyFunctionCalling"
       });
     }
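These two checks reject setting combinations that the legacy `functions` / `function_call` API cannot express. A sketch of a configuration that would make the argument preparation throw; the model id is illustrative:

import { createOpenAI } from "@ai-sdk/openai";

const openai = createOpenAI({ apiKey: process.env.OPENAI_API_KEY });

// Sketch: legacy function calling cannot be combined with parallelToolCalls
// (or with structuredOutputs); doGenerate/doStream throws
// UnsupportedFunctionalityError while building the request arguments.
const model = openai.chat("gpt-4-turbo", {
  useLegacyFunctionCalling: true,
  parallelToolCalls: true
});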
@@ -298,16 +424,20 @@ var OpenAIChatLanguageModel = class {
     }
     switch (type) {
       case "regular": {
+        const { tools, tool_choice, functions, function_call, toolWarnings } = prepareTools({
+          mode,
+          useLegacyFunctionCalling,
+          structuredOutputs: this.settings.structuredOutputs
+        });
         return {
           args: {
             ...baseArgs,
-            ...prepareToolsAndToolChoice({
-              mode,
-              useLegacyFunctionCalling,
-              structuredOutputs: this.settings.structuredOutputs
-            })
+            tools,
+            tool_choice,
+            functions,
+            function_call
           },
-          warnings
+          warnings: [...warnings, ...toolWarnings]
         };
       }
       case "object-json": {
@@ -370,14 +500,14 @@ var OpenAIChatLanguageModel = class {
   }
   async doGenerate(options) {
     var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
-    const { args, warnings } = this.getArgs(options);
+    const { args: body, warnings } = this.getArgs(options);
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
       headers: combineHeaders(this.config.headers(), options.headers),
-      body: args,
+      body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createJsonResponseHandler(
         openAIChatResponseSchema
@@ -385,7 +515,7 @@ var OpenAIChatLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    const { messages: rawPrompt, ...rawSettings } = args;
+    const { messages: rawPrompt, ...rawSettings } = body;
    const choice = response.choices[0];
    let providerMetadata;
    if (((_b = (_a = response.usage) == null ? void 0 : _a.completion_tokens_details) == null ? void 0 : _b.reasoning_tokens) != null || ((_d = (_c = response.usage) == null ? void 0 : _c.prompt_tokens_details) == null ? void 0 : _d.cached_tokens) != null) {
@@ -422,6 +552,7 @@ var OpenAIChatLanguageModel = class {
       },
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
+      request: { body: JSON.stringify(body) },
       response: getResponseMetadata(response),
       warnings,
       logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs),
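With `request: { body: JSON.stringify(body) }` attached to the result, callers can inspect the exact JSON payload that was posted to /chat/completions. A hedged sketch, assuming the AI SDK version in use forwards the provider's request metadata onto its results:

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const result = await generateText({
  model: openai("gpt-4o"),
  prompt: "Say hello."
});

// Sketch: the serialized chat/completions request body is now available for
// logging and debugging (field name per the provider result above).
console.log(result.request?.body);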
@@ -466,18 +597,19 @@ var OpenAIChatLanguageModel = class {
       };
     }
     const { args, warnings } = this.getArgs(options);
+    const body = {
+      ...args,
+      stream: true,
+      // only include stream_options when in strict compatibility mode:
+      stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
+    };
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
       headers: combineHeaders(this.config.headers(), options.headers),
-      body: {
-        ...args,
-        stream: true,
-        // only include stream_options when in strict compatibility mode:
-        stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
-      },
+      body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createEventSourceResponseHandler(
         openaiChatChunkSchema
@@ -654,6 +786,7 @@ var OpenAIChatLanguageModel = class {
       ),
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
+      request: { body: JSON.stringify(body) },
       warnings
     };
   }
@@ -760,87 +893,16 @@ var openaiChatChunkSchema = z2.union([
   }),
   openAIErrorDataSchema
 ]);
-function prepareToolsAndToolChoice({
-  mode,
-  useLegacyFunctionCalling = false,
-  structuredOutputs = false
-}) {
-  var _a;
-  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
-  if (tools == null) {
-    return { tools: void 0, tool_choice: void 0 };
-  }
-  const toolChoice = mode.toolChoice;
-  if (useLegacyFunctionCalling) {
-    const mappedFunctions = tools.map((tool) => ({
-      name: tool.name,
-      description: tool.description,
-      parameters: tool.parameters
-    }));
-    if (toolChoice == null) {
-      return { functions: mappedFunctions, function_call: void 0 };
-    }
-    const type2 = toolChoice.type;
-    switch (type2) {
-      case "auto":
-      case "none":
-      case void 0:
-        return {
-          functions: mappedFunctions,
-          function_call: void 0
-        };
-      case "required":
-        throw new UnsupportedFunctionalityError2({
-          functionality: "useLegacyFunctionCalling and toolChoice: required"
-        });
-      default:
-        return {
-          functions: mappedFunctions,
-          function_call: { name: toolChoice.toolName }
-        };
-    }
-  }
-  const mappedTools = tools.map((tool) => ({
-    type: "function",
-    function: {
-      name: tool.name,
-      description: tool.description,
-      parameters: tool.parameters,
-      strict: structuredOutputs === true ? true : void 0
-    }
-  }));
-  if (toolChoice == null) {
-    return { tools: mappedTools, tool_choice: void 0 };
-  }
-  const type = toolChoice.type;
-  switch (type) {
-    case "auto":
-    case "none":
-    case "required":
-      return { tools: mappedTools, tool_choice: type };
-    case "tool":
-      return {
-        tools: mappedTools,
-        tool_choice: {
-          type: "function",
-          function: {
-            name: toolChoice.toolName
-          }
-        }
-      };
-    default: {
-      const _exhaustiveCheck = type;
-      throw new Error(`Unsupported tool choice type: ${_exhaustiveCheck}`);
-    }
-  }
-}
 function isReasoningModel(modelId) {
   return modelId.startsWith("o1-");
 }
+function isAudioModel(modelId) {
+  return modelId.startsWith("gpt-4o-audio-preview");
+}
 
 // src/openai-completion-language-model.ts
 import {
-  UnsupportedFunctionalityError as UnsupportedFunctionalityError4
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError5
 } from "@ai-sdk/provider";
 import {
   combineHeaders as combineHeaders2,
@@ -853,7 +915,7 @@ import { z as z3 } from "zod";
|
|
|
853
915
|
// src/convert-to-openai-completion-prompt.ts
|
|
854
916
|
import {
|
|
855
917
|
InvalidPromptError,
|
|
856
|
-
UnsupportedFunctionalityError as
|
|
918
|
+
UnsupportedFunctionalityError as UnsupportedFunctionalityError4
|
|
857
919
|
} from "@ai-sdk/provider";
|
|
858
920
|
function convertToOpenAICompletionPrompt({
|
|
859
921
|
prompt,
|
|
@@ -886,7 +948,7 @@ function convertToOpenAICompletionPrompt({
             return part.text;
           }
           case "image": {
-            throw new UnsupportedFunctionalityError3({
+            throw new UnsupportedFunctionalityError4({
               functionality: "images"
             });
           }
@@ -905,7 +967,7 @@ ${userMessage}
             return part.text;
           }
           case "tool-call": {
-            throw new UnsupportedFunctionalityError3({
+            throw new UnsupportedFunctionalityError4({
               functionality: "tool-call messages"
             });
           }
@@ -918,7 +980,7 @@ ${assistantMessage}
         break;
       }
       case "tool": {
-        throw new UnsupportedFunctionalityError3({
+        throw new UnsupportedFunctionalityError4({
           functionality: "tool messages"
         });
       }
@@ -1019,24 +1081,24 @@ var OpenAICompletionLanguageModel = class {
     switch (type) {
       case "regular": {
         if ((_a = mode.tools) == null ? void 0 : _a.length) {
-          throw new UnsupportedFunctionalityError4({
+          throw new UnsupportedFunctionalityError5({
            functionality: "tools"
          });
        }
        if (mode.toolChoice) {
-          throw new UnsupportedFunctionalityError4({
+          throw new UnsupportedFunctionalityError5({
            functionality: "toolChoice"
          });
        }
        return { args: baseArgs, warnings };
      }
      case "object-json": {
-        throw new UnsupportedFunctionalityError4({
+        throw new UnsupportedFunctionalityError5({
          functionality: "object-json mode"
        });
      }
      case "object-tool": {
-        throw new UnsupportedFunctionalityError4({
+        throw new UnsupportedFunctionalityError5({
          functionality: "object-tool mode"
        });
      }
@@ -1075,23 +1137,25 @@ var OpenAICompletionLanguageModel = class {
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
       response: getResponseMetadata(response),
-      warnings
+      warnings,
+      request: { body: JSON.stringify(args) }
     };
   }
   async doStream(options) {
     const { args, warnings } = this.getArgs(options);
+    const body = {
+      ...args,
+      stream: true,
+      // only include stream_options when in strict compatibility mode:
+      stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
+    };
     const { responseHeaders, value: response } = await postJsonToApi2({
       url: this.config.url({
         path: "/completions",
         modelId: this.modelId
       }),
       headers: combineHeaders2(this.config.headers(), options.headers),
-      body: {
-        ...args,
-        stream: true,
-        // only include stream_options when in strict compatibility mode:
-        stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
-      },
+      body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createEventSourceResponseHandler2(
         openaiCompletionChunkSchema
@@ -1165,7 +1229,8 @@ var OpenAICompletionLanguageModel = class {
       ),
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
-      warnings
+      warnings,
+      request: { body: JSON.stringify(body) }
     };
   }
 };