@providerprotocol/ai 0.0.21 → 0.0.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/anthropic/index.d.ts +1 -1
- package/dist/anthropic/index.js +100 -29
- package/dist/anthropic/index.js.map +1 -1
- package/dist/{chunk-Y3GBJNA2.js → chunk-7WYBJPJJ.js} +2 -2
- package/dist/chunk-I2VHCGQE.js +49 -0
- package/dist/chunk-I2VHCGQE.js.map +1 -0
- package/dist/{chunk-SKY2JLA7.js → chunk-MKDLXV4O.js} +1 -1
- package/dist/chunk-MKDLXV4O.js.map +1 -0
- package/dist/{chunk-Z7RBRCRN.js → chunk-NWS5IKNR.js} +37 -11
- package/dist/chunk-NWS5IKNR.js.map +1 -0
- package/dist/{chunk-EDENPF3E.js → chunk-RFWLEFAB.js} +96 -42
- package/dist/chunk-RFWLEFAB.js.map +1 -0
- package/dist/{chunk-Z4ILICF5.js → chunk-RS7C25LS.js} +35 -10
- package/dist/chunk-RS7C25LS.js.map +1 -0
- package/dist/google/index.d.ts +20 -6
- package/dist/google/index.js +261 -65
- package/dist/google/index.js.map +1 -1
- package/dist/http/index.d.ts +3 -3
- package/dist/http/index.js +4 -4
- package/dist/index.d.ts +7 -5
- package/dist/index.js +286 -119
- package/dist/index.js.map +1 -1
- package/dist/ollama/index.d.ts +1 -1
- package/dist/ollama/index.js +66 -12
- package/dist/ollama/index.js.map +1 -1
- package/dist/openai/index.d.ts +1 -1
- package/dist/openai/index.js +183 -43
- package/dist/openai/index.js.map +1 -1
- package/dist/openrouter/index.d.ts +1 -1
- package/dist/openrouter/index.js +161 -31
- package/dist/openrouter/index.js.map +1 -1
- package/dist/{provider-DGQHYE6I.d.ts → provider-DWEAzeM5.d.ts} +11 -1
- package/dist/proxy/index.d.ts +2 -2
- package/dist/proxy/index.js +171 -12
- package/dist/proxy/index.js.map +1 -1
- package/dist/{retry-Pcs3hnbu.d.ts → retry-DmPmqZL6.d.ts} +11 -2
- package/dist/{stream-Di9acos2.d.ts → stream-DbkLOIbJ.d.ts} +15 -5
- package/dist/xai/index.d.ts +1 -1
- package/dist/xai/index.js +139 -30
- package/dist/xai/index.js.map +1 -1
- package/package.json +1 -1
- package/dist/chunk-EDENPF3E.js.map +0 -1
- package/dist/chunk-SKY2JLA7.js.map +0 -1
- package/dist/chunk-Z4ILICF5.js.map +0 -1
- package/dist/chunk-Z7RBRCRN.js.map +0 -1
- package/dist/{chunk-Y3GBJNA2.js.map → chunk-7WYBJPJJ.js.map} +0 -0
@@ -1,4 +1,4 @@
-import { C as ContentBlock, l as ImageBlock, m as AudioBlock, V as VideoBlock, A as AssistantContent, U as UserContent } from './provider-DGQHYE6I.js';
+import { C as ContentBlock, l as ImageBlock, m as AudioBlock, V as VideoBlock, A as AssistantContent, U as UserContent } from './provider-DWEAzeM5.js';
 
 /**
  * @fileoverview JSON Schema types for tool parameters and structured outputs.
@@ -752,7 +752,12 @@ interface Turn<TData = unknown> {
     readonly messages: Message[];
     /** The final assistant response (last AssistantMessage in the turn) */
     readonly response: AssistantMessage;
-    /**
+    /**
+     * Tool executions that occurred during this turn.
+     *
+     * Execution order reflects completion timing, not call order.
+     * Correlate with tool calls using toolCallId.
+     */
     readonly toolExecutions: ToolExecution[];
     /** Aggregate token usage for the entire turn */
     readonly usage: TokenUsage;
@@ -767,6 +772,9 @@ interface Turn<TData = unknown> {
 /**
  * Turn serialized to JSON format.
  * Messages are converted to MessageJSON, response is omitted (computed from messages).
+ *
+ * @remarks
+ * This type is derived from {@link Turn} and should stay in sync with it.
  */
 type TurnJSON = Omit<Turn, 'messages' | 'response'> & {
     messages: MessageJSON[];
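The added doc comment specifies that `toolExecutions` is ordered by completion time rather than call order and should be correlated with tool calls via `toolCallId`. A minimal sketch of that correlation, assuming `Turn` and `ToolExecution` are importable from the package root; the helper below is illustrative, not part of the package:

```ts
import type { Turn, ToolExecution } from "@providerprotocol/ai"; // assumed export path

// Hypothetical helper: find the execution that completed for a given tool call,
// regardless of the order in which executions finished.
function executionFor(turn: Turn, toolCallId: string): ToolExecution | undefined {
  return turn.toolExecutions.find((exec) => exec.toolCallId === toolCallId);
}
```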
@@ -850,7 +858,7 @@ type StreamEventType =
   | 'video_delta'
   /** Incremental tool call data (arguments being streamed) */
   | 'tool_call_delta'
-  /** Tool execution has started */
+  /** Tool execution has started (may be emitted after completion in some implementations) */
   | 'tool_execution_start'
   /** Tool execution has completed */
   | 'tool_execution_end'
@@ -938,10 +946,12 @@ interface StreamEvent {
 interface StreamResult<TData = unknown> extends AsyncIterable<StreamEvent> {
     /**
      * Promise that resolves to the complete Turn after streaming finishes.
+     * Rejects if the stream is aborted or terminated early.
      */
     readonly turn: Promise<Turn<TData>>;
     /**
      * Aborts the stream, stopping further events and cancelling the request.
+     * This will cause {@link StreamResult.turn} to reject.
      */
     abort(): void;
 }
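`turn` is now documented to reject when the stream is aborted or terminated early, and `abort()` is documented to trigger that rejection. Consumers that may call `abort()` should therefore attach a rejection handler before iterating. A consumption sketch under those assumptions; how the `StreamResult` is obtained and the exact event fields are placeholders:

```ts
import type { StreamResult } from "@providerprotocol/ai"; // assumed export path

async function consume(result: StreamResult<unknown>) {
  // Attach the handler up front so an early abort() never surfaces as an unhandled rejection.
  const turn = result.turn.catch((err) => {
    console.warn("stream ended without a complete turn:", err);
    return undefined;
  });
  for await (const event of result) {
    if (event.type === "tool_execution_end") {
      // react to finished tool executions as they stream in
    }
  }
  return turn; // the Turn, or undefined if the stream was aborted
}
```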
@@ -950,7 +960,7 @@ interface StreamResult<TData = unknown> extends AsyncIterable<StreamEvent> {
 *
 * @typeParam TData - Type of the structured output data
 * @param generator - Async generator that yields stream events
-* @param
+* @param turnPromiseOrFactory - Promise or factory that resolves to the complete Turn
 * @param abortController - Controller for aborting the stream
 * @returns A StreamResult that can be iterated and awaited
 *
@@ -964,7 +974,7 @@ interface StreamResult<TData = unknown> extends AsyncIterable<StreamEvent> {
 * );
 * ```
 */
-declare function createStreamResult<TData = unknown>(generator: AsyncGenerator<StreamEvent, void, unknown>,
+declare function createStreamResult<TData = unknown>(generator: AsyncGenerator<StreamEvent, void, unknown>, turnPromiseOrFactory: Promise<Turn<TData>> | (() => Promise<Turn<TData>>), abortController: AbortController): StreamResult<TData>;
 /**
 * Creates a text delta stream event.
 *
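`createStreamResult` now takes `turnPromiseOrFactory`, which may be either a ready `Promise<Turn>` or a factory returning one, presumably so implementations can defer assembling the Turn. A wiring sketch; `emitEvents` and `buildTurn` are hypothetical helpers standing in for provider-specific code:

```ts
const controller = new AbortController();

const result = createStreamResult(
  emitEvents(controller.signal),  // AsyncGenerator<StreamEvent, void, unknown>
  () => buildTurn(),              // factory form of turnPromiseOrFactory (a plain Promise also works)
  controller
);
```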
package/dist/xai/index.d.ts CHANGED
package/dist/xai/index.js CHANGED
@@ -1,25 +1,30 @@
 import {
   Image
 } from "../chunk-WAKD3OO5.js";
+import {
+  parseJsonResponse
+} from "../chunk-I2VHCGQE.js";
 import {
   AssistantMessage,
   createProvider,
+  generateId,
   isAssistantMessage,
   isToolResultMessage,
   isUserMessage
 } from "../chunk-M4BMM5IB.js";
 import {
   parseSSEStream
-} from "../chunk-Z7RBRCRN.js";
+} from "../chunk-NWS5IKNR.js";
 import {
   resolveApiKey
-} from "../chunk-Y3GBJNA2.js";
+} from "../chunk-7WYBJPJJ.js";
 import {
   UPPError,
   doFetch,
   doStreamFetch,
-  normalizeHttpError
-} from "../chunk-EDENPF3E.js";
+  normalizeHttpError,
+  toError
+} from "../chunk-RFWLEFAB.js";
 
 // src/providers/xai/transform.completions.ts
 function transformRequest(request, modelId) {
@@ -55,9 +60,40 @@ function transformRequest(request, modelId) {
   return xaiRequest;
 }
 function normalizeSystem(system) {
-  if (
+  if (system === void 0 || system === null) return void 0;
   if (typeof system === "string") return system;
-
+  if (!Array.isArray(system)) {
+    throw new UPPError(
+      "System prompt must be a string or an array of text blocks",
+      "INVALID_REQUEST",
+      "xai",
+      "llm"
+    );
+  }
+  const texts = [];
+  for (const block of system) {
+    if (!block || typeof block !== "object" || !("text" in block)) {
+      throw new UPPError(
+        "System prompt array must contain objects with a text field",
+        "INVALID_REQUEST",
+        "xai",
+        "llm"
+      );
+    }
+    const textValue = block.text;
+    if (typeof textValue !== "string") {
+      throw new UPPError(
+        "System prompt text must be a string",
+        "INVALID_REQUEST",
+        "xai",
+        "llm"
+      );
+    }
+    if (textValue.length > 0) {
+      texts.push(textValue);
+    }
+  }
+  return texts.length > 0 ? texts.join("\n\n") : void 0;
 }
 function transformMessages(messages, system) {
   const result = [];
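The rewritten `normalizeSystem` (and its duplicates `normalizeSystem2`/`normalizeSystem3` below) accepts either a string or an array of `{ text }` blocks, joins non-empty texts with blank lines, and throws a `UPPError` with code `INVALID_REQUEST` for anything else. Illustrative inputs and outcomes; the function is internal to the bundle, so these calls are behavioral notes rather than public API usage:

```ts
normalizeSystem("You are terse.");                          // -> "You are terse."
normalizeSystem([{ text: "Rule A" }, { text: "Rule B" }]);  // -> "Rule A\n\nRule B"
normalizeSystem(undefined);                                 // -> undefined
normalizeSystem([{ text: "" }]);                            // -> undefined (empty texts are dropped)

normalizeSystem(42);             // throws UPPError("System prompt must be a string or an array of text blocks", ...)
normalizeSystem([{ foo: 1 }]);   // throws UPPError("System prompt array must contain objects with a text field", ...)
normalizeSystem([{ text: 7 }]);  // throws UPPError("System prompt text must be a string", ...)
```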
@@ -216,11 +252,12 @@ function transformResponse(data) {
       });
     }
   }
+  const responseId = data.id || generateId();
   const message = new AssistantMessage(
     textContent,
     toolCalls.length > 0 ? toolCalls : void 0,
     {
-      id:
+      id: responseId,
       metadata: {
         xai: {
           model: data.model,
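Each xAI transform path now resolves the message id before constructing the `AssistantMessage`, falling back to `generateId()` when the provider omits one, so `message.id` (and, in the Responses paths, `metadata.xai.response_id`) is never left undefined. Condensed form of the pattern, with the construction arguments abbreviated:

```ts
const responseId = data.id || generateId(); // provider-supplied id when present, locally generated otherwise
const message = new AssistantMessage(textContent, toolCalls, { id: responseId /* , metadata: ... */ });
```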
@@ -370,11 +407,12 @@ function buildResponseFromState(state) {
       arguments: args
     });
   }
+  const messageId = state.id || generateId();
   const message = new AssistantMessage(
     textContent,
     toolCalls.length > 0 ? toolCalls : void 0,
     {
-      id:
+      id: messageId,
       metadata: {
         xai: {
           model: state.model,
@@ -479,7 +517,7 @@ function createCompletionsLLMHandler() {
         "xai",
         "llm"
       );
-      const data = await response
+      const data = await parseJsonResponse(response, "xai", "llm");
       return transformResponse(data);
     },
     stream(request) {
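All three xAI handlers (and image generation below) now parse HTTP responses through the shared `parseJsonResponse(response, provider, modality)` helper introduced in `chunk-I2VHCGQE.js`, replacing the previous inline parsing (truncated in this diff view). The helper's implementation is not shown here; a rough sketch of what such a wrapper plausibly does, with an assumed error code:

```ts
// Assumption: approximate behavior only; the real helper lives in chunk-I2VHCGQE.js and may differ.
async function parseJsonResponseSketch(response: Response, provider: string, modality: string) {
  try {
    return await response.json();
  } catch (error) {
    // Error code is a guess; UPPError's (message, code, provider, modality) shape matches the diff.
    throw new UPPError(`Failed to parse ${provider} ${modality} response as JSON`, "PROVIDER_ERROR", provider, modality);
  }
}
```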
@@ -504,7 +542,8 @@ function createCompletionsLLMHandler() {
       body.stream_options = { include_usage: true };
       const headers = {
         "Content-Type": "application/json",
-        Authorization: `Bearer ${apiKey}`
+        Authorization: `Bearer ${apiKey}`,
+        Accept: "text/event-stream"
       };
       if (request.config.headers) {
         for (const [key, value] of Object.entries(request.config.headers)) {
@@ -565,8 +604,9 @@ function createCompletionsLLMHandler() {
         }
         responseResolve(buildResponseFromState(state));
       } catch (error) {
-
-
+        const err = toError(error);
+        responseReject(err);
+        throw err;
       }
     }
     return {
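The streaming pumps now normalize whatever was thrown into an `Error` via the newly imported `toError` before rejecting the pending turn promise, then rethrow so the async generator surfaces the same failure to iterators. This matters because `catch` can receive non-Error values (strings, aborted-fetch `DOMException`s, and so on). A sketch of the pattern; `responseResolve`, `responseReject`, `buildResponseFromState`, and `state` are names from the surrounding handler in the diff:

```ts
async function pump() {
  try {
    // ...consume SSE events and update state...
    responseResolve(buildResponseFromState(state));
  } catch (error) {
    const err = toError(error); // wraps non-Error throwables into a proper Error
    responseReject(err);        // StreamResult.turn rejects with the normalized error
    throw err;                  // iterators of the event stream see the same error
  }
}
```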
@@ -622,9 +662,40 @@ function transformRequest2(request, modelId) {
   return xaiRequest;
 }
 function normalizeSystem2(system) {
-  if (
+  if (system === void 0 || system === null) return void 0;
   if (typeof system === "string") return system;
-
+  if (!Array.isArray(system)) {
+    throw new UPPError(
+      "System prompt must be a string or an array of text blocks",
+      "INVALID_REQUEST",
+      "xai",
+      "llm"
+    );
+  }
+  const texts = [];
+  for (const block of system) {
+    if (!block || typeof block !== "object" || !("text" in block)) {
+      throw new UPPError(
+        "System prompt array must contain objects with a text field",
+        "INVALID_REQUEST",
+        "xai",
+        "llm"
+      );
+    }
+    const textValue = block.text;
+    if (typeof textValue !== "string") {
+      throw new UPPError(
+        "System prompt text must be a string",
+        "INVALID_REQUEST",
+        "xai",
+        "llm"
+      );
+    }
+    if (textValue.length > 0) {
+      texts.push(textValue);
+    }
+  }
+  return texts.length > 0 ? texts.join("\n\n") : void 0;
 }
 function transformInputItems(messages, system) {
   const result = [];
@@ -805,16 +876,17 @@ function transformResponse2(data) {
       });
     }
   }
+  const responseId = data.id || generateId();
   const message = new AssistantMessage(
     textContent,
     toolCalls.length > 0 ? toolCalls : void 0,
     {
-      id:
+      id: responseId,
       metadata: {
         xai: {
           model: data.model,
           status: data.status,
-          response_id:
+          response_id: responseId,
           functionCallItems: functionCallItems.length > 0 ? functionCallItems : void 0,
           citations: data.citations,
           inline_citations: data.inline_citations
@@ -1040,16 +1112,17 @@ function buildResponseFromState2(state) {
       });
     }
   }
+  const responseId = state.id || generateId();
   const message = new AssistantMessage(
     textContent,
     toolCalls.length > 0 ? toolCalls : void 0,
     {
-      id:
+      id: responseId,
      metadata: {
        xai: {
          model: state.model,
          status: state.status,
-          response_id:
+          response_id: responseId,
          functionCallItems: functionCallItems.length > 0 ? functionCallItems : void 0
        }
      }
@@ -1142,7 +1215,7 @@ function createResponsesLLMHandler() {
         "xai",
         "llm"
       );
-      const data = await response
+      const data = await parseJsonResponse(response, "xai", "llm");
       if (data.status === "failed" && data.error) {
         throw new UPPError(
           data.error.message,
@@ -1174,7 +1247,8 @@ function createResponsesLLMHandler() {
       body.stream = true;
       const headers = {
         "Content-Type": "application/json",
-        Authorization: `Bearer ${apiKey}`
+        Authorization: `Bearer ${apiKey}`,
+        Accept: "text/event-stream"
       };
       if (request.config.headers) {
         for (const [key, value] of Object.entries(request.config.headers)) {
@@ -1235,8 +1309,9 @@ function createResponsesLLMHandler() {
         }
         responseResolve(buildResponseFromState2(state));
       } catch (error) {
-
-
+        const err = toError(error);
+        responseReject(err);
+        throw err;
       }
     }
     return {
@@ -1254,9 +1329,40 @@ function createResponsesLLMHandler() {
 
 // src/providers/xai/transform.messages.ts
 function normalizeSystem3(system) {
-  if (
+  if (system === void 0 || system === null) return void 0;
   if (typeof system === "string") return system;
-
+  if (!Array.isArray(system)) {
+    throw new UPPError(
+      "System prompt must be a string or an array of text blocks",
+      "INVALID_REQUEST",
+      "xai",
+      "llm"
+    );
+  }
+  const texts = [];
+  for (const block of system) {
+    if (!block || typeof block !== "object" || !("text" in block)) {
+      throw new UPPError(
+        "System prompt array must contain objects with a text field",
+        "INVALID_REQUEST",
+        "xai",
+        "llm"
+      );
+    }
+    const textValue = block.text;
+    if (typeof textValue !== "string") {
+      throw new UPPError(
+        "System prompt text must be a string",
+        "INVALID_REQUEST",
+        "xai",
+        "llm"
+      );
+    }
+    if (textValue.length > 0) {
+      texts.push(textValue);
+    }
+  }
+  return texts.length > 0 ? texts.join("\n\n") : void 0;
 }
 function transformRequest3(request, modelId) {
   const params = request.params ?? {};
@@ -1547,11 +1653,12 @@ function buildResponseFromState3(state) {
       });
     }
   }
+  const messageId = state.messageId || generateId();
   const message = new AssistantMessage(
     textContent,
     toolCalls.length > 0 ? toolCalls : void 0,
     {
-      id:
+      id: messageId,
       metadata: {
         xai: {
           stop_reason: state.stopReason,
@@ -1639,7 +1746,7 @@ function createMessagesLLMHandler() {
         "xai",
         "llm"
       );
-      const data = await response
+      const data = await parseJsonResponse(response, "xai", "llm");
       return transformResponse3(data);
     },
     stream(request) {
@@ -1664,7 +1771,8 @@ function createMessagesLLMHandler() {
       const headers = {
         "Content-Type": "application/json",
         "x-api-key": apiKey,
-        "anthropic-version": "2023-06-01"
+        "anthropic-version": "2023-06-01",
+        Accept: "text/event-stream"
       };
       if (request.config.headers) {
         for (const [key, value] of Object.entries(request.config.headers)) {
@@ -1721,8 +1829,9 @@ function createMessagesLLMHandler() {
         }
         responseResolve(buildResponseFromState3(state));
       } catch (error) {
-
-
+        const err = toError(error);
+        responseReject(err);
+        throw err;
       }
     }
     return {
@@ -1813,7 +1922,7 @@ async function executeGenerate(modelId, request) {
     body: JSON.stringify(body),
     signal: request.signal
   }, request.config, "xai", "image");
-  const data = await response
+  const data = await parseJsonResponse(response, "xai", "image");
   return transformResponse4(data);
 }
 function transformResponse4(data) {