@ai-sdk/xai 1.0.5 → 1.0.7
This diff shows the published contents of the two package versions as they appear in their public registry and is provided for informational purposes only.
- package/CHANGELOG.md +14 -0
- package/dist/index.d.mts +26 -20
- package/dist/index.d.ts +26 -20
- package/dist/index.js +21 -595
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +19 -607
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -3
package/dist/index.mjs
CHANGED
@@ -2,634 +2,46 @@
 import {
   NoSuchModelError
 } from "@ai-sdk/provider";
+import { OpenAICompatibleChatLanguageModel } from "@ai-sdk/openai-compatible";
 import {
   loadApiKey,
   withoutTrailingSlash
 } from "@ai-sdk/provider-utils";
-
-// src/xai-chat-language-model.ts
-import {
-  InvalidResponseDataError,
-  UnsupportedFunctionalityError as UnsupportedFunctionalityError3
-} from "@ai-sdk/provider";
-import {
-  combineHeaders,
-  createEventSourceResponseHandler,
-  createJsonResponseHandler,
-  generateId,
-  isParsableJson,
-  postJsonToApi
-} from "@ai-sdk/provider-utils";
-import { z as z2 } from "zod";
-
-// src/convert-to-xai-chat-messages.ts
-import {
-  UnsupportedFunctionalityError
-} from "@ai-sdk/provider";
-import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
-function convertToXaiChatMessages(prompt) {
-  const messages = [];
-  for (const { role, content } of prompt) {
-    switch (role) {
-      case "system": {
-        messages.push({ role: "system", content });
-        break;
-      }
-      case "user": {
-        if (content.length === 1 && content[0].type === "text") {
-          messages.push({ role: "user", content: content[0].text });
-          break;
-        }
-        messages.push({
-          role: "user",
-          content: content.map((part) => {
-            var _a;
-            switch (part.type) {
-              case "text": {
-                return { type: "text", text: part.text };
-              }
-              case "image": {
-                return {
-                  type: "image_url",
-                  image_url: {
-                    url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${convertUint8ArrayToBase64(part.image)}`
-                  }
-                };
-              }
-              case "file": {
-                throw new UnsupportedFunctionalityError({
-                  functionality: "File content parts in user messages"
-                });
-              }
-            }
-          })
-        });
-        break;
-      }
-      case "assistant": {
-        let text = "";
-        const toolCalls = [];
-        for (const part of content) {
-          switch (part.type) {
-            case "text": {
-              text += part.text;
-              break;
-            }
-            case "tool-call": {
-              toolCalls.push({
-                id: part.toolCallId,
-                type: "function",
-                function: {
-                  name: part.toolName,
-                  arguments: JSON.stringify(part.args)
-                }
-              });
-              break;
-            }
-            default: {
-              const _exhaustiveCheck = part;
-              throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
-            }
-          }
-        }
-        messages.push({
-          role: "assistant",
-          content: text,
-          tool_calls: toolCalls.length > 0 ? toolCalls : void 0
-        });
-        break;
-      }
-      case "tool": {
-        for (const toolResponse of content) {
-          messages.push({
-            role: "tool",
-            tool_call_id: toolResponse.toolCallId,
-            content: JSON.stringify(toolResponse.result)
-          });
-        }
-        break;
-      }
-      default: {
-        const _exhaustiveCheck = role;
-        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
-      }
-    }
-  }
-  return messages;
-}
-
-// src/get-response-metadata.ts
-function getResponseMetadata({
-  id,
-  model,
-  created
-}) {
-  return {
-    id: id != null ? id : void 0,
-    modelId: model != null ? model : void 0,
-    timestamp: created != null ? new Date(created * 1e3) : void 0
-  };
-}
-
-// src/xai-error.ts
 import { z } from "zod";
-
-var xaiErrorDataSchema = z.object({
+var xaiErrorSchema = z.object({
   code: z.string(),
   error: z.string()
 });
-var
-  errorSchema:
+var xaiErrorStructure = {
+  errorSchema: xaiErrorSchema,
   errorToMessage: (data) => data.error
-});
-
-// src/xai-prepare-tools.ts
-import {
-  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
-} from "@ai-sdk/provider";
-function prepareTools({
-  mode
-}) {
-  var _a;
-  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
-  const toolWarnings = [];
-  if (tools == null) {
-    return { tools: void 0, tool_choice: void 0, toolWarnings };
-  }
-  const toolChoice = mode.toolChoice;
-  const xaiTools = [];
-  for (const tool of tools) {
-    if (tool.type === "provider-defined") {
-      toolWarnings.push({ type: "unsupported-tool", tool });
-    } else {
-      xaiTools.push({
-        type: "function",
-        function: {
-          name: tool.name,
-          description: tool.description,
-          parameters: tool.parameters
-        }
-      });
-    }
-  }
-  if (toolChoice == null) {
-    return { tools: xaiTools, tool_choice: void 0, toolWarnings };
-  }
-  const type = toolChoice.type;
-  switch (type) {
-    case "auto":
-    case "none":
-    case "required":
-      return { tools: xaiTools, tool_choice: type, toolWarnings };
-    case "tool":
-      return {
-        tools: xaiTools,
-        tool_choice: {
-          type: "function",
-          function: {
-            name: toolChoice.toolName
-          }
-        },
-        toolWarnings
-      };
-    default: {
-      const _exhaustiveCheck = type;
-      throw new UnsupportedFunctionalityError2({
-        functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
-      });
-    }
-  }
-}
-
-// src/map-xai-finish-reason.ts
-function mapXaiFinishReason(finishReason) {
-  switch (finishReason) {
-    case "stop":
-      return "stop";
-    case "length":
-      return "length";
-    case "content_filter":
-      return "content-filter";
-    case "function_call":
-    case "tool_calls":
-      return "tool-calls";
-    default:
-      return "unknown";
-  }
-}
-
-// src/xai-chat-language-model.ts
-var XaiChatLanguageModel = class {
-  constructor(modelId, settings, config) {
-    this.specificationVersion = "v1";
-    this.supportsStructuredOutputs = false;
-    this.defaultObjectGenerationMode = "tool";
-    this.modelId = modelId;
-    this.settings = settings;
-    this.config = config;
-  }
-  get provider() {
-    return this.config.provider;
-  }
-  getArgs({
-    mode,
-    prompt,
-    maxTokens,
-    temperature,
-    topP,
-    topK,
-    frequencyPenalty,
-    presencePenalty,
-    stopSequences,
-    responseFormat,
-    seed,
-    stream
-  }) {
-    const type = mode.type;
-    const warnings = [];
-    if (topK != null) {
-      warnings.push({
-        type: "unsupported-setting",
-        setting: "topK"
-      });
-    }
-    if (responseFormat != null && responseFormat.type === "json" && responseFormat.schema != null) {
-      warnings.push({
-        type: "unsupported-setting",
-        setting: "responseFormat",
-        details: "JSON response format schema is not supported"
-      });
-    }
-    const baseArgs = {
-      // model id:
-      model: this.modelId,
-      // model specific settings:
-      user: this.settings.user,
-      // standardized settings:
-      max_tokens: maxTokens,
-      temperature,
-      top_p: topP,
-      frequency_penalty: frequencyPenalty,
-      presence_penalty: presencePenalty,
-      stop: stopSequences,
-      seed,
-      // response format:
-      response_format: (
-        // json object response format is not currently supported
-        void 0
-      ),
-      // messages:
-      messages: convertToXaiChatMessages(prompt)
-    };
-    switch (type) {
-      case "regular": {
-        const { tools, tool_choice, toolWarnings } = prepareTools({ mode });
-        return {
-          args: {
-            ...baseArgs,
-            tools,
-            tool_choice
-          },
-          warnings: [...warnings, ...toolWarnings]
-        };
-      }
-      case "object-json": {
-        throw new UnsupportedFunctionalityError3({
-          functionality: "object-json mode"
-        });
-      }
-      case "object-tool": {
-        return {
-          args: {
-            ...baseArgs,
-            tool_choice: {
-              type: "function",
-              function: { name: mode.tool.name }
-            },
-            tools: [
-              {
-                type: "function",
-                function: {
-                  name: mode.tool.name,
-                  description: mode.tool.description,
-                  parameters: mode.tool.parameters
-                }
-              }
-            ]
-          },
-          warnings
-        };
-      }
-      default: {
-        const _exhaustiveCheck = type;
-        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-      }
-    }
-  }
-  async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f;
-    const { args, warnings } = this.getArgs({ ...options, stream: false });
-    const body = JSON.stringify(args);
-    const { responseHeaders, value: response } = await postJsonToApi({
-      url: this.config.url({
-        path: "/chat/completions",
-        modelId: this.modelId
-      }),
-      headers: combineHeaders(this.config.headers(), options.headers),
-      body: args,
-      failedResponseHandler: xaiFailedResponseHandler,
-      successfulResponseHandler: createJsonResponseHandler(
-        xaiChatResponseSchema
-      ),
-      abortSignal: options.abortSignal,
-      fetch: this.config.fetch
-    });
-    const { messages: rawPrompt, ...rawSettings } = args;
-    const choice = response.choices[0];
-    return {
-      text: (_a = choice.message.content) != null ? _a : void 0,
-      toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => {
-        var _a2;
-        return {
-          toolCallType: "function",
-          toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
-          toolName: toolCall.function.name,
-          args: toolCall.function.arguments
-        };
-      }),
-      finishReason: mapXaiFinishReason(choice.finish_reason),
-      usage: {
-        promptTokens: (_d = (_c = response.usage) == null ? void 0 : _c.prompt_tokens) != null ? _d : NaN,
-        completionTokens: (_f = (_e = response.usage) == null ? void 0 : _e.completion_tokens) != null ? _f : NaN
-      },
-      rawCall: { rawPrompt, rawSettings },
-      rawResponse: { headers: responseHeaders },
-      response: getResponseMetadata(response),
-      warnings,
-      request: { body }
-    };
-  }
-  async doStream(options) {
-    const { args, warnings } = this.getArgs({ ...options, stream: true });
-    const body = JSON.stringify({ ...args, stream: true });
-    const { responseHeaders, value: response } = await postJsonToApi({
-      url: this.config.url({
-        path: "/chat/completions",
-        modelId: this.modelId
-      }),
-      headers: combineHeaders(this.config.headers(), options.headers),
-      body: {
-        ...args,
-        stream: true
-      },
-      failedResponseHandler: xaiFailedResponseHandler,
-      successfulResponseHandler: createEventSourceResponseHandler(xaiChatChunkSchema),
-      abortSignal: options.abortSignal,
-      fetch: this.config.fetch
-    });
-    const { messages: rawPrompt, ...rawSettings } = args;
-    const toolCalls = [];
-    let finishReason = "unknown";
-    let usage = {
-      promptTokens: void 0,
-      completionTokens: void 0
-    };
-    let isFirstChunk = true;
-    let providerMetadata;
-    return {
-      stream: response.pipeThrough(
-        new TransformStream({
-          transform(chunk, controller) {
-            var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
-            if (!chunk.success) {
-              finishReason = "error";
-              controller.enqueue({ type: "error", error: chunk.error });
-              return;
-            }
-            const value = chunk.value;
-            if ("error" in value) {
-              finishReason = "error";
-              controller.enqueue({ type: "error", error: value.error });
-              return;
-            }
-            if (isFirstChunk) {
-              isFirstChunk = false;
-              controller.enqueue({
-                type: "response-metadata",
-                ...getResponseMetadata(value)
-              });
-            }
-            if (value.usage != null) {
-              usage = {
-                promptTokens: (_a = value.usage.prompt_tokens) != null ? _a : void 0,
-                completionTokens: (_b = value.usage.completion_tokens) != null ? _b : void 0
-              };
-            }
-            const choice = value.choices[0];
-            if ((choice == null ? void 0 : choice.finish_reason) != null) {
-              finishReason = mapXaiFinishReason(choice.finish_reason);
-            }
-            if ((choice == null ? void 0 : choice.delta) == null) {
-              return;
-            }
-            const delta = choice.delta;
-            if (delta.content != null) {
-              controller.enqueue({
-                type: "text-delta",
-                textDelta: delta.content
-              });
-            }
-            if (delta.tool_calls != null) {
-              for (const toolCallDelta of delta.tool_calls) {
-                const index = toolCallDelta.index;
-                if (toolCalls[index] == null) {
-                  if (toolCallDelta.type !== "function") {
-                    throw new InvalidResponseDataError({
-                      data: toolCallDelta,
-                      message: `Expected 'function' type.`
-                    });
-                  }
-                  if (toolCallDelta.id == null) {
-                    throw new InvalidResponseDataError({
-                      data: toolCallDelta,
-                      message: `Expected 'id' to be a string.`
-                    });
-                  }
-                  if (((_c = toolCallDelta.function) == null ? void 0 : _c.name) == null) {
-                    throw new InvalidResponseDataError({
-                      data: toolCallDelta,
-                      message: `Expected 'function.name' to be a string.`
-                    });
-                  }
-                  toolCalls[index] = {
-                    id: toolCallDelta.id,
-                    type: "function",
-                    function: {
-                      name: toolCallDelta.function.name,
-                      arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
-                    }
-                  };
-                  const toolCall2 = toolCalls[index];
-                  if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null) {
-                    if (toolCall2.function.arguments.length > 0) {
-                      controller.enqueue({
-                        type: "tool-call-delta",
-                        toolCallType: "function",
-                        toolCallId: toolCall2.id,
-                        toolName: toolCall2.function.name,
-                        argsTextDelta: toolCall2.function.arguments
-                      });
-                    }
-                    if (isParsableJson(toolCall2.function.arguments)) {
-                      controller.enqueue({
-                        type: "tool-call",
-                        toolCallType: "function",
-                        toolCallId: (_g = toolCall2.id) != null ? _g : generateId(),
-                        toolName: toolCall2.function.name,
-                        args: toolCall2.function.arguments
-                      });
-                    }
-                  }
-                  continue;
-                }
-                const toolCall = toolCalls[index];
-                if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
-                  toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
-                }
-                controller.enqueue({
-                  type: "tool-call-delta",
-                  toolCallType: "function",
-                  toolCallId: toolCall.id,
-                  toolName: toolCall.function.name,
-                  argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
-                });
-                if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && isParsableJson(toolCall.function.arguments)) {
-                  controller.enqueue({
-                    type: "tool-call",
-                    toolCallType: "function",
-                    toolCallId: (_n = toolCall.id) != null ? _n : generateId(),
-                    toolName: toolCall.function.name,
-                    args: toolCall.function.arguments
-                  });
-                }
-              }
-            }
-          },
-          flush(controller) {
-            var _a, _b;
-            controller.enqueue({
-              type: "finish",
-              finishReason,
-              usage: {
-                promptTokens: (_a = usage.promptTokens) != null ? _a : NaN,
-                completionTokens: (_b = usage.completionTokens) != null ? _b : NaN
-              },
-              ...providerMetadata != null ? { providerMetadata } : {}
-            });
-          }
-        })
-      ),
-      rawCall: { rawPrompt, rawSettings },
-      rawResponse: { headers: responseHeaders },
-      warnings,
-      request: { body }
-    };
-  }
 };
-var xaiChatResponseSchema = z2.object({
-  id: z2.string().nullish(),
-  created: z2.number().nullish(),
-  model: z2.string().nullish(),
-  choices: z2.array(
-    z2.object({
-      message: z2.object({
-        role: z2.literal("assistant").nullish(),
-        content: z2.string().nullish(),
-        tool_calls: z2.array(
-          z2.object({
-            id: z2.string().nullish(),
-            type: z2.literal("function"),
-            function: z2.object({
-              name: z2.string(),
-              arguments: z2.string()
-            })
-          })
-        ).nullish()
-      }),
-      index: z2.number(),
-      finish_reason: z2.string().nullish()
-    })
-  ),
-  usage: z2.object({
-    prompt_tokens: z2.number().nullish(),
-    completion_tokens: z2.number().nullish()
-  }).nullish()
-});
-var xaiChatChunkSchema = z2.union([
-  z2.object({
-    id: z2.string().nullish(),
-    created: z2.number().nullish(),
-    model: z2.string().nullish(),
-    choices: z2.array(
-      z2.object({
-        delta: z2.object({
-          role: z2.enum(["assistant"]).nullish(),
-          content: z2.string().nullish(),
-          tool_calls: z2.array(
-            z2.object({
-              index: z2.number(),
-              id: z2.string().nullish(),
-              type: z2.literal("function").optional(),
-              function: z2.object({
-                name: z2.string().nullish(),
-                arguments: z2.string().nullish()
-              })
-            })
-          ).nullish()
-        }).nullish(),
-        finish_reason: z2.string().nullable().optional(),
-        index: z2.number()
-      })
-    ),
-    usage: z2.object({
-      prompt_tokens: z2.number().nullish(),
-      completion_tokens: z2.number().nullish()
-    }).nullish()
-  }),
-  xaiErrorDataSchema
-]);
-
-// src/xai-provider.ts
 function createXai(options = {}) {
   var _a;
-  const baseURL =
+  const baseURL = withoutTrailingSlash(
+    (_a = options.baseURL) != null ? _a : "https://api.x.ai/v1"
+  );
   const getHeaders = () => ({
     Authorization: `Bearer ${loadApiKey({
       apiKey: options.apiKey,
       environmentVariableName: "XAI_API_KEY",
-      description: "xAI"
+      description: "xAI API key"
    })}`,
     ...options.headers
   });
-  const
-
-
-
-
-
-
-
-
-        "The xAI model function cannot be called with the new keyword."
-      );
-    }
-    return createChatModel(modelId, settings);
-  };
-  const provider = function(modelId, settings) {
-    return createLanguageModel(modelId, settings);
+  const createLanguageModel = (modelId, settings = {}) => {
+    return new OpenAICompatibleChatLanguageModel(modelId, settings, {
+      provider: "xai.chat",
+      url: ({ path }) => `${baseURL}${path}`,
+      headers: getHeaders,
+      fetch: options.fetch,
+      defaultObjectGenerationMode: "tool",
+      errorStructure: xaiErrorStructure
+    });
   };
+  const provider = (modelId, settings) => createLanguageModel(modelId, settings);
   provider.languageModel = createLanguageModel;
-  provider.chat =
+  provider.chat = createLanguageModel;
   provider.textEmbeddingModel = (modelId) => {
     throw new NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
   };