@ai-sdk/xai 0.0.0-64aae7dd-20260114144918 → 0.0.0-98261322-20260122142521
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +64 -5
- package/dist/index.js +1 -1
- package/dist/index.mjs +1 -1
- package/docs/01-xai.mdx +697 -0
- package/package.json +11 -6
- package/src/convert-to-xai-chat-messages.test.ts +243 -0
- package/src/convert-to-xai-chat-messages.ts +142 -0
- package/src/convert-xai-chat-usage.test.ts +240 -0
- package/src/convert-xai-chat-usage.ts +23 -0
- package/src/get-response-metadata.ts +19 -0
- package/src/index.ts +14 -0
- package/src/map-xai-finish-reason.ts +19 -0
- package/src/responses/__fixtures__/xai-code-execution-tool.1.json +68 -0
- package/src/responses/__fixtures__/xai-text-streaming.1.chunks.txt +698 -0
- package/src/responses/__fixtures__/xai-text-with-reasoning-streaming-store-false.1.chunks.txt +655 -0
- package/src/responses/__fixtures__/xai-text-with-reasoning-streaming.1.chunks.txt +679 -0
- package/src/responses/__fixtures__/xai-web-search-tool.1.chunks.txt +274 -0
- package/src/responses/__fixtures__/xai-web-search-tool.1.json +90 -0
- package/src/responses/__fixtures__/xai-x-search-tool.1.json +149 -0
- package/src/responses/__fixtures__/xai-x-search-tool.chunks.txt +1757 -0
- package/src/responses/__snapshots__/xai-responses-language-model.test.ts.snap +21929 -0
- package/src/responses/convert-to-xai-responses-input.test.ts +463 -0
- package/src/responses/convert-to-xai-responses-input.ts +206 -0
- package/src/responses/convert-xai-responses-usage.ts +24 -0
- package/src/responses/map-xai-responses-finish-reason.ts +20 -0
- package/src/responses/xai-responses-api.ts +393 -0
- package/src/responses/xai-responses-language-model.test.ts +1803 -0
- package/src/responses/xai-responses-language-model.ts +732 -0
- package/src/responses/xai-responses-options.ts +34 -0
- package/src/responses/xai-responses-prepare-tools.test.ts +497 -0
- package/src/responses/xai-responses-prepare-tools.ts +226 -0
- package/src/tool/code-execution.ts +17 -0
- package/src/tool/index.ts +15 -0
- package/src/tool/view-image.ts +20 -0
- package/src/tool/view-x-video.ts +18 -0
- package/src/tool/web-search.ts +56 -0
- package/src/tool/x-search.ts +63 -0
- package/src/version.ts +6 -0
- package/src/xai-chat-language-model.test.ts +1805 -0
- package/src/xai-chat-language-model.ts +681 -0
- package/src/xai-chat-options.ts +131 -0
- package/src/xai-chat-prompt.ts +44 -0
- package/src/xai-error.ts +19 -0
- package/src/xai-image-settings.ts +1 -0
- package/src/xai-prepare-tools.ts +95 -0
- package/src/xai-provider.test.ts +167 -0
- package/src/xai-provider.ts +162 -0
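Taken together, the new files add a chat-completions implementation, a Responses API implementation, provider-defined tools, and docs. For orientation only, a minimal consumer-side sketch — assuming the package keeps the standard AI SDK entry points (`xai` / `createXai` re-exported from src/index.ts) and is used with `generateText` from the `ai` package; the model id is illustrative:

    // illustrative usage sketch, not part of this diff
    import { generateText } from 'ai';
    import { xai } from '@ai-sdk/xai';

    const { text } = await generateText({
      model: xai('grok-3'), // illustrative model id
      prompt: 'Hello from the new provider version',
    });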
package/src/xai-chat-language-model.ts
@@ -0,0 +1,681 @@
import {
  APICallError,
  LanguageModelV3,
  LanguageModelV3CallOptions,
  LanguageModelV3Content,
  LanguageModelV3FinishReason,
  LanguageModelV3GenerateResult,
  LanguageModelV3StreamPart,
  LanguageModelV3StreamResult,
  LanguageModelV3Usage,
  SharedV3Warning,
} from '@ai-sdk/provider';
import {
  combineHeaders,
  createEventSourceResponseHandler,
  createJsonResponseHandler,
  extractResponseHeaders,
  FetchFunction,
  parseProviderOptions,
  ParseResult,
  postJsonToApi,
  safeParseJSON,
} from '@ai-sdk/provider-utils';
import { z } from 'zod/v4';
import { convertToXaiChatMessages } from './convert-to-xai-chat-messages';
import { convertXaiChatUsage } from './convert-xai-chat-usage';
import { getResponseMetadata } from './get-response-metadata';
import { mapXaiFinishReason } from './map-xai-finish-reason';
import { XaiChatModelId, xaiProviderOptions } from './xai-chat-options';
import { xaiFailedResponseHandler } from './xai-error';
import { prepareTools } from './xai-prepare-tools';

type XaiChatConfig = {
  provider: string;
  baseURL: string | undefined;
  headers: () => Record<string, string | undefined>;
  generateId: () => string;
  fetch?: FetchFunction;
};

export class XaiChatLanguageModel implements LanguageModelV3 {
  readonly specificationVersion = 'v3';

  readonly modelId: XaiChatModelId;

  private readonly config: XaiChatConfig;

  constructor(modelId: XaiChatModelId, config: XaiChatConfig) {
    this.modelId = modelId;
    this.config = config;
  }

  get provider(): string {
    return this.config.provider;
  }

  readonly supportedUrls: Record<string, RegExp[]> = {
    'image/*': [/^https?:\/\/.*$/],
  };

  private async getArgs({
    prompt,
    maxOutputTokens,
    temperature,
    topP,
    topK,
    frequencyPenalty,
    presencePenalty,
    stopSequences,
    seed,
    responseFormat,
    providerOptions,
    tools,
    toolChoice,
  }: LanguageModelV3CallOptions) {
    const warnings: SharedV3Warning[] = [];

    // parse xai-specific provider options
    const options =
      (await parseProviderOptions({
        provider: 'xai',
        providerOptions,
        schema: xaiProviderOptions,
      })) ?? {};

    // check for unsupported parameters
    if (topK != null) {
      warnings.push({ type: 'unsupported', feature: 'topK' });
    }

    if (frequencyPenalty != null) {
      warnings.push({ type: 'unsupported', feature: 'frequencyPenalty' });
    }

    if (presencePenalty != null) {
      warnings.push({ type: 'unsupported', feature: 'presencePenalty' });
    }

    if (stopSequences != null) {
      warnings.push({ type: 'unsupported', feature: 'stopSequences' });
    }

    // convert ai sdk messages to xai format
    const { messages, warnings: messageWarnings } =
      convertToXaiChatMessages(prompt);
    warnings.push(...messageWarnings);

    // prepare tools for xai
    const {
      tools: xaiTools,
      toolChoice: xaiToolChoice,
      toolWarnings,
    } = prepareTools({
      tools,
      toolChoice,
    });
    warnings.push(...toolWarnings);

    const baseArgs = {
      // model id
      model: this.modelId,

      // standard generation settings
      max_completion_tokens: maxOutputTokens,
      temperature,
      top_p: topP,
      seed,
      reasoning_effort: options.reasoningEffort,

      // parallel function calling
      parallel_function_calling: options.parallel_function_calling,

      // response format
      response_format:
        responseFormat?.type === 'json'
          ? responseFormat.schema != null
            ? {
                type: 'json_schema',
                json_schema: {
                  name: responseFormat.name ?? 'response',
                  schema: responseFormat.schema,
                  strict: true,
                },
              }
            : { type: 'json_object' }
          : undefined,

      // search parameters
      search_parameters: options.searchParameters
        ? {
            mode: options.searchParameters.mode,
            return_citations: options.searchParameters.returnCitations,
            from_date: options.searchParameters.fromDate,
            to_date: options.searchParameters.toDate,
            max_search_results: options.searchParameters.maxSearchResults,
            sources: options.searchParameters.sources?.map(source => ({
              type: source.type,
              ...(source.type === 'web' && {
                country: source.country,
                excluded_websites: source.excludedWebsites,
                allowed_websites: source.allowedWebsites,
                safe_search: source.safeSearch,
              }),
              ...(source.type === 'x' && {
                excluded_x_handles: source.excludedXHandles,
                included_x_handles: source.includedXHandles ?? source.xHandles,
                post_favorite_count: source.postFavoriteCount,
                post_view_count: source.postViewCount,
              }),
              ...(source.type === 'news' && {
                country: source.country,
                excluded_websites: source.excludedWebsites,
                safe_search: source.safeSearch,
              }),
              ...(source.type === 'rss' && {
                links: source.links,
              }),
            })),
          }
        : undefined,

      // messages in xai format
      messages,

      // tools in xai format
      tools: xaiTools,
      tool_choice: xaiToolChoice,
    };

    return {
      args: baseArgs,
      warnings,
    };
  }

  async doGenerate(
    options: LanguageModelV3CallOptions,
  ): Promise<LanguageModelV3GenerateResult> {
    const { args: body, warnings } = await this.getArgs(options);

    const url = `${this.config.baseURL ?? 'https://api.x.ai/v1'}/chat/completions`;

    const {
      responseHeaders,
      value: response,
      rawValue: rawResponse,
    } = await postJsonToApi({
      url,
      headers: combineHeaders(this.config.headers(), options.headers),
      body,
      failedResponseHandler: xaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(
        xaiChatResponseSchema,
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch,
    });

    if (response.error != null) {
      throw new APICallError({
        message: response.error,
        url,
        requestBodyValues: body,
        statusCode: 200,
        responseHeaders,
        responseBody: JSON.stringify(rawResponse),
        isRetryable: response.code === 'The service is currently unavailable',
      });
    }

    const choice = response.choices![0];
    const content: Array<LanguageModelV3Content> = [];

    // extract text content
    if (choice.message.content != null && choice.message.content.length > 0) {
      let text = choice.message.content;

      // skip if this content duplicates the last assistant message
      const lastMessage = body.messages[body.messages.length - 1];
      if (lastMessage?.role === 'assistant' && text === lastMessage.content) {
        text = '';
      }

      if (text.length > 0) {
        content.push({ type: 'text', text });
      }
    }

    // extract reasoning content
    if (
      choice.message.reasoning_content != null &&
      choice.message.reasoning_content.length > 0
    ) {
      content.push({
        type: 'reasoning',
        text: choice.message.reasoning_content,
      });
    }

    // extract tool calls
    if (choice.message.tool_calls != null) {
      for (const toolCall of choice.message.tool_calls) {
        content.push({
          type: 'tool-call',
          toolCallId: toolCall.id,
          toolName: toolCall.function.name,
          input: toolCall.function.arguments,
        });
      }
    }

    // extract citations
    if (response.citations != null) {
      for (const url of response.citations) {
        content.push({
          type: 'source',
          sourceType: 'url',
          id: this.config.generateId(),
          url,
        });
      }
    }

    return {
      content,
      finishReason: {
        unified: mapXaiFinishReason(choice.finish_reason),
        raw: choice.finish_reason ?? undefined,
      },
      usage: convertXaiChatUsage(response.usage!), // defined when there is no error
      request: { body },
      response: {
        ...getResponseMetadata(response),
        headers: responseHeaders,
        body: rawResponse,
      },
      warnings,
    };
  }

  async doStream(
    options: LanguageModelV3CallOptions,
  ): Promise<LanguageModelV3StreamResult> {
    const { args, warnings } = await this.getArgs(options);
    const body = {
      ...args,
      stream: true,
      stream_options: {
        include_usage: true,
      },
    };

    const url = `${this.config.baseURL ?? 'https://api.x.ai/v1'}/chat/completions`;

    const { responseHeaders, value: response } = await postJsonToApi({
      url,
      headers: combineHeaders(this.config.headers(), options.headers),
      body,
      failedResponseHandler: xaiFailedResponseHandler,
      successfulResponseHandler: async ({ response }) => {
        const responseHeaders = extractResponseHeaders(response);
        const contentType = response.headers.get('content-type');

        if (contentType?.includes('application/json')) {
          const responseBody = await response.text();
          const parsedError = await safeParseJSON({
            text: responseBody,
            schema: xaiStreamErrorSchema,
          });

          if (parsedError.success) {
            throw new APICallError({
              message: parsedError.value.error,
              url,
              requestBodyValues: body,
              statusCode: 200,
              responseHeaders,
              responseBody,
              isRetryable:
                parsedError.value.code ===
                'The service is currently unavailable',
            });
          }

          throw new APICallError({
            message: 'Invalid JSON response',
            url,
            requestBodyValues: body,
            statusCode: 200,
            responseHeaders,
            responseBody,
          });
        }

        return createEventSourceResponseHandler(xaiChatChunkSchema)({
          response,
          url,
          requestBodyValues: body,
        });
      },
      abortSignal: options.abortSignal,
      fetch: this.config.fetch,
    });

    let finishReason: LanguageModelV3FinishReason = {
      unified: 'other',
      raw: undefined,
    };
    let usage: LanguageModelV3Usage | undefined = undefined;
    let isFirstChunk = true;
    const contentBlocks: Record<
      string,
      { type: 'text' | 'reasoning'; ended: boolean }
    > = {};
    const lastReasoningDeltas: Record<string, string> = {};
    let activeReasoningBlockId: string | undefined = undefined;

    const self = this;

    return {
      stream: response.pipeThrough(
        new TransformStream<
          ParseResult<z.infer<typeof xaiChatChunkSchema>>,
          LanguageModelV3StreamPart
        >({
          start(controller) {
            controller.enqueue({ type: 'stream-start', warnings });
          },

          transform(chunk, controller) {
            // Emit raw chunk if requested (before anything else)
            if (options.includeRawChunks) {
              controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });
            }

            if (!chunk.success) {
              controller.enqueue({ type: 'error', error: chunk.error });
              return;
            }

            const value = chunk.value;

            // emit response metadata on first chunk
            if (isFirstChunk) {
              controller.enqueue({
                type: 'response-metadata',
                ...getResponseMetadata(value),
              });
              isFirstChunk = false;
            }

            // emit citations if present (they come in the last chunk according to docs)
            if (value.citations != null) {
              for (const url of value.citations) {
                controller.enqueue({
                  type: 'source',
                  sourceType: 'url',
                  id: self.config.generateId(),
                  url,
                });
              }
            }

            // update usage if present
            if (value.usage != null) {
              usage = convertXaiChatUsage(value.usage);
            }

            const choice = value.choices[0];

            // update finish reason if present
            if (choice?.finish_reason != null) {
              finishReason = {
                unified: mapXaiFinishReason(choice.finish_reason),
                raw: choice.finish_reason,
              };
            }

            // exit if no delta to process
            if (choice?.delta == null) {
              return;
            }

            const delta = choice.delta;
            const choiceIndex = choice.index;

            // process text content
            if (delta.content != null && delta.content.length > 0) {
              const textContent = delta.content;

              // end active reasoning block when text content arrives
              if (
                activeReasoningBlockId != null &&
                !contentBlocks[activeReasoningBlockId].ended
              ) {
                controller.enqueue({
                  type: 'reasoning-end',
                  id: activeReasoningBlockId,
                });
                contentBlocks[activeReasoningBlockId].ended = true;
                activeReasoningBlockId = undefined;
              }

              // skip if this content duplicates the last assistant message
              const lastMessage = body.messages[body.messages.length - 1];
              if (
                lastMessage?.role === 'assistant' &&
                textContent === lastMessage.content
              ) {
                return;
              }

              const blockId = `text-${value.id || choiceIndex}`;

              if (contentBlocks[blockId] == null) {
                contentBlocks[blockId] = { type: 'text', ended: false };
                controller.enqueue({
                  type: 'text-start',
                  id: blockId,
                });
              }

              controller.enqueue({
                type: 'text-delta',
                id: blockId,
                delta: textContent,
              });
            }

            // process reasoning content
            if (
              delta.reasoning_content != null &&
              delta.reasoning_content.length > 0
            ) {
              const blockId = `reasoning-${value.id || choiceIndex}`;

              // skip if this reasoning content duplicates the last delta
              if (lastReasoningDeltas[blockId] === delta.reasoning_content) {
                return;
              }
              lastReasoningDeltas[blockId] = delta.reasoning_content;

              if (contentBlocks[blockId] == null) {
                contentBlocks[blockId] = { type: 'reasoning', ended: false };
                activeReasoningBlockId = blockId;
                controller.enqueue({
                  type: 'reasoning-start',
                  id: blockId,
                });
              }

              controller.enqueue({
                type: 'reasoning-delta',
                id: blockId,
                delta: delta.reasoning_content,
              });
            }

            // process tool calls
            if (delta.tool_calls != null) {
              // end active reasoning block before tool calls start
              if (
                activeReasoningBlockId != null &&
                !contentBlocks[activeReasoningBlockId].ended
              ) {
                controller.enqueue({
                  type: 'reasoning-end',
                  id: activeReasoningBlockId,
                });
                contentBlocks[activeReasoningBlockId].ended = true;
                activeReasoningBlockId = undefined;
              }

              for (const toolCall of delta.tool_calls) {
                // xai tool calls come in one piece (like mistral)
                const toolCallId = toolCall.id;

                controller.enqueue({
                  type: 'tool-input-start',
                  id: toolCallId,
                  toolName: toolCall.function.name,
                });

                controller.enqueue({
                  type: 'tool-input-delta',
                  id: toolCallId,
                  delta: toolCall.function.arguments,
                });

                controller.enqueue({
                  type: 'tool-input-end',
                  id: toolCallId,
                });

                controller.enqueue({
                  type: 'tool-call',
                  toolCallId,
                  toolName: toolCall.function.name,
                  input: toolCall.function.arguments,
                });
              }
            }
          },

          flush(controller) {
            // end any blocks that haven't been ended yet
            for (const [blockId, block] of Object.entries(contentBlocks)) {
              if (!block.ended) {
                controller.enqueue({
                  type: block.type === 'text' ? 'text-end' : 'reasoning-end',
                  id: blockId,
                });
              }
            }

            controller.enqueue({ type: 'finish', finishReason, usage: usage! });
          },
        }),
      ),
      request: { body },
      response: { headers: responseHeaders },
    };
  }
}

// XAI API Response Schemas
const xaiUsageSchema = z.object({
  prompt_tokens: z.number(),
  completion_tokens: z.number(),
  total_tokens: z.number(),
  prompt_tokens_details: z
    .object({
      text_tokens: z.number().nullish(),
      audio_tokens: z.number().nullish(),
      image_tokens: z.number().nullish(),
      cached_tokens: z.number().nullish(),
    })
    .nullish(),
  completion_tokens_details: z
    .object({
      reasoning_tokens: z.number().nullish(),
      audio_tokens: z.number().nullish(),
      accepted_prediction_tokens: z.number().nullish(),
      rejected_prediction_tokens: z.number().nullish(),
    })
    .nullish(),
});

export type XaiChatUsage = z.infer<typeof xaiUsageSchema>;

const xaiChatResponseSchema = z.object({
  id: z.string().nullish(),
  created: z.number().nullish(),
  model: z.string().nullish(),
  choices: z
    .array(
      z.object({
        message: z.object({
          role: z.literal('assistant'),
          content: z.string().nullish(),
          reasoning_content: z.string().nullish(),
          tool_calls: z
            .array(
              z.object({
                id: z.string(),
                type: z.literal('function'),
                function: z.object({
                  name: z.string(),
                  arguments: z.string(),
                }),
              }),
            )
            .nullish(),
        }),
        index: z.number(),
        finish_reason: z.string().nullish(),
      }),
    )
    .nullish(),
  object: z.literal('chat.completion').nullish(),
  usage: xaiUsageSchema.nullish(),
  citations: z.array(z.string().url()).nullish(),
  code: z.string().nullish(),
  error: z.string().nullish(),
});

const xaiChatChunkSchema = z.object({
  id: z.string().nullish(),
  created: z.number().nullish(),
  model: z.string().nullish(),
  choices: z.array(
    z.object({
      delta: z.object({
        role: z.enum(['assistant']).optional(),
        content: z.string().nullish(),
        reasoning_content: z.string().nullish(),
        tool_calls: z
          .array(
            z.object({
              id: z.string(),
              type: z.literal('function'),
              function: z.object({
                name: z.string(),
                arguments: z.string(),
              }),
            }),
          )
          .nullish(),
      }),
      finish_reason: z.string().nullish(),
      index: z.number(),
    }),
  ),
  usage: xaiUsageSchema.nullish(),
  citations: z.array(z.string().url()).nullish(),
});

const xaiStreamErrorSchema = z.object({
  code: z.string(),
  error: z.string(),
});