@ai-sdk/xai 1.0.6 → 1.0.8

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
package/dist/index.js CHANGED
@@ -26,620 +26,46 @@ __export(src_exports, {
  module.exports = __toCommonJS(src_exports);
 
  // src/xai-provider.ts
- var import_provider4 = require("@ai-sdk/provider");
- var import_provider_utils4 = require("@ai-sdk/provider-utils");
-
- // src/xai-chat-language-model.ts
- var import_provider3 = require("@ai-sdk/provider");
- var import_provider_utils3 = require("@ai-sdk/provider-utils");
- var import_zod2 = require("zod");
-
- // src/convert-to-xai-chat-messages.ts
  var import_provider = require("@ai-sdk/provider");
+ var import_openai_compatible = require("@ai-sdk/openai-compatible");
  var import_provider_utils = require("@ai-sdk/provider-utils");
- function convertToXaiChatMessages(prompt) {
-   const messages = [];
-   for (const { role, content } of prompt) {
-     switch (role) {
-       case "system": {
-         messages.push({ role: "system", content });
-         break;
-       }
-       case "user": {
-         if (content.length === 1 && content[0].type === "text") {
-           messages.push({ role: "user", content: content[0].text });
-           break;
-         }
-         messages.push({
-           role: "user",
-           content: content.map((part) => {
-             var _a;
-             switch (part.type) {
-               case "text": {
-                 return { type: "text", text: part.text };
-               }
-               case "image": {
-                 return {
-                   type: "image_url",
-                   image_url: {
-                     url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.image)}`
-                   }
-                 };
-               }
-               case "file": {
-                 throw new import_provider.UnsupportedFunctionalityError({
-                   functionality: "File content parts in user messages"
-                 });
-               }
-             }
-           })
-         });
-         break;
-       }
-       case "assistant": {
-         let text = "";
-         const toolCalls = [];
-         for (const part of content) {
-           switch (part.type) {
-             case "text": {
-               text += part.text;
-               break;
-             }
-             case "tool-call": {
-               toolCalls.push({
-                 id: part.toolCallId,
-                 type: "function",
-                 function: {
-                   name: part.toolName,
-                   arguments: JSON.stringify(part.args)
-                 }
-               });
-               break;
-             }
-             default: {
-               const _exhaustiveCheck = part;
-               throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
-             }
-           }
-         }
-         messages.push({
-           role: "assistant",
-           content: text,
-           tool_calls: toolCalls.length > 0 ? toolCalls : void 0
-         });
-         break;
-       }
-       case "tool": {
-         for (const toolResponse of content) {
-           messages.push({
-             role: "tool",
-             tool_call_id: toolResponse.toolCallId,
-             content: JSON.stringify(toolResponse.result)
-           });
-         }
-         break;
-       }
-       default: {
-         const _exhaustiveCheck = role;
-         throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
-       }
-     }
-   }
-   return messages;
- }
-
- // src/get-response-metadata.ts
- function getResponseMetadata({
-   id,
-   model,
-   created
- }) {
-   return {
-     id: id != null ? id : void 0,
-     modelId: model != null ? model : void 0,
-     timestamp: created != null ? new Date(created * 1e3) : void 0
-   };
- }
-
- // src/xai-error.ts
  var import_zod = require("zod");
- var import_provider_utils2 = require("@ai-sdk/provider-utils");
- var xaiErrorDataSchema = import_zod.z.object({
+ var xaiErrorSchema = import_zod.z.object({
    code: import_zod.z.string(),
    error: import_zod.z.string()
  });
- var xaiFailedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)({
-   errorSchema: xaiErrorDataSchema,
+ var xaiErrorStructure = {
+   errorSchema: xaiErrorSchema,
    errorToMessage: (data) => data.error
- });
-
- // src/xai-prepare-tools.ts
- var import_provider2 = require("@ai-sdk/provider");
- function prepareTools({
-   mode
- }) {
-   var _a;
-   const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
-   const toolWarnings = [];
-   if (tools == null) {
-     return { tools: void 0, tool_choice: void 0, toolWarnings };
-   }
-   const toolChoice = mode.toolChoice;
-   const xaiTools = [];
-   for (const tool of tools) {
-     if (tool.type === "provider-defined") {
-       toolWarnings.push({ type: "unsupported-tool", tool });
-     } else {
-       xaiTools.push({
-         type: "function",
-         function: {
-           name: tool.name,
-           description: tool.description,
-           parameters: tool.parameters
-         }
-       });
-     }
-   }
-   if (toolChoice == null) {
-     return { tools: xaiTools, tool_choice: void 0, toolWarnings };
-   }
-   const type = toolChoice.type;
-   switch (type) {
-     case "auto":
-     case "none":
-     case "required":
-       return { tools: xaiTools, tool_choice: type, toolWarnings };
-     case "tool":
-       return {
-         tools: xaiTools,
-         tool_choice: {
-           type: "function",
-           function: {
-             name: toolChoice.toolName
-           }
-         },
-         toolWarnings
-       };
-     default: {
-       const _exhaustiveCheck = type;
-       throw new import_provider2.UnsupportedFunctionalityError({
-         functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
-       });
-     }
-   }
- }
-
- // src/map-xai-finish-reason.ts
- function mapXaiFinishReason(finishReason) {
-   switch (finishReason) {
-     case "stop":
-       return "stop";
-     case "length":
-       return "length";
-     case "content_filter":
-       return "content-filter";
-     case "function_call":
-     case "tool_calls":
-       return "tool-calls";
-     default:
-       return "unknown";
-   }
- }
-
- // src/xai-chat-language-model.ts
- var XaiChatLanguageModel = class {
-   constructor(modelId, settings, config) {
-     this.specificationVersion = "v1";
-     this.supportsStructuredOutputs = false;
-     this.defaultObjectGenerationMode = "tool";
-     this.modelId = modelId;
-     this.settings = settings;
-     this.config = config;
-   }
-   get provider() {
-     return this.config.provider;
-   }
-   getArgs({
-     mode,
-     prompt,
-     maxTokens,
-     temperature,
-     topP,
-     topK,
-     frequencyPenalty,
-     presencePenalty,
-     stopSequences,
-     responseFormat,
-     seed,
-     stream
-   }) {
-     const type = mode.type;
-     const warnings = [];
-     if (topK != null) {
-       warnings.push({
-         type: "unsupported-setting",
-         setting: "topK"
-       });
-     }
-     if (responseFormat != null && responseFormat.type === "json" && responseFormat.schema != null) {
-       warnings.push({
-         type: "unsupported-setting",
-         setting: "responseFormat",
-         details: "JSON response format schema is not supported"
-       });
-     }
-     const baseArgs = {
-       // model id:
-       model: this.modelId,
-       // model specific settings:
-       user: this.settings.user,
-       // standardized settings:
-       max_tokens: maxTokens,
-       temperature,
-       top_p: topP,
-       frequency_penalty: frequencyPenalty,
-       presence_penalty: presencePenalty,
-       stop: stopSequences,
-       seed,
-       // response format:
-       response_format: (
-         // json object response format is not currently supported
-         void 0
-       ),
-       // messages:
-       messages: convertToXaiChatMessages(prompt)
-     };
-     switch (type) {
-       case "regular": {
-         const { tools, tool_choice, toolWarnings } = prepareTools({ mode });
-         return {
-           args: {
-             ...baseArgs,
-             tools,
-             tool_choice
-           },
-           warnings: [...warnings, ...toolWarnings]
-         };
-       }
-       case "object-json": {
-         throw new import_provider3.UnsupportedFunctionalityError({
-           functionality: "object-json mode"
-         });
-       }
-       case "object-tool": {
-         return {
-           args: {
-             ...baseArgs,
-             tool_choice: {
-               type: "function",
-               function: { name: mode.tool.name }
-             },
-             tools: [
-               {
-                 type: "function",
-                 function: {
-                   name: mode.tool.name,
-                   description: mode.tool.description,
-                   parameters: mode.tool.parameters
-                 }
-               }
-             ]
-           },
-           warnings
-         };
-       }
-       default: {
-         const _exhaustiveCheck = type;
-         throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-       }
-     }
-   }
-   async doGenerate(options) {
-     var _a, _b, _c, _d, _e, _f;
-     const { args, warnings } = this.getArgs({ ...options, stream: false });
-     const body = JSON.stringify(args);
-     const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
-       url: this.config.url({
-         path: "/chat/completions",
-         modelId: this.modelId
-       }),
-       headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
-       body: args,
-       failedResponseHandler: xaiFailedResponseHandler,
-       successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
-         xaiChatResponseSchema
-       ),
-       abortSignal: options.abortSignal,
-       fetch: this.config.fetch
-     });
-     const { messages: rawPrompt, ...rawSettings } = args;
-     const choice = response.choices[0];
-     return {
-       text: (_a = choice.message.content) != null ? _a : void 0,
-       toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => {
-         var _a2;
-         return {
-           toolCallType: "function",
-           toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils3.generateId)(),
-           toolName: toolCall.function.name,
-           args: toolCall.function.arguments
-         };
-       }),
-       finishReason: mapXaiFinishReason(choice.finish_reason),
-       usage: {
-         promptTokens: (_d = (_c = response.usage) == null ? void 0 : _c.prompt_tokens) != null ? _d : NaN,
-         completionTokens: (_f = (_e = response.usage) == null ? void 0 : _e.completion_tokens) != null ? _f : NaN
-       },
-       rawCall: { rawPrompt, rawSettings },
-       rawResponse: { headers: responseHeaders },
-       response: getResponseMetadata(response),
-       warnings,
-       request: { body }
-     };
-   }
-   async doStream(options) {
-     const { args, warnings } = this.getArgs({ ...options, stream: true });
-     const body = JSON.stringify({ ...args, stream: true });
-     const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
-       url: this.config.url({
-         path: "/chat/completions",
-         modelId: this.modelId
-       }),
-       headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
-       body: {
-         ...args,
-         stream: true
-       },
-       failedResponseHandler: xaiFailedResponseHandler,
-       successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(xaiChatChunkSchema),
-       abortSignal: options.abortSignal,
-       fetch: this.config.fetch
-     });
-     const { messages: rawPrompt, ...rawSettings } = args;
-     const toolCalls = [];
-     let finishReason = "unknown";
-     let usage = {
-       promptTokens: void 0,
-       completionTokens: void 0
-     };
-     let isFirstChunk = true;
-     let providerMetadata;
-     return {
-       stream: response.pipeThrough(
-         new TransformStream({
-           transform(chunk, controller) {
-             var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
-             if (!chunk.success) {
-               finishReason = "error";
-               controller.enqueue({ type: "error", error: chunk.error });
-               return;
-             }
-             const value = chunk.value;
-             if ("error" in value) {
-               finishReason = "error";
-               controller.enqueue({ type: "error", error: value.error });
-               return;
-             }
-             if (isFirstChunk) {
-               isFirstChunk = false;
-               controller.enqueue({
-                 type: "response-metadata",
-                 ...getResponseMetadata(value)
-               });
-             }
-             if (value.usage != null) {
-               usage = {
-                 promptTokens: (_a = value.usage.prompt_tokens) != null ? _a : void 0,
-                 completionTokens: (_b = value.usage.completion_tokens) != null ? _b : void 0
-               };
-             }
-             const choice = value.choices[0];
-             if ((choice == null ? void 0 : choice.finish_reason) != null) {
-               finishReason = mapXaiFinishReason(choice.finish_reason);
-             }
-             if ((choice == null ? void 0 : choice.delta) == null) {
-               return;
-             }
-             const delta = choice.delta;
-             if (delta.content != null) {
-               controller.enqueue({
-                 type: "text-delta",
-                 textDelta: delta.content
-               });
-             }
-             if (delta.tool_calls != null) {
-               for (const toolCallDelta of delta.tool_calls) {
-                 const index = toolCallDelta.index;
-                 if (toolCalls[index] == null) {
-                   if (toolCallDelta.type !== "function") {
-                     throw new import_provider3.InvalidResponseDataError({
-                       data: toolCallDelta,
-                       message: `Expected 'function' type.`
-                     });
-                   }
-                   if (toolCallDelta.id == null) {
-                     throw new import_provider3.InvalidResponseDataError({
-                       data: toolCallDelta,
-                       message: `Expected 'id' to be a string.`
-                     });
-                   }
-                   if (((_c = toolCallDelta.function) == null ? void 0 : _c.name) == null) {
-                     throw new import_provider3.InvalidResponseDataError({
-                       data: toolCallDelta,
-                       message: `Expected 'function.name' to be a string.`
-                     });
-                   }
-                   toolCalls[index] = {
-                     id: toolCallDelta.id,
-                     type: "function",
-                     function: {
-                       name: toolCallDelta.function.name,
-                       arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
-                     }
-                   };
-                   const toolCall2 = toolCalls[index];
-                   if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null) {
-                     if (toolCall2.function.arguments.length > 0) {
-                       controller.enqueue({
-                         type: "tool-call-delta",
-                         toolCallType: "function",
-                         toolCallId: toolCall2.id,
-                         toolName: toolCall2.function.name,
-                         argsTextDelta: toolCall2.function.arguments
-                       });
-                     }
-                     if ((0, import_provider_utils3.isParsableJson)(toolCall2.function.arguments)) {
-                       controller.enqueue({
-                         type: "tool-call",
-                         toolCallType: "function",
-                         toolCallId: (_g = toolCall2.id) != null ? _g : (0, import_provider_utils3.generateId)(),
-                         toolName: toolCall2.function.name,
-                         args: toolCall2.function.arguments
-                       });
-                     }
-                   }
-                   continue;
-                 }
-                 const toolCall = toolCalls[index];
-                 if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
-                   toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
-                 }
-                 controller.enqueue({
-                   type: "tool-call-delta",
-                   toolCallType: "function",
-                   toolCallId: toolCall.id,
-                   toolName: toolCall.function.name,
-                   argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
-                 });
-                 if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
-                   controller.enqueue({
-                     type: "tool-call",
-                     toolCallType: "function",
-                     toolCallId: (_n = toolCall.id) != null ? _n : (0, import_provider_utils3.generateId)(),
-                     toolName: toolCall.function.name,
-                     args: toolCall.function.arguments
-                   });
-                 }
-               }
-             }
-           },
-           flush(controller) {
-             var _a, _b;
-             controller.enqueue({
-               type: "finish",
-               finishReason,
-               usage: {
-                 promptTokens: (_a = usage.promptTokens) != null ? _a : NaN,
-                 completionTokens: (_b = usage.completionTokens) != null ? _b : NaN
-               },
-               ...providerMetadata != null ? { providerMetadata } : {}
-             });
-           }
-         })
-       ),
-       rawCall: { rawPrompt, rawSettings },
-       rawResponse: { headers: responseHeaders },
-       warnings,
-       request: { body }
-     };
-   }
  };
- var xaiChatResponseSchema = import_zod2.z.object({
-   id: import_zod2.z.string().nullish(),
-   created: import_zod2.z.number().nullish(),
-   model: import_zod2.z.string().nullish(),
-   choices: import_zod2.z.array(
-     import_zod2.z.object({
-       message: import_zod2.z.object({
-         role: import_zod2.z.literal("assistant").nullish(),
-         content: import_zod2.z.string().nullish(),
-         tool_calls: import_zod2.z.array(
-           import_zod2.z.object({
-             id: import_zod2.z.string().nullish(),
-             type: import_zod2.z.literal("function"),
-             function: import_zod2.z.object({
-               name: import_zod2.z.string(),
-               arguments: import_zod2.z.string()
-             })
-           })
-         ).nullish()
-       }),
-       index: import_zod2.z.number(),
-       finish_reason: import_zod2.z.string().nullish()
-     })
-   ),
-   usage: import_zod2.z.object({
-     prompt_tokens: import_zod2.z.number().nullish(),
-     completion_tokens: import_zod2.z.number().nullish()
-   }).nullish()
- });
- var xaiChatChunkSchema = import_zod2.z.union([
-   import_zod2.z.object({
-     id: import_zod2.z.string().nullish(),
-     created: import_zod2.z.number().nullish(),
-     model: import_zod2.z.string().nullish(),
-     choices: import_zod2.z.array(
-       import_zod2.z.object({
-         delta: import_zod2.z.object({
-           role: import_zod2.z.enum(["assistant"]).nullish(),
-           content: import_zod2.z.string().nullish(),
-           tool_calls: import_zod2.z.array(
-             import_zod2.z.object({
-               index: import_zod2.z.number(),
-               id: import_zod2.z.string().nullish(),
-               type: import_zod2.z.literal("function").optional(),
-               function: import_zod2.z.object({
-                 name: import_zod2.z.string().nullish(),
-                 arguments: import_zod2.z.string().nullish()
-               })
-             })
-           ).nullish()
-         }).nullish(),
-         finish_reason: import_zod2.z.string().nullable().optional(),
-         index: import_zod2.z.number()
-       })
-     ),
-     usage: import_zod2.z.object({
-       prompt_tokens: import_zod2.z.number().nullish(),
-       completion_tokens: import_zod2.z.number().nullish()
-     }).nullish()
-   }),
-   xaiErrorDataSchema
- ]);
-
- // src/xai-provider.ts
  function createXai(options = {}) {
    var _a;
-   const baseURL = (_a = (0, import_provider_utils4.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.x.ai/v1";
+   const baseURL = (0, import_provider_utils.withoutTrailingSlash)(
+     (_a = options.baseURL) != null ? _a : "https://api.x.ai/v1"
+   );
    const getHeaders = () => ({
-     Authorization: `Bearer ${(0, import_provider_utils4.loadApiKey)({
+     Authorization: `Bearer ${(0, import_provider_utils.loadApiKey)({
        apiKey: options.apiKey,
        environmentVariableName: "XAI_API_KEY",
-       description: "xAI"
+       description: "xAI API key"
      })}`,
      ...options.headers
    });
-   const createChatModel = (modelId, settings = {}) => new XaiChatLanguageModel(modelId, settings, {
-     provider: "xai.chat",
-     url: ({ path }) => `${baseURL}${path}`,
-     headers: getHeaders,
-     fetch: options.fetch
-   });
-   const createLanguageModel = (modelId, settings) => {
-     if (new.target) {
-       throw new Error(
-         "The xAI model function cannot be called with the new keyword."
-       );
-     }
-     return createChatModel(modelId, settings);
-   };
-   const provider = function(modelId, settings) {
-     return createLanguageModel(modelId, settings);
+   const createLanguageModel = (modelId, settings = {}) => {
+     return new import_openai_compatible.OpenAICompatibleChatLanguageModel(modelId, settings, {
+       provider: "xai.chat",
+       url: ({ path }) => `${baseURL}${path}`,
+       headers: getHeaders,
+       fetch: options.fetch,
+       defaultObjectGenerationMode: "tool",
+       errorStructure: xaiErrorStructure
+     });
    };
+   const provider = (modelId, settings) => createLanguageModel(modelId, settings);
    provider.languageModel = createLanguageModel;
-   provider.chat = createChatModel;
+   provider.chat = createLanguageModel;
    provider.textEmbeddingModel = (modelId) => {
-     throw new import_provider4.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
+     throw new import_provider.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
    };
    return provider;
  }
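
A minimal usage sketch of the provider after this change (not part of the published files). It assumes the example model id "grok-beta"; the createXai options, XAI_API_KEY environment variable, and provider.chat / provider.languageModel accessors come from the code above.

// Usage sketch: models created via createXai are now backed by OpenAICompatibleChatLanguageModel.
// "grok-beta" is an assumed example model id, not taken from this diff.
const { createXai } = require("@ai-sdk/xai");

const xai = createXai({ apiKey: process.env.XAI_API_KEY }); // apiKey may be omitted if XAI_API_KEY is set
const model = xai("grok-beta"); // equivalent to xai.chat("grok-beta") or xai.languageModel("grok-beta")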