@providerprotocol/ai 0.0.21 → 0.0.23

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Files changed (53)
  1. package/README.md +188 -6
  2. package/dist/anthropic/index.d.ts +1 -1
  3. package/dist/anthropic/index.js +115 -39
  4. package/dist/anthropic/index.js.map +1 -1
  5. package/dist/{chunk-Y3GBJNA2.js → chunk-55X3W2MN.js} +4 -3
  6. package/dist/chunk-55X3W2MN.js.map +1 -0
  7. package/dist/chunk-73IIE3QT.js +120 -0
  8. package/dist/chunk-73IIE3QT.js.map +1 -0
  9. package/dist/{chunk-M4BMM5IB.js → chunk-MF5ETY5O.js} +13 -4
  10. package/dist/chunk-MF5ETY5O.js.map +1 -0
  11. package/dist/{chunk-SKY2JLA7.js → chunk-MKDLXV4O.js} +1 -1
  12. package/dist/chunk-MKDLXV4O.js.map +1 -0
  13. package/dist/{chunk-Z7RBRCRN.js → chunk-NWS5IKNR.js} +37 -11
  14. package/dist/chunk-NWS5IKNR.js.map +1 -0
  15. package/dist/{chunk-EDENPF3E.js → chunk-QNJO7DSD.js} +152 -53
  16. package/dist/chunk-QNJO7DSD.js.map +1 -0
  17. package/dist/{chunk-Z4ILICF5.js → chunk-SBCATNHA.js} +43 -14
  18. package/dist/chunk-SBCATNHA.js.map +1 -0
  19. package/dist/chunk-Z6DKC37J.js +50 -0
  20. package/dist/chunk-Z6DKC37J.js.map +1 -0
  21. package/dist/google/index.d.ts +22 -7
  22. package/dist/google/index.js +286 -85
  23. package/dist/google/index.js.map +1 -1
  24. package/dist/http/index.d.ts +3 -3
  25. package/dist/http/index.js +4 -4
  26. package/dist/index.d.ts +10 -6
  27. package/dist/index.js +331 -204
  28. package/dist/index.js.map +1 -1
  29. package/dist/ollama/index.d.ts +5 -2
  30. package/dist/ollama/index.js +87 -28
  31. package/dist/ollama/index.js.map +1 -1
  32. package/dist/openai/index.d.ts +1 -1
  33. package/dist/openai/index.js +226 -81
  34. package/dist/openai/index.js.map +1 -1
  35. package/dist/openrouter/index.d.ts +1 -1
  36. package/dist/openrouter/index.js +199 -64
  37. package/dist/openrouter/index.js.map +1 -1
  38. package/dist/{provider-DGQHYE6I.d.ts → provider-DR1yins0.d.ts} +159 -53
  39. package/dist/proxy/index.d.ts +2 -2
  40. package/dist/proxy/index.js +178 -17
  41. package/dist/proxy/index.js.map +1 -1
  42. package/dist/{retry-Pcs3hnbu.d.ts → retry-DJiqAslw.d.ts} +11 -2
  43. package/dist/{stream-Di9acos2.d.ts → stream-BuTrqt_j.d.ts} +103 -41
  44. package/dist/xai/index.d.ts +1 -1
  45. package/dist/xai/index.js +189 -75
  46. package/dist/xai/index.js.map +1 -1
  47. package/package.json +1 -1
  48. package/dist/chunk-EDENPF3E.js.map +0 -1
  49. package/dist/chunk-M4BMM5IB.js.map +0 -1
  50. package/dist/chunk-SKY2JLA7.js.map +0 -1
  51. package/dist/chunk-Y3GBJNA2.js.map +0 -1
  52. package/dist/chunk-Z4ILICF5.js.map +0 -1
  53. package/dist/chunk-Z7RBRCRN.js.map +0 -1
package/dist/xai/index.js CHANGED
@@ -1,25 +1,35 @@
 import {
   Image
 } from "../chunk-WAKD3OO5.js";
+import {
+  parseJsonResponse
+} from "../chunk-Z6DKC37J.js";
+import {
+  StreamEventType
+} from "../chunk-73IIE3QT.js";
 import {
   AssistantMessage,
   createProvider,
+  generateId,
   isAssistantMessage,
   isToolResultMessage,
   isUserMessage
-} from "../chunk-M4BMM5IB.js";
+} from "../chunk-MF5ETY5O.js";
 import {
   parseSSEStream
-} from "../chunk-Z7RBRCRN.js";
+} from "../chunk-NWS5IKNR.js";
 import {
   resolveApiKey
-} from "../chunk-Y3GBJNA2.js";
+} from "../chunk-55X3W2MN.js";
 import {
+  ErrorCode,
+  ModalityType,
   UPPError,
   doFetch,
   doStreamFetch,
-  normalizeHttpError
-} from "../chunk-EDENPF3E.js";
+  normalizeHttpError,
+  toError
+} from "../chunk-QNJO7DSD.js";
 
 // src/providers/xai/transform.completions.ts
 function transformRequest(request, modelId) {
@@ -55,9 +65,40 @@ function transformRequest(request, modelId) {
   return xaiRequest;
 }
 function normalizeSystem(system) {
-  if (!system) return void 0;
+  if (system === void 0 || system === null) return void 0;
   if (typeof system === "string") return system;
-  return system.map((block) => block.text ?? "").filter((text) => text.length > 0).join("\n\n");
+  if (!Array.isArray(system)) {
+    throw new UPPError(
+      "System prompt must be a string or an array of text blocks",
+      ErrorCode.InvalidRequest,
+      "xai",
+      ModalityType.LLM
+    );
+  }
+  const texts = [];
+  for (const block of system) {
+    if (!block || typeof block !== "object" || !("text" in block)) {
+      throw new UPPError(
+        "System prompt array must contain objects with a text field",
+        ErrorCode.InvalidRequest,
+        "xai",
+        ModalityType.LLM
+      );
+    }
+    const textValue = block.text;
+    if (typeof textValue !== "string") {
+      throw new UPPError(
+        "System prompt text must be a string",
+        ErrorCode.InvalidRequest,
+        "xai",
+        ModalityType.LLM
+      );
+    }
+    if (textValue.length > 0) {
+      texts.push(textValue);
+    }
+  }
+  return texts.length > 0 ? texts.join("\n\n") : void 0;
 }
 function transformMessages(messages, system) {
   const result = [];
@@ -216,11 +257,12 @@ function transformResponse(data) {
       });
     }
   }
+  const responseId = data.id || generateId();
   const message = new AssistantMessage(
     textContent,
     toolCalls.length > 0 ? toolCalls : void 0,
     {
-      id: data.id,
+      id: responseId,
       metadata: {
         xai: {
           model: data.model,
@@ -281,7 +323,7 @@ function transformStreamEvent(chunk, state) {
   const events = [];
   if (chunk.id && !state.id) {
     state.id = chunk.id;
-    events.push({ type: "message_start", index: 0, delta: {} });
+    events.push({ type: StreamEventType.MessageStart, index: 0, delta: {} });
   }
   if (chunk.model) {
     state.model = chunk.model;
@@ -291,7 +333,7 @@ function transformStreamEvent(chunk, state) {
     if (choice.delta.content) {
       state.text += choice.delta.content;
       events.push({
-        type: "text_delta",
+        type: StreamEventType.TextDelta,
         index: 0,
         delta: { text: choice.delta.content }
       });
@@ -300,7 +342,7 @@ function transformStreamEvent(chunk, state) {
       state.hadRefusal = true;
      state.text += choice.delta.refusal;
       events.push({
-        type: "text_delta",
+        type: StreamEventType.TextDelta,
         index: 0,
         delta: { text: choice.delta.refusal }
       });
@@ -322,7 +364,7 @@ function transformStreamEvent(chunk, state) {
         if (toolCallDelta.function?.arguments) {
           toolCall.arguments += toolCallDelta.function.arguments;
           events.push({
-            type: "tool_call_delta",
+            type: StreamEventType.ToolCallDelta,
             index,
             delta: {
               toolCallId: toolCall.id,
@@ -335,7 +377,7 @@ function transformStreamEvent(chunk, state) {
     }
     if (choice.finish_reason) {
       state.finishReason = choice.finish_reason;
-      events.push({ type: "message_stop", index: 0, delta: {} });
+      events.push({ type: StreamEventType.MessageStop, index: 0, delta: {} });
     }
   }
   if (chunk.usage) {
@@ -370,11 +412,12 @@ function buildResponseFromState(state) {
       arguments: args
     });
   }
+  const messageId = state.id || generateId();
   const message = new AssistantMessage(
     textContent,
     toolCalls.length > 0 ? toolCalls : void 0,
     {
-      id: state.id,
+      id: messageId,
       metadata: {
         xai: {
           model: state.model,
@@ -436,9 +479,9 @@ function createCompletionsLLMHandler() {
     if (!providerRef) {
       throw new UPPError(
         "Provider reference not set. Handler must be used with createProvider() or have _setProvider called.",
-        "INVALID_REQUEST",
+        ErrorCode.InvalidRequest,
         "xai",
-        "llm"
+        ModalityType.LLM
       );
     }
     const model = {
@@ -479,7 +522,7 @@ function createCompletionsLLMHandler() {
           "xai",
           "llm"
         );
-        const data = await response.json();
+        const data = await parseJsonResponse(response, "xai", "llm");
         return transformResponse(data);
       },
       stream(request) {
@@ -504,7 +547,8 @@ function createCompletionsLLMHandler() {
         body.stream_options = { include_usage: true };
         const headers = {
           "Content-Type": "application/json",
-          Authorization: `Bearer ${apiKey}`
+          Authorization: `Bearer ${apiKey}`,
+          Accept: "text/event-stream"
         };
         if (request.config.headers) {
           for (const [key, value] of Object.entries(request.config.headers)) {
@@ -533,9 +577,9 @@ function createCompletionsLLMHandler() {
         if (!response.body) {
           const error = new UPPError(
             "No response body for streaming request",
-            "PROVIDER_ERROR",
+            ErrorCode.ProviderError,
             "xai",
-            "llm"
+            ModalityType.LLM
           );
           responseReject(error);
           throw error;
@@ -550,9 +594,9 @@ function createCompletionsLLMHandler() {
             const errorData = chunk.error;
             const error = new UPPError(
              errorData.message ?? "Unknown error",
-              "PROVIDER_ERROR",
+              ErrorCode.ProviderError,
               "xai",
-              "llm"
+              ModalityType.LLM
             );
             responseReject(error);
             throw error;
@@ -565,8 +609,9 @@ function createCompletionsLLMHandler() {
           }
           responseResolve(buildResponseFromState(state));
         } catch (error) {
-          responseReject(error);
-          throw error;
+          const err = toError(error);
+          responseReject(err);
+          throw err;
         }
       }
       return {
@@ -622,9 +667,40 @@ function transformRequest2(request, modelId) {
   return xaiRequest;
 }
 function normalizeSystem2(system) {
-  if (!system) return void 0;
+  if (system === void 0 || system === null) return void 0;
   if (typeof system === "string") return system;
-  return system.map((block) => block.text ?? "").filter((text) => text.length > 0).join("\n\n");
+  if (!Array.isArray(system)) {
+    throw new UPPError(
+      "System prompt must be a string or an array of text blocks",
+      ErrorCode.InvalidRequest,
+      "xai",
+      ModalityType.LLM
+    );
+  }
+  const texts = [];
+  for (const block of system) {
+    if (!block || typeof block !== "object" || !("text" in block)) {
+      throw new UPPError(
+        "System prompt array must contain objects with a text field",
+        ErrorCode.InvalidRequest,
+        "xai",
+        ModalityType.LLM
+      );
+    }
+    const textValue = block.text;
+    if (typeof textValue !== "string") {
+      throw new UPPError(
+        "System prompt text must be a string",
+        ErrorCode.InvalidRequest,
+        "xai",
+        ModalityType.LLM
+      );
+    }
+    if (textValue.length > 0) {
+      texts.push(textValue);
+    }
+  }
+  return texts.length > 0 ? texts.join("\n\n") : void 0;
 }
 function transformInputItems(messages, system) {
   const result = [];
@@ -805,16 +881,17 @@ function transformResponse2(data) {
       });
     }
   }
+  const responseId = data.id || generateId();
   const message = new AssistantMessage(
     textContent,
     toolCalls.length > 0 ? toolCalls : void 0,
     {
-      id: data.id,
+      id: responseId,
       metadata: {
         xai: {
           model: data.model,
           status: data.status,
-          response_id: data.id,
+          response_id: responseId,
           functionCallItems: functionCallItems.length > 0 ? functionCallItems : void 0,
           citations: data.citations,
           inline_citations: data.inline_citations
@@ -866,7 +943,7 @@ function transformStreamEvent2(event, state) {
     case "response.created":
       state.id = event.response.id;
       state.model = event.response.model;
-      events.push({ type: "message_start", index: 0, delta: {} });
+      events.push({ type: StreamEventType.MessageStart, index: 0, delta: {} });
       break;
     case "response.in_progress":
      state.status = "in_progress";
@@ -878,11 +955,11 @@ function transformStreamEvent2(event, state) {
         state.outputTokens = event.response.usage.output_tokens;
         state.cacheReadTokens = event.response.usage.input_tokens_details?.cached_tokens ?? 0;
       }
-      events.push({ type: "message_stop", index: 0, delta: {} });
+      events.push({ type: StreamEventType.MessageStop, index: 0, delta: {} });
       break;
     case "response.failed":
       state.status = "failed";
-      events.push({ type: "message_stop", index: 0, delta: {} });
+      events.push({ type: StreamEventType.MessageStop, index: 0, delta: {} });
       break;
     case "response.output_item.added":
       if (event.item.type === "function_call") {
@@ -899,7 +976,7 @@ function transformStreamEvent2(event, state) {
         state.toolCalls.set(event.output_index, existing);
       }
       events.push({
-        type: "content_block_start",
+        type: StreamEventType.ContentBlockStart,
         index: event.output_index,
         delta: {}
       });
@@ -919,7 +996,7 @@ function transformStreamEvent2(event, state) {
         state.toolCalls.set(event.output_index, existing);
       }
       events.push({
-        type: "content_block_stop",
+        type: StreamEventType.ContentBlockStop,
         index: event.output_index,
         delta: {}
       });
@@ -928,7 +1005,7 @@ function transformStreamEvent2(event, state) {
       const currentText = state.textByIndex.get(event.output_index) ?? "";
       state.textByIndex.set(event.output_index, currentText + event.delta);
       events.push({
-        type: "text_delta",
+        type: StreamEventType.TextDelta,
         index: event.output_index,
         delta: { text: event.delta }
       });
@@ -942,7 +1019,7 @@ function transformStreamEvent2(event, state) {
       const currentRefusal = state.textByIndex.get(event.output_index) ?? "";
       state.textByIndex.set(event.output_index, currentRefusal + event.delta);
       events.push({
-        type: "text_delta",
+        type: StreamEventType.TextDelta,
         index: event.output_index,
         delta: { text: event.delta }
       });
@@ -966,7 +1043,7 @@ function transformStreamEvent2(event, state) {
       }
       toolCall.arguments += event.delta;
       events.push({
-        type: "tool_call_delta",
+        type: StreamEventType.ToolCallDelta,
         index: event.output_index,
         delta: {
           toolCallId: toolCall.callId ?? toolCall.itemId ?? "",
@@ -1040,16 +1117,17 @@ function buildResponseFromState2(state) {
       });
     }
   }
+  const responseId = state.id || generateId();
   const message = new AssistantMessage(
     textContent,
     toolCalls.length > 0 ? toolCalls : void 0,
     {
-      id: state.id,
+      id: responseId,
       metadata: {
         xai: {
           model: state.model,
           status: state.status,
-          response_id: state.id,
+          response_id: responseId,
           functionCallItems: functionCallItems.length > 0 ? functionCallItems : void 0
         }
       }
@@ -1099,9 +1177,9 @@ function createResponsesLLMHandler() {
     if (!providerRef) {
       throw new UPPError(
         "Provider reference not set. Handler must be used with createProvider() or have _setProvider called.",
-        "INVALID_REQUEST",
+        ErrorCode.InvalidRequest,
         "xai",
-        "llm"
+        ModalityType.LLM
       );
     }
     const model = {
@@ -1142,13 +1220,13 @@ function createResponsesLLMHandler() {
           "xai",
           "llm"
         );
-        const data = await response.json();
+        const data = await parseJsonResponse(response, "xai", "llm");
         if (data.status === "failed" && data.error) {
           throw new UPPError(
             data.error.message,
-            "PROVIDER_ERROR",
+            ErrorCode.ProviderError,
             "xai",
-            "llm"
+            ModalityType.LLM
           );
         }
         return transformResponse2(data);
@@ -1174,7 +1252,8 @@ function createResponsesLLMHandler() {
         body.stream = true;
         const headers = {
           "Content-Type": "application/json",
-          Authorization: `Bearer ${apiKey}`
+          Authorization: `Bearer ${apiKey}`,
+          Accept: "text/event-stream"
         };
         if (request.config.headers) {
           for (const [key, value] of Object.entries(request.config.headers)) {
@@ -1203,9 +1282,9 @@ function createResponsesLLMHandler() {
         if (!response.body) {
           const error = new UPPError(
             "No response body for streaming request",
-            "PROVIDER_ERROR",
+            ErrorCode.ProviderError,
             "xai",
-            "llm"
+            ModalityType.LLM
           );
           responseReject(error);
           throw error;
@@ -1220,9 +1299,9 @@ function createResponsesLLMHandler() {
             const errorEvent = event;
             const error = new UPPError(
               errorEvent.error.message,
-              "PROVIDER_ERROR",
+              ErrorCode.ProviderError,
               "xai",
-              "llm"
+              ModalityType.LLM
             );
             responseReject(error);
             throw error;
@@ -1235,8 +1314,9 @@ function createResponsesLLMHandler() {
           }
          responseResolve(buildResponseFromState2(state));
         } catch (error) {
-          responseReject(error);
-          throw error;
+          const err = toError(error);
+          responseReject(err);
+          throw err;
         }
       }
       return {
@@ -1254,9 +1334,40 @@ function createResponsesLLMHandler() {
 
 // src/providers/xai/transform.messages.ts
 function normalizeSystem3(system) {
-  if (!system) return void 0;
+  if (system === void 0 || system === null) return void 0;
   if (typeof system === "string") return system;
-  return system.map((block) => block.text ?? "").filter((text) => text.length > 0).join("\n\n");
+  if (!Array.isArray(system)) {
+    throw new UPPError(
+      "System prompt must be a string or an array of text blocks",
+      ErrorCode.InvalidRequest,
+      "xai",
+      ModalityType.LLM
+    );
+  }
+  const texts = [];
+  for (const block of system) {
+    if (!block || typeof block !== "object" || !("text" in block)) {
+      throw new UPPError(
+        "System prompt array must contain objects with a text field",
+        ErrorCode.InvalidRequest,
+        "xai",
+        ModalityType.LLM
+      );
+    }
+    const textValue = block.text;
+    if (typeof textValue !== "string") {
+      throw new UPPError(
+        "System prompt text must be a string",
+        ErrorCode.InvalidRequest,
+        "xai",
+        ModalityType.LLM
+      );
+    }
+    if (textValue.length > 0) {
+      texts.push(textValue);
+    }
+  }
+  return texts.length > 0 ? texts.join("\n\n") : void 0;
 }
 function transformRequest3(request, modelId) {
   const params = request.params ?? {};
@@ -1455,7 +1566,7 @@ function transformStreamEvent3(event, state) {
       state.inputTokens = event.message.usage.input_tokens;
       state.cacheReadTokens = event.message.usage.cache_read_input_tokens ?? 0;
       state.cacheWriteTokens = event.message.usage.cache_creation_input_tokens ?? 0;
-      return { type: "message_start", index: 0, delta: {} };
+      return { type: StreamEventType.MessageStart, index: 0, delta: {} };
     case "content_block_start":
       state.currentIndex = event.index;
      if (event.content_block.type === "text") {
@@ -1468,7 +1579,7 @@ function transformStreamEvent3(event, state) {
           input: ""
         };
       }
-      return { type: "content_block_start", index: event.index, delta: {} };
+      return { type: StreamEventType.ContentBlockStart, index: event.index, delta: {} };
     case "content_block_delta": {
       const delta = event.delta;
       const index = event.index ?? state.currentIndex;
@@ -1478,7 +1589,7 @@ function transformStreamEvent3(event, state) {
         }
         state.content[index].text = (state.content[index].text ?? "") + delta.text;
         return {
-          type: "text_delta",
+          type: StreamEventType.TextDelta,
           index,
           delta: { text: delta.text }
         };
@@ -1489,7 +1600,7 @@ function transformStreamEvent3(event, state) {
         }
         state.content[index].input = (state.content[index].input ?? "") + delta.partial_json;
         return {
-          type: "tool_call_delta",
+          type: StreamEventType.ToolCallDelta,
           index,
           delta: {
             argumentsJson: delta.partial_json,
@@ -1500,7 +1611,7 @@ function transformStreamEvent3(event, state) {
       }
       if (delta.type === "thinking_delta") {
        return {
-          type: "reasoning_delta",
+          type: StreamEventType.ReasoningDelta,
           index,
           delta: { text: delta.thinking }
         };
@@ -1508,13 +1619,13 @@ function transformStreamEvent3(event, state) {
       return null;
     }
     case "content_block_stop":
-      return { type: "content_block_stop", index: event.index ?? state.currentIndex, delta: {} };
+      return { type: StreamEventType.ContentBlockStop, index: event.index ?? state.currentIndex, delta: {} };
     case "message_delta":
      state.stopReason = event.delta.stop_reason;
       state.outputTokens = event.usage.output_tokens;
       return null;
     case "message_stop":
-      return { type: "message_stop", index: 0, delta: {} };
+      return { type: StreamEventType.MessageStop, index: 0, delta: {} };
     case "ping":
     case "error":
       return null;
@@ -1547,11 +1658,12 @@ function buildResponseFromState3(state) {
       });
     }
   }
+  const messageId = state.messageId || generateId();
   const message = new AssistantMessage(
     textContent,
     toolCalls.length > 0 ? toolCalls : void 0,
     {
-      id: state.messageId,
+      id: messageId,
       metadata: {
         xai: {
           stop_reason: state.stopReason,
@@ -1595,9 +1707,9 @@ function createMessagesLLMHandler() {
     if (!providerRef) {
       throw new UPPError(
         "Provider reference not set. Handler must be used with createProvider() or have _setProvider called.",
-        "INVALID_REQUEST",
+        ErrorCode.InvalidRequest,
         "xai",
-        "llm"
+        ModalityType.LLM
       );
     }
     const model = {
@@ -1639,7 +1751,7 @@ function createMessagesLLMHandler() {
           "xai",
           "llm"
         );
-        const data = await response.json();
+        const data = await parseJsonResponse(response, "xai", "llm");
         return transformResponse3(data);
       },
       stream(request) {
@@ -1664,7 +1776,8 @@ function createMessagesLLMHandler() {
         const headers = {
           "Content-Type": "application/json",
           "x-api-key": apiKey,
-          "anthropic-version": "2023-06-01"
+          "anthropic-version": "2023-06-01",
+          Accept: "text/event-stream"
         };
         if (request.config.headers) {
           for (const [key, value] of Object.entries(request.config.headers)) {
@@ -1693,9 +1806,9 @@ function createMessagesLLMHandler() {
         if (!response.body) {
           const error = new UPPError(
             "No response body for streaming request",
-            "PROVIDER_ERROR",
+            ErrorCode.ProviderError,
             "xai",
-            "llm"
+            ModalityType.LLM
           );
           responseReject(error);
           throw error;
@@ -1706,9 +1819,9 @@ function createMessagesLLMHandler() {
           if (event.type === "error") {
            const error = new UPPError(
               event.error.message,
-              "PROVIDER_ERROR",
+              ErrorCode.ProviderError,
               "xai",
-              "llm"
+              ModalityType.LLM
             );
             responseReject(error);
             throw error;
@@ -1721,8 +1834,9 @@ function createMessagesLLMHandler() {
           }
           responseResolve(buildResponseFromState3(state));
         } catch (error) {
-          responseReject(error);
-          throw error;
+          const err = toError(error);
+          responseReject(err);
+          throw err;
         }
       }
       return {
@@ -1758,9 +1872,9 @@ function createImageHandler() {
     if (!providerRef) {
       throw new UPPError(
         "Provider reference not set. Handler must be used with createProvider().",
-        "INVALID_REQUEST",
+        ErrorCode.InvalidRequest,
         "xai",
-        "image"
+        ModalityType.Image
      );
     }
     const capabilities = getCapabilities(modelId);
@@ -1813,7 +1927,7 @@ async function executeGenerate(modelId, request) {
     body: JSON.stringify(body),
     signal: request.signal
   }, request.config, "xai", "image");
-  const data = await response.json();
+  const data = await parseJsonResponse(response, "xai", "image");
   return transformResponse4(data);
 }
 function transformResponse4(data) {
@@ -1826,9 +1940,9 @@ function transformResponse4(data) {
   } else {
     throw new UPPError(
       "No image data in response",
-      "PROVIDER_ERROR",
+      ErrorCode.ProviderError,
       "xai",
-      "image"
+      ModalityType.Image
     );
   }
   return {