@providerprotocol/ai 0.0.11 → 0.0.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/anthropic/index.d.ts +51 -15
- package/dist/anthropic/index.js +54 -19
- package/dist/anthropic/index.js.map +1 -1
- package/dist/{chunk-SUNYWHTH.js → chunk-MOU4U3PO.js} +55 -3
- package/dist/chunk-MOU4U3PO.js.map +1 -0
- package/dist/{chunk-Y6Q7JCNP.js → chunk-MSR5P65T.js} +1 -1
- package/dist/chunk-MSR5P65T.js.map +1 -0
- package/dist/{chunk-W4BB4BG2.js → chunk-SVYROCLD.js} +31 -11
- package/dist/chunk-SVYROCLD.js.map +1 -0
- package/dist/chunk-U4JJC2YX.js +234 -0
- package/dist/chunk-U4JJC2YX.js.map +1 -0
- package/dist/{chunk-X5G4EHL7.js → chunk-Z7RBRCRN.js} +1 -1
- package/dist/chunk-Z7RBRCRN.js.map +1 -0
- package/dist/google/index.d.ts +376 -7
- package/dist/google/index.js +127 -15
- package/dist/google/index.js.map +1 -1
- package/dist/http/index.d.ts +222 -25
- package/dist/http/index.js +3 -3
- package/dist/index.d.ts +1482 -198
- package/dist/index.js +233 -49
- package/dist/index.js.map +1 -1
- package/dist/ollama/index.d.ts +92 -20
- package/dist/ollama/index.js +17 -7
- package/dist/ollama/index.js.map +1 -1
- package/dist/openai/index.d.ts +340 -61
- package/dist/openai/index.js +57 -15
- package/dist/openai/index.js.map +1 -1
- package/dist/openrouter/index.d.ts +107 -51
- package/dist/openrouter/index.js +36 -8
- package/dist/openrouter/index.js.map +1 -1
- package/dist/provider-mKkz7Q9U.d.ts +488 -0
- package/dist/retry-Dh70lgr0.d.ts +508 -0
- package/dist/xai/index.d.ts +97 -22
- package/dist/xai/index.js +55 -19
- package/dist/xai/index.js.map +1 -1
- package/package.json +8 -12
- package/dist/chunk-CUCRF5W6.js +0 -136
- package/dist/chunk-CUCRF5W6.js.map +0 -1
- package/dist/chunk-SUNYWHTH.js.map +0 -1
- package/dist/chunk-W4BB4BG2.js.map +0 -1
- package/dist/chunk-X5G4EHL7.js.map +0 -1
- package/dist/chunk-Y6Q7JCNP.js.map +0 -1
- package/dist/provider-CUJWjgNl.d.ts +0 -192
- package/dist/retry-I2661_rv.d.ts +0 -118
- package/src/anthropic/index.ts +0 -3
- package/src/core/image.ts +0 -188
- package/src/core/llm.ts +0 -650
- package/src/core/provider.ts +0 -92
- package/src/google/index.ts +0 -3
- package/src/http/errors.ts +0 -112
- package/src/http/fetch.ts +0 -210
- package/src/http/index.ts +0 -31
- package/src/http/keys.ts +0 -136
- package/src/http/retry.ts +0 -205
- package/src/http/sse.ts +0 -136
- package/src/index.ts +0 -32
- package/src/ollama/index.ts +0 -3
- package/src/openai/index.ts +0 -39
- package/src/openrouter/index.ts +0 -11
- package/src/providers/anthropic/index.ts +0 -17
- package/src/providers/anthropic/llm.ts +0 -196
- package/src/providers/anthropic/transform.ts +0 -434
- package/src/providers/anthropic/types.ts +0 -213
- package/src/providers/google/index.ts +0 -17
- package/src/providers/google/llm.ts +0 -203
- package/src/providers/google/transform.ts +0 -447
- package/src/providers/google/types.ts +0 -214
- package/src/providers/ollama/index.ts +0 -43
- package/src/providers/ollama/llm.ts +0 -272
- package/src/providers/ollama/transform.ts +0 -434
- package/src/providers/ollama/types.ts +0 -260
- package/src/providers/openai/index.ts +0 -186
- package/src/providers/openai/llm.completions.ts +0 -201
- package/src/providers/openai/llm.responses.ts +0 -211
- package/src/providers/openai/transform.completions.ts +0 -561
- package/src/providers/openai/transform.responses.ts +0 -708
- package/src/providers/openai/types.ts +0 -1249
- package/src/providers/openrouter/index.ts +0 -177
- package/src/providers/openrouter/llm.completions.ts +0 -201
- package/src/providers/openrouter/llm.responses.ts +0 -211
- package/src/providers/openrouter/transform.completions.ts +0 -538
- package/src/providers/openrouter/transform.responses.ts +0 -742
- package/src/providers/openrouter/types.ts +0 -717
- package/src/providers/xai/index.ts +0 -223
- package/src/providers/xai/llm.completions.ts +0 -201
- package/src/providers/xai/llm.messages.ts +0 -195
- package/src/providers/xai/llm.responses.ts +0 -211
- package/src/providers/xai/transform.completions.ts +0 -565
- package/src/providers/xai/transform.messages.ts +0 -448
- package/src/providers/xai/transform.responses.ts +0 -678
- package/src/providers/xai/types.ts +0 -938
- package/src/types/content.ts +0 -133
- package/src/types/errors.ts +0 -85
- package/src/types/index.ts +0 -105
- package/src/types/llm.ts +0 -211
- package/src/types/messages.ts +0 -205
- package/src/types/provider.ts +0 -195
- package/src/types/schema.ts +0 -58
- package/src/types/stream.ts +0 -188
- package/src/types/thread.ts +0 -226
- package/src/types/tool.ts +0 -88
- package/src/types/turn.ts +0 -118
- package/src/utils/id.ts +0 -28
- package/src/xai/index.ts +0 -41
package/dist/xai/index.js
CHANGED

@@ -3,17 +3,17 @@ import {
   isAssistantMessage,
   isToolResultMessage,
   isUserMessage
-} from "../chunk-W4BB4BG2.js";
+} from "../chunk-SVYROCLD.js";
 import {
   parseSSEStream
-} from "../chunk-X5G4EHL7.js";
+} from "../chunk-Z7RBRCRN.js";
 import {
   UPPError,
   doFetch,
   doStreamFetch,
   normalizeHttpError,
   resolveApiKey
-} from "../chunk-SUNYWHTH.js";
+} from "../chunk-MOU4U3PO.js";
 
 // src/providers/xai/transform.completions.ts
 function transformRequest(request, modelId) {
@@ -48,12 +48,18 @@ function transformRequest(request, modelId) {
   }
   return xaiRequest;
 }
+function normalizeSystem(system) {
+  if (!system) return void 0;
+  if (typeof system === "string") return system;
+  return system.map((block) => block.text ?? "").filter((text) => text.length > 0).join("\n\n");
+}
 function transformMessages(messages, system) {
   const result = [];
-  if (system) {
+  const normalizedSystem = normalizeSystem(system);
+  if (normalizedSystem) {
     result.push({
       role: "system",
-      content: system
+      content: normalizedSystem
     });
   }
   for (const message of messages) {
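The new `normalizeSystem` helper (emitted once per handler by the bundler, hence the `normalizeSystem2`/`normalizeSystem3` copies further down) lets the system prompt arrive either as a plain string or as an array of text content blocks, flattening the latter into one string. A minimal typed sketch of what the compiled output implies; the `TextBlock`/`SystemPrompt` names and the optionality of `text` are assumptions, not the package's declared types:

```ts
// Sketch only: type names are hypothetical, inferred from the compiled JS above.
type TextBlock = { type: "text"; text?: string };
type SystemPrompt = string | TextBlock[] | undefined;

function normalizeSystem(system: SystemPrompt): string | undefined {
  if (!system) return undefined;
  if (typeof system === "string") return system;
  // Drop empty blocks, then join the rest with a blank line.
  return system
    .map((block) => block.text ?? "")
    .filter((text) => text.length > 0)
    .join("\n\n");
}

// Both inputs normalize to the same provider-ready string:
console.log(normalizeSystem("You are terse."));
console.log(normalizeSystem([{ type: "text", text: "You are terse." }]));
```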
@@ -92,7 +98,6 @@ function transformMessage(message) {
   const hasToolCalls = message.toolCalls && message.toolCalls.length > 0;
   const assistantMessage = {
     role: "assistant",
-    // xAI/OpenAI: content should be null when tool_calls are present and there's no text
     content: hasToolCalls && !textContent ? null : textContent
   };
   if (hasToolCalls) {
@@ -224,7 +229,9 @@ function transformResponse(data) {
   const usage = {
     inputTokens: data.usage.prompt_tokens,
     outputTokens: data.usage.completion_tokens,
-    totalTokens: data.usage.total_tokens
+    totalTokens: data.usage.total_tokens,
+    cacheReadTokens: data.usage.prompt_tokens_details?.cached_tokens ?? 0,
+    cacheWriteTokens: 0
   };
   let stopReason = "end_turn";
   switch (choice.finish_reason) {
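The usage hunk above is the first of several in this file that extend the normalized usage object with cache accounting. A hedged sketch of the completions-path mapping; the interface names are hypothetical stand-ins for the package's real types:

```ts
// Hypothetical names; the field reads match the compiled output above.
interface CompletionsUsage {
  prompt_tokens: number;
  completion_tokens: number;
  total_tokens: number;
  prompt_tokens_details?: { cached_tokens?: number };
}

interface NormalizedUsage {
  inputTokens: number;
  outputTokens: number;
  totalTokens: number;
  cacheReadTokens: number;  // tokens served from the provider's prompt cache
  cacheWriteTokens: number; // tokens written to the cache
}

function toNormalizedUsage(u: CompletionsUsage): NormalizedUsage {
  return {
    inputTokens: u.prompt_tokens,
    outputTokens: u.completion_tokens,
    totalTokens: u.total_tokens,
    cacheReadTokens: u.prompt_tokens_details?.cached_tokens ?? 0,
    // The chat-completions usage payload reports no cache writes, hence 0.
    cacheWriteTokens: 0,
  };
}
```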
@@ -260,6 +267,7 @@ function createStreamState() {
     finishReason: null,
     inputTokens: 0,
     outputTokens: 0,
+    cacheReadTokens: 0,
     hadRefusal: false
   };
 }
@@ -327,6 +335,7 @@ function transformStreamEvent(chunk, state) {
   if (chunk.usage) {
     state.inputTokens = chunk.usage.prompt_tokens;
     state.outputTokens = chunk.usage.completion_tokens;
+    state.cacheReadTokens = chunk.usage.prompt_tokens_details?.cached_tokens ?? 0;
   }
   return events;
 }
@@ -371,7 +380,9 @@ function buildResponseFromState(state) {
   const usage = {
     inputTokens: state.inputTokens,
     outputTokens: state.outputTokens,
-    totalTokens: state.inputTokens + state.outputTokens
+    totalTokens: state.inputTokens + state.outputTokens,
+    cacheReadTokens: state.cacheReadTokens,
+    cacheWriteTokens: 0
   };
   let stopReason = "end_turn";
   switch (state.finishReason) {
@@ -582,13 +593,19 @@ function transformRequest2(request, modelId) {
   }
   return xaiRequest;
 }
+function normalizeSystem2(system) {
+  if (!system) return void 0;
+  if (typeof system === "string") return system;
+  return system.map((block) => block.text ?? "").filter((text) => text.length > 0).join("\n\n");
+}
 function transformInputItems(messages, system) {
   const result = [];
-  if (system) {
+  const normalizedSystem = normalizeSystem2(system);
+  if (normalizedSystem) {
     result.push({
       type: "message",
       role: "system",
-      content: system
+      content: normalizedSystem
     });
   }
   for (const message of messages) {
@@ -769,7 +786,6 @@ function transformResponse2(data) {
     xai: {
       model: data.model,
       status: data.status,
-      // Store response_id for multi-turn tool calling
       response_id: data.id,
       functionCallItems: functionCallItems.length > 0 ? functionCallItems : void 0,
       citations: data.citations,
@@ -781,7 +797,9 @@ function transformResponse2(data) {
   const usage = {
     inputTokens: data.usage.input_tokens,
     outputTokens: data.usage.output_tokens,
-    totalTokens: data.usage.total_tokens
+    totalTokens: data.usage.total_tokens,
+    cacheReadTokens: data.usage.input_tokens_details?.cached_tokens ?? 0,
+    cacheWriteTokens: 0
   };
   let stopReason = "end_turn";
   if (data.status === "completed") {
@@ -810,6 +828,7 @@ function createStreamState2() {
     status: "in_progress",
     inputTokens: 0,
     outputTokens: 0,
+    cacheReadTokens: 0,
     hadRefusal: false
   };
 }
@@ -829,6 +848,7 @@ function transformStreamEvent2(event, state) {
       if (event.response.usage) {
        state.inputTokens = event.response.usage.input_tokens;
        state.outputTokens = event.response.usage.output_tokens;
+       state.cacheReadTokens = event.response.usage.input_tokens_details?.cached_tokens ?? 0;
      }
      events.push({ type: "message_stop", index: 0, delta: {} });
      break;
@@ -876,7 +896,7 @@ function transformStreamEvent2(event, state) {
        delta: {}
      });
      break;
-    case "response.output_text.delta":
+    case "response.output_text.delta": {
      const currentText = state.textByIndex.get(event.output_index) ?? "";
      state.textByIndex.set(event.output_index, currentText + event.delta);
      events.push({
@@ -885,6 +905,7 @@ function transformStreamEvent2(event, state) {
        delta: { text: event.delta }
      });
      break;
+    }
    case "response.output_text.done":
      state.textByIndex.set(event.output_index, event.text);
      break;
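The braces added around `case "response.output_text.delta"` are not cosmetic: a `const` declared directly in a case clause is scoped to the entire `switch` body, so a same-named declaration in another case is a redeclaration error, and the binding sits in its temporal dead zone until its case executes. A standalone illustration of the pattern (not package code):

```ts
// Each braced case gets its own block scope, so `value` can be redeclared.
function demo(kind: "a" | "b"): number {
  switch (kind) {
    case "a": {
      const value = 1; // scoped to this case only
      return value;
    }
    case "b": {
      const value = 2; // no collision with the `value` above
      return value;
    }
  }
}
```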
@@ -1000,7 +1021,6 @@ function buildResponseFromState2(state) {
     xai: {
       model: state.model,
       status: state.status,
-      // Store response_id for multi-turn tool calling
       response_id: state.id,
       functionCallItems: functionCallItems.length > 0 ? functionCallItems : void 0
     }
@@ -1010,7 +1030,9 @@ function buildResponseFromState2(state) {
   const usage = {
     inputTokens: state.inputTokens,
     outputTokens: state.outputTokens,
-    totalTokens: state.inputTokens + state.outputTokens
+    totalTokens: state.inputTokens + state.outputTokens,
+    cacheReadTokens: state.cacheReadTokens,
+    cacheWriteTokens: 0
   };
   let stopReason = "end_turn";
   if (state.status === "completed") {
@@ -1187,15 +1209,21 @@ function createResponsesLLMHandler() {
 }
 
 // src/providers/xai/transform.messages.ts
+function normalizeSystem3(system) {
+  if (!system) return void 0;
+  if (typeof system === "string") return system;
+  return system.map((block) => block.text ?? "").filter((text) => text.length > 0).join("\n\n");
+}
 function transformRequest3(request, modelId) {
   const params = request.params ?? {};
+  const normalizedSystem = normalizeSystem3(request.system);
   const xaiRequest = {
     ...params,
     model: modelId,
     messages: request.messages.map(transformMessage3)
   };
-  if (request.system) {
-    xaiRequest.system = request.system;
+  if (normalizedSystem) {
+    xaiRequest.system = normalizedSystem;
   }
   if (request.tools && request.tools.length > 0) {
     xaiRequest.tools = request.tools.map(transformTool3);
@@ -1351,7 +1379,9 @@ function transformResponse3(data) {
   const usage = {
     inputTokens: data.usage.input_tokens,
     outputTokens: data.usage.output_tokens,
-    totalTokens: data.usage.input_tokens + data.usage.output_tokens
+    totalTokens: data.usage.input_tokens + data.usage.output_tokens,
+    cacheReadTokens: data.usage.cache_read_input_tokens ?? 0,
+    cacheWriteTokens: data.usage.cache_creation_input_tokens ?? 0
   };
   return {
     message,
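This messages-path hunk completes the pattern: each of the three xAI wire formats surfaces cached tokens under a different field, and only the Anthropic-compatible messages API reports cache writes. An illustrative consolidation of the three mappings seen in this diff; the union type and helper are hypothetical, not part of the package:

```ts
// Hypothetical consolidation; field names match what the compiled code reads.
type WireUsage =
  | { api: "completions"; prompt_tokens_details?: { cached_tokens?: number } }
  | { api: "responses"; input_tokens_details?: { cached_tokens?: number } }
  | {
      api: "messages";
      cache_read_input_tokens?: number;
      cache_creation_input_tokens?: number;
    };

function cacheTokens(u: WireUsage): { read: number; write: number } {
  switch (u.api) {
    case "completions":
      return { read: u.prompt_tokens_details?.cached_tokens ?? 0, write: 0 };
    case "responses":
      return { read: u.input_tokens_details?.cached_tokens ?? 0, write: 0 };
    case "messages":
      // Only this API distinguishes cache reads from cache writes.
      return {
        read: u.cache_read_input_tokens ?? 0,
        write: u.cache_creation_input_tokens ?? 0,
      };
  }
}
```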
@@ -1368,6 +1398,8 @@ function createStreamState3() {
     stopReason: null,
     inputTokens: 0,
     outputTokens: 0,
+    cacheReadTokens: 0,
+    cacheWriteTokens: 0,
     currentIndex: 0
   };
 }
@@ -1377,6 +1409,8 @@ function transformStreamEvent3(event, state) {
       state.messageId = event.message.id;
       state.model = event.message.model;
       state.inputTokens = event.message.usage.input_tokens;
+      state.cacheReadTokens = event.message.usage.cache_read_input_tokens ?? 0;
+      state.cacheWriteTokens = event.message.usage.cache_creation_input_tokens ?? 0;
       return { type: "message_start", index: 0, delta: {} };
     case "content_block_start":
       state.currentIndex = event.index;
@@ -1485,7 +1519,9 @@ function buildResponseFromState3(state) {
   const usage = {
     inputTokens: state.inputTokens,
     outputTokens: state.outputTokens,
-    totalTokens: state.inputTokens + state.outputTokens
+    totalTokens: state.inputTokens + state.outputTokens,
+    cacheReadTokens: state.cacheReadTokens,
+    cacheWriteTokens: state.cacheWriteTokens
   };
   return {
     message,