@providerprotocol/ai 0.0.22 → 0.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +188 -6
- package/dist/anthropic/index.d.ts +1 -1
- package/dist/anthropic/index.js +30 -25
- package/dist/anthropic/index.js.map +1 -1
- package/dist/{chunk-7WYBJPJJ.js → chunk-55X3W2MN.js} +4 -3
- package/dist/chunk-55X3W2MN.js.map +1 -0
- package/dist/chunk-73IIE3QT.js +120 -0
- package/dist/chunk-73IIE3QT.js.map +1 -0
- package/dist/{chunk-M4BMM5IB.js → chunk-MF5ETY5O.js} +13 -4
- package/dist/chunk-MF5ETY5O.js.map +1 -0
- package/dist/{chunk-RFWLEFAB.js → chunk-QNJO7DSD.js} +61 -16
- package/dist/chunk-QNJO7DSD.js.map +1 -0
- package/dist/{chunk-RS7C25LS.js → chunk-SBCATNHA.js} +9 -5
- package/dist/chunk-SBCATNHA.js.map +1 -0
- package/dist/{chunk-I2VHCGQE.js → chunk-Z6DKC37J.js} +6 -5
- package/dist/chunk-Z6DKC37J.js.map +1 -0
- package/dist/google/index.d.ts +3 -2
- package/dist/google/index.js +38 -33
- package/dist/google/index.js.map +1 -1
- package/dist/http/index.d.ts +2 -2
- package/dist/http/index.js +3 -3
- package/dist/index.d.ts +8 -6
- package/dist/index.js +81 -121
- package/dist/index.js.map +1 -1
- package/dist/ollama/index.d.ts +5 -2
- package/dist/ollama/index.js +34 -29
- package/dist/ollama/index.js.map +1 -1
- package/dist/openai/index.d.ts +1 -1
- package/dist/openai/index.js +58 -53
- package/dist/openai/index.js.map +1 -1
- package/dist/openrouter/index.d.ts +1 -1
- package/dist/openrouter/index.js +57 -52
- package/dist/openrouter/index.js.map +1 -1
- package/dist/{provider-DWEAzeM5.d.ts → provider-DR1yins0.d.ts} +148 -52
- package/dist/proxy/index.d.ts +2 -2
- package/dist/proxy/index.js +11 -9
- package/dist/proxy/index.js.map +1 -1
- package/dist/{retry-DmPmqZL6.d.ts → retry-DJiqAslw.d.ts} +1 -1
- package/dist/{stream-DbkLOIbJ.d.ts → stream-BuTrqt_j.d.ts} +90 -38
- package/dist/xai/index.d.ts +1 -1
- package/dist/xai/index.js +71 -66
- package/dist/xai/index.js.map +1 -1
- package/package.json +1 -1
- package/dist/chunk-7WYBJPJJ.js.map +0 -1
- package/dist/chunk-I2VHCGQE.js.map +0 -1
- package/dist/chunk-M4BMM5IB.js.map +0 -1
- package/dist/chunk-RFWLEFAB.js.map +0 -1
- package/dist/chunk-RS7C25LS.js.map +0 -1
package/dist/openai/index.js
CHANGED
@@ -3,7 +3,10 @@ import {
 } from "../chunk-WAKD3OO5.js";
 import {
   parseJsonResponse
-} from "../chunk-
+} from "../chunk-Z6DKC37J.js";
+import {
+  StreamEventType
+} from "../chunk-73IIE3QT.js";
 import {
   AssistantMessage,
   createProvider,
@@ -11,20 +14,22 @@ import {
   isAssistantMessage,
   isToolResultMessage,
   isUserMessage
-} from "../chunk-
+} from "../chunk-MF5ETY5O.js";
 import {
   parseSSEStream
 } from "../chunk-NWS5IKNR.js";
 import {
   resolveApiKey
-} from "../chunk-
+} from "../chunk-55X3W2MN.js";
 import {
+  ErrorCode,
+  ModalityType,
   UPPError,
   doFetch,
   doStreamFetch,
   normalizeHttpError,
   toError
-} from "../chunk-
+} from "../chunk-QNJO7DSD.js";
 
 // src/providers/openai/transform.completions.ts
 function transformRequest(request, modelId) {
@@ -65,9 +70,9 @@ function normalizeSystem(system) {
   if (!Array.isArray(system)) {
     throw new UPPError(
       "System prompt must be a string or an array of text blocks",
-
+      ErrorCode.InvalidRequest,
       "openai",
-
+      ModalityType.LLM
     );
   }
   const texts = [];
@@ -75,18 +80,18 @@ function normalizeSystem(system) {
     if (!block || typeof block !== "object" || !("text" in block)) {
       throw new UPPError(
         "System prompt array must contain objects with a text field",
-
+        ErrorCode.InvalidRequest,
         "openai",
-
+        ModalityType.LLM
       );
     }
     const textValue = block.text;
     if (typeof textValue !== "string") {
       throw new UPPError(
         "System prompt text must be a string",
-
+        ErrorCode.InvalidRequest,
         "openai",
-
+        ModalityType.LLM
       );
     }
     if (textValue.length > 0) {
@@ -321,7 +326,7 @@ function transformStreamEvent(chunk, state) {
   const events = [];
   if (chunk.id && !state.id) {
     state.id = chunk.id;
-    events.push({ type:
+    events.push({ type: StreamEventType.MessageStart, index: 0, delta: {} });
   }
   if (chunk.model) {
     state.model = chunk.model;
@@ -331,7 +336,7 @@ function transformStreamEvent(chunk, state) {
     if (choice.delta.content) {
       state.text += choice.delta.content;
       events.push({
-        type:
+        type: StreamEventType.TextDelta,
         index: 0,
         delta: { text: choice.delta.content }
       });
@@ -340,7 +345,7 @@ function transformStreamEvent(chunk, state) {
       state.hadRefusal = true;
       state.text += choice.delta.refusal;
       events.push({
-        type:
+        type: StreamEventType.TextDelta,
         index: 0,
         delta: { text: choice.delta.refusal }
       });
@@ -362,7 +367,7 @@ function transformStreamEvent(chunk, state) {
         if (toolCallDelta.function?.arguments) {
           toolCall.arguments += toolCallDelta.function.arguments;
           events.push({
-            type:
+            type: StreamEventType.ToolCallDelta,
             index,
             delta: {
               toolCallId: toolCall.id,
@@ -375,7 +380,7 @@ function transformStreamEvent(chunk, state) {
     }
     if (choice.finish_reason) {
       state.finishReason = choice.finish_reason;
-      events.push({ type:
+      events.push({ type: StreamEventType.MessageStop, index: 0, delta: {} });
     }
   }
   if (chunk.usage) {
@@ -477,9 +482,9 @@ function createCompletionsLLMHandler() {
     if (!providerRef) {
       throw new UPPError(
         "Provider reference not set. Handler must be used with createProvider() or have _setProvider called.",
-
+        ErrorCode.InvalidRequest,
         "openai",
-
+        ModalityType.LLM
       );
     }
     const model = {
@@ -575,9 +580,9 @@ function createCompletionsLLMHandler() {
       if (!response.body) {
        const error = new UPPError(
          "No response body for streaming request",
-
+          ErrorCode.ProviderError,
          "openai",
-
+          ModalityType.LLM
        );
        responseReject(error);
        throw error;
@@ -592,9 +597,9 @@ function createCompletionsLLMHandler() {
          const errorData = chunk.error;
          const error = new UPPError(
            errorData.message ?? "Unknown error",
-
+            ErrorCode.ProviderError,
            "openai",
-
+            ModalityType.LLM
          );
          responseReject(error);
          throw error;
@@ -670,9 +675,9 @@ function normalizeSystem2(system) {
   if (!Array.isArray(system)) {
     throw new UPPError(
       "System prompt must be a string or an array of text blocks",
-
+      ErrorCode.InvalidRequest,
       "openai",
-
+      ModalityType.LLM
     );
   }
   const texts = [];
@@ -680,18 +685,18 @@ function normalizeSystem2(system) {
     if (!block || typeof block !== "object" || !("text" in block)) {
       throw new UPPError(
         "System prompt array must contain objects with a text field",
-
+        ErrorCode.InvalidRequest,
         "openai",
-
+        ModalityType.LLM
       );
     }
     const textValue = block.text;
     if (typeof textValue !== "string") {
       throw new UPPError(
         "System prompt text must be a string",
-
+        ErrorCode.InvalidRequest,
         "openai",
-
+        ModalityType.LLM
       );
     }
     if (textValue.length > 0) {
@@ -974,18 +979,18 @@ function transformStreamEvent2(event, state) {
   switch (event.type) {
     case "response.created":
       updateFromResponse(event.response);
-      events.push({ type:
+      events.push({ type: StreamEventType.MessageStart, index: 0, delta: {} });
       break;
     case "response.in_progress":
       updateFromResponse(event.response);
       break;
     case "response.completed":
       updateFromResponse(event.response);
-      events.push({ type:
+      events.push({ type: StreamEventType.MessageStop, index: 0, delta: {} });
       break;
     case "response.failed":
       updateFromResponse(event.response);
-      events.push({ type:
+      events.push({ type: StreamEventType.MessageStop, index: 0, delta: {} });
       break;
     case "response.output_item.added":
      if (event.item.type === "function_call") {
@@ -1002,7 +1007,7 @@ function transformStreamEvent2(event, state) {
        state.toolCalls.set(event.output_index, existing);
      }
      events.push({
-        type:
+        type: StreamEventType.ContentBlockStart,
        index: event.output_index,
        delta: {}
      });
@@ -1030,7 +1035,7 @@ function transformStreamEvent2(event, state) {
        }
      }
      events.push({
-        type:
+        type: StreamEventType.ContentBlockStop,
        index: event.output_index,
        delta: {}
      });
@@ -1039,7 +1044,7 @@ function transformStreamEvent2(event, state) {
      const currentText = state.textByIndex.get(event.output_index) ?? "";
      state.textByIndex.set(event.output_index, currentText + event.delta);
      events.push({
-        type:
+        type: StreamEventType.TextDelta,
        index: event.output_index,
        delta: { text: event.delta }
      });
@@ -1053,7 +1058,7 @@ function transformStreamEvent2(event, state) {
      const currentRefusal = state.textByIndex.get(event.output_index) ?? "";
      state.textByIndex.set(event.output_index, currentRefusal + event.delta);
      events.push({
-        type:
+        type: StreamEventType.TextDelta,
        index: event.output_index,
        delta: { text: event.delta }
      });
@@ -1077,7 +1082,7 @@ function transformStreamEvent2(event, state) {
      }
      toolCall.arguments += event.delta;
      events.push({
-        type:
+        type: StreamEventType.ToolCallDelta,
        index: event.output_index,
        delta: {
          toolCallId: toolCall.callId ?? toolCall.itemId ?? "",
@@ -1229,9 +1234,9 @@ function createResponsesLLMHandler() {
     if (!providerRef) {
       throw new UPPError(
         "Provider reference not set. Handler must be used with createProvider() or have _setProvider called.",
-
+        ErrorCode.InvalidRequest,
         "openai",
-
+        ModalityType.LLM
       );
     }
     const model = {
@@ -1276,9 +1281,9 @@ function createResponsesLLMHandler() {
      if (data.status === "failed" && data.error) {
        throw new UPPError(
          data.error.message,
-
+          ErrorCode.ProviderError,
          "openai",
-
+          ModalityType.LLM
        );
      }
      return transformResponse2(data);
@@ -1334,9 +1339,9 @@ function createResponsesLLMHandler() {
      if (!response.body) {
        const error = new UPPError(
          "No response body for streaming request",
-
+          ErrorCode.ProviderError,
          "openai",
-
+          ModalityType.LLM
        );
        responseReject(error);
        throw error;
@@ -1351,9 +1356,9 @@ function createResponsesLLMHandler() {
          const errorEvent = event;
          const error = new UPPError(
            errorEvent.error.message,
-
+            ErrorCode.ProviderError,
            "openai",
-
+            ModalityType.LLM
          );
          responseReject(error);
          throw error;
@@ -1406,9 +1411,9 @@ function createEmbeddingHandler() {
     if (!providerRef) {
       throw new UPPError(
         "Provider reference not set. Handler must be used with createProvider().",
-
+        ErrorCode.InvalidRequest,
         "openai",
-
+        ModalityType.Embedding
       );
     }
     const model = {
@@ -1436,9 +1441,9 @@ function createEmbeddingHandler() {
      }
      throw new UPPError(
        "OpenAI embeddings only support text input",
-
+        ErrorCode.InvalidRequest,
        "openai",
-
+        ModalityType.Embedding
      );
    });
    const body = {
@@ -1501,9 +1506,9 @@ function createImageHandler() {
     if (!providerRef) {
       throw new UPPError(
         "Provider reference not set. Handler must be used with createProvider().",
-
+        ErrorCode.InvalidRequest,
         "openai",
-
+        ModalityType.Image
       );
     }
     const capabilities = getCapabilities(modelId);
@@ -1653,9 +1658,9 @@ function executeStream(modelId, request) {
   if (!reader) {
     throw new UPPError(
       "No response body for streaming",
-
+      ErrorCode.ProviderError,
       "openai",
-
+      ModalityType.Image
     );
   }
   const decoder = new TextDecoder();
@@ -1772,9 +1777,9 @@ function transformResponse3(data) {
   } else {
     throw new UPPError(
       "No image data in response",
-
+      ErrorCode.ProviderError,
      "openai",
-
+      ModalityType.Image
    );
  }
  return {