@llmops/gateway 0.4.2 → 0.4.3
- package/dist/index.cjs +25 -9
- package/dist/index.mjs +25 -9
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
@@ -10375,7 +10375,7 @@ const retryRequest = async (url, options, retryCount, statusCodesToRetry, timeou
 
 //#endregion
 //#region package.json
-var version = "0.4.2";
+var version = "0.4.3";
 
 //#endregion
 //#region src/providers/bytez/api.ts
@@ -17247,7 +17247,7 @@ const transformToProviderRequestJSON = (provider, params, fn, providerOptions) =
     providerOptions
   })[fn];
   else providerConfig = providerConfig[fn];
-  if (!providerConfig) throw new GatewayError(
+  if (!providerConfig) throw new GatewayError(`The endpoint '${fn}' is not supported by the '${provider}' provider`, 501);
   return transformUsingProviderConfig(providerConfig, params, providerOptions);
 };
 const transformToProviderRequestFormData = (provider, params, fn, providerOptions) => {
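Note on this hunk: the unsupported-endpoint error now passes an explicit HTTP 501 (Not Implemented) as its second argument. A minimal sketch of the GatewayError shape this implies, inferred from the call sites in this diff rather than copied from the gateway source:

    // Sketch only: the (message, status) constructor is inferred from call sites;
    // the 500 default is an assumption.
    class GatewayError extends Error {
      status: number;
      constructor(message: string, status = 500) {
        super(message);
        this.name = "GatewayError";
        this.status = status; // surfaced as the HTTP status of the gateway's error response
      }
    }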
@@ -17562,13 +17562,13 @@ const BedrockUploadFileResponseTransform = (response) => {
 //#endregion
 //#region src/providers/bedrock/listfiles.ts
 const BedrockListFilesResponseTransform = () => {
-  throw new GatewayError(`listFiles is not supported by Bedrock`);
+  throw new GatewayError(`listFiles is not supported by Bedrock`, 501);
 };
 
 //#endregion
 //#region src/providers/bedrock/deleteFile.ts
 const BedrockDeleteFileResponseTransform = () => {
-  throw new GatewayError(`deleteFile is not supported by Bedrock`);
+  throw new GatewayError(`deleteFile is not supported by Bedrock`, 501);
 };
 
 //#endregion
@@ -21630,7 +21630,7 @@ const GoogleListFilesRequestHandler = async () => {
     status: "failure",
     provider: GOOGLE_VERTEX_AI
   }), {
-    status:
+    status: 501,
     headers: { "Content-Type": "application/json" }
   });
 };
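Taken together with the Bedrock hunks above, unsupported file operations now surface to clients as HTTP 501 rather than a generic failure. A hedged example of detecting this from a caller (the gateway URL, route, and header name are illustrative placeholders, not taken from this package):

    // Illustrative only: URL, route, and header names are placeholders.
    async function listFilesViaGateway(): Promise<void> {
      const res = await fetch("http://localhost:8787/v1/files", {
        headers: { "x-provider": "bedrock" }, // placeholder header
      });
      if (res.status === 501) {
        console.warn("listFiles is not implemented for this provider");
        return;
      }
      console.log(await res.json());
    }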
@@ -26667,7 +26667,11 @@ const AzureAIInferenceAPI = {
   retrieveBatch: path$2,
   cancelBatch: path$2,
   getBatchOutput: path$2,
-  createBatch: path$2
+  createBatch: path$2,
+  createModelResponse: "/responses",
+  getModelResponse: path$2,
+  deleteModelResponse: path$2,
+  listResponseInputItems: path$2
 };
 const isGithub = providerOptions.provider === GITHUB;
 if (fn === "proxy" && urlToFetch) {
@@ -26695,7 +26699,11 @@ const AzureAIInferenceAPI = {
   case "retrieveFile":
   case "listFiles":
   case "deleteFile":
-  case "retrieveFileContent":
+  case "retrieveFileContent":
+  case "createModelResponse":
+  case "getModelResponse":
+  case "deleteModelResponse":
+  case "listResponseInputItems": return `${ENDPOINT_MAPPING[mappedFn]}?${searchParamsString}`;
   default: return "";
   }
 }
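The two hunks above wire the OpenAI-style Responses API operations into the Azure AI Inference provider: createModelResponse maps to /responses, the get/delete/list variants reuse path$2 (from context, the passthrough for the incoming request path), and the URL switch appends the original query string to the mapped endpoint. A rough sketch of the resolution these changes imply (names follow the diff; the standalone helper is illustrative):

    // Illustrative only: mirrors the switch in the diff above.
    function resolveResponsesEndpoint(
      mappedFn: string,
      searchParamsString: string,
      endpointMapping: Record<string, string>,
    ): string {
      const path = endpointMapping[mappedFn]; // e.g. "/responses" for createModelResponse
      return path !== undefined ? `${path}?${searchParamsString}` : "";
    }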
@@ -26947,6 +26955,10 @@ const AzureAIInferenceAPIConfig = {
   cancelBatch: {},
   createBatch: AzureOpenAICreateBatchConfig,
   cancelFinetune: {},
+  createModelResponse: createModelResponseParams([]),
+  getModelResponse: {},
+  deleteModelResponse: {},
+  listModelsResponse: {},
   requestHandlers: { getBatchOutput: AzureAIInferenceGetBatchOutputRequestHandler },
   requestTransforms: { uploadFile: OpenAIFileUploadRequestTransform },
   responseTransforms: {
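Two details worth noting in this hunk: createModelResponseParams([]) appears to construct the createModelResponse parameter config with no extra provider-specific parameters, while the empty objects register the remaining operations without request transformation. Note also the key naming: the URL switch above uses listResponseInputItems, whereas this config block (and the responseTransforms block below) uses listModelsResponse; both spellings appear in 0.4.3 as released.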
@@ -26968,7 +26980,11 @@ const AzureAIInferenceAPIConfig = {
     listFiles: AzureAIInferenceResponseTransform,
     retrieveFile: AzureAIInferenceResponseTransform,
     deleteFile: AzureAIInferenceResponseTransform,
-    retrieveFileContent: AzureAIInferenceResponseTransform
+    retrieveFileContent: AzureAIInferenceResponseTransform,
+    createModelResponse: OpenAICreateModelResponseTransformer(AZURE_AI_INFERENCE),
+    getModelResponse: OpenAIGetModelResponseTransformer(AZURE_AI_INFERENCE),
+    deleteModelResponse: OpenAIDeleteModelResponseTransformer(AZURE_AI_INFERENCE),
+    listModelsResponse: OpenAIListInputItemsResponseTransformer(AZURE_AI_INFERENCE)
   }
 };
 }
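The new response transforms are built by factories parameterized on the provider name, so a single OpenAI-compatible transform can attribute errors and metadata to the provider that actually served the request. A hedged sketch of the pattern (the signature is inferred, not the package's actual code):

    // Sketch: a response-transform factory closing over the provider name.
    type ResponseTransform = (response: unknown, responseStatus: number) => unknown;
    const makeGetModelResponseTransform =
      (provider: string): ResponseTransform =>
      (response, responseStatus) =>
        responseStatus === 200 ? response : { error: response, provider }; // shape assumed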
@@ -30005,7 +30021,7 @@ function handleStreamingMode(response, proxyProvider, responseTransformer, reque
   const { readable, writable } = new TransformStream();
   const writer = writable.getWriter();
   const reader = response.body.getReader();
-  const isSleepTimeRequired = proxyProvider === AZURE_OPEN_AI;
+  const isSleepTimeRequired = proxyProvider === AZURE_OPEN_AI || proxyProvider === AZURE_AI_INFERENCE;
   const encoder$2 = new TextEncoder();
   if (proxyProvider === BEDROCK) (async () => {
   try {
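Previously handleStreamingMode inserted a small delay between chunk writes only for Azure OpenAI; 0.4.3 extends the same pacing to Azure AI Inference. A hedged sketch of what the flag gates (the loop shape and 1 ms delay are assumptions, not lifted from the source):

    // Sketch of the gated write loop; details assumed.
    const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

    async function pump(
      reader: ReadableStreamDefaultReader<Uint8Array>,
      writer: WritableStreamDefaultWriter<Uint8Array>,
      isSleepTimeRequired: boolean,
    ): Promise<void> {
      for (;;) {
        const { done, value } = await reader.read();
        if (done) break;
        await writer.write(value);
        if (isSleepTimeRequired) await sleep(1); // let the consumer flush between chunks
      }
      await writer.close();
    }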
package/dist/index.mjs
CHANGED
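The index.mjs hunks below are the ESM build of the same changes shown above for index.cjs; the content is identical and only the line offsets differ.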
@@ -10348,7 +10348,7 @@ const retryRequest = async (url, options, retryCount, statusCodesToRetry, timeou
 
 //#endregion
 //#region package.json
-var version = "0.4.2";
+var version = "0.4.3";
 
 //#endregion
 //#region src/providers/bytez/api.ts
@@ -17220,7 +17220,7 @@ const transformToProviderRequestJSON = (provider, params, fn, providerOptions) =
     providerOptions
   })[fn];
   else providerConfig = providerConfig[fn];
-  if (!providerConfig) throw new GatewayError(
+  if (!providerConfig) throw new GatewayError(`The endpoint '${fn}' is not supported by the '${provider}' provider`, 501);
   return transformUsingProviderConfig(providerConfig, params, providerOptions);
 };
 const transformToProviderRequestFormData = (provider, params, fn, providerOptions) => {
@@ -17535,13 +17535,13 @@ const BedrockUploadFileResponseTransform = (response) => {
 //#endregion
 //#region src/providers/bedrock/listfiles.ts
 const BedrockListFilesResponseTransform = () => {
-  throw new GatewayError(`listFiles is not supported by Bedrock`);
+  throw new GatewayError(`listFiles is not supported by Bedrock`, 501);
 };
 
 //#endregion
 //#region src/providers/bedrock/deleteFile.ts
 const BedrockDeleteFileResponseTransform = () => {
-  throw new GatewayError(`deleteFile is not supported by Bedrock`);
+  throw new GatewayError(`deleteFile is not supported by Bedrock`, 501);
 };
 
 //#endregion
@@ -21603,7 +21603,7 @@ const GoogleListFilesRequestHandler = async () => {
     status: "failure",
     provider: GOOGLE_VERTEX_AI
   }), {
-    status:
+    status: 501,
    headers: { "Content-Type": "application/json" }
   });
 };
@@ -26640,7 +26640,11 @@ const AzureAIInferenceAPI = {
   retrieveBatch: path$2,
   cancelBatch: path$2,
   getBatchOutput: path$2,
-  createBatch: path$2
+  createBatch: path$2,
+  createModelResponse: "/responses",
+  getModelResponse: path$2,
+  deleteModelResponse: path$2,
+  listResponseInputItems: path$2
 };
 const isGithub = providerOptions.provider === GITHUB;
 if (fn === "proxy" && urlToFetch) {
@@ -26668,7 +26672,11 @@ const AzureAIInferenceAPI = {
   case "retrieveFile":
   case "listFiles":
   case "deleteFile":
-  case "retrieveFileContent":
+  case "retrieveFileContent":
+  case "createModelResponse":
+  case "getModelResponse":
+  case "deleteModelResponse":
+  case "listResponseInputItems": return `${ENDPOINT_MAPPING[mappedFn]}?${searchParamsString}`;
   default: return "";
   }
 }
@@ -26920,6 +26928,10 @@ const AzureAIInferenceAPIConfig = {
   cancelBatch: {},
   createBatch: AzureOpenAICreateBatchConfig,
   cancelFinetune: {},
+  createModelResponse: createModelResponseParams([]),
+  getModelResponse: {},
+  deleteModelResponse: {},
+  listModelsResponse: {},
   requestHandlers: { getBatchOutput: AzureAIInferenceGetBatchOutputRequestHandler },
   requestTransforms: { uploadFile: OpenAIFileUploadRequestTransform },
   responseTransforms: {
@@ -26941,7 +26953,11 @@ const AzureAIInferenceAPIConfig = {
     listFiles: AzureAIInferenceResponseTransform,
     retrieveFile: AzureAIInferenceResponseTransform,
     deleteFile: AzureAIInferenceResponseTransform,
-    retrieveFileContent: AzureAIInferenceResponseTransform
+    retrieveFileContent: AzureAIInferenceResponseTransform,
+    createModelResponse: OpenAICreateModelResponseTransformer(AZURE_AI_INFERENCE),
+    getModelResponse: OpenAIGetModelResponseTransformer(AZURE_AI_INFERENCE),
+    deleteModelResponse: OpenAIDeleteModelResponseTransformer(AZURE_AI_INFERENCE),
+    listModelsResponse: OpenAIListInputItemsResponseTransformer(AZURE_AI_INFERENCE)
   }
 };
 }
@@ -29978,7 +29994,7 @@ function handleStreamingMode(response, proxyProvider, responseTransformer, reque
   const { readable, writable } = new TransformStream();
   const writer = writable.getWriter();
   const reader = response.body.getReader();
-  const isSleepTimeRequired = proxyProvider === AZURE_OPEN_AI;
+  const isSleepTimeRequired = proxyProvider === AZURE_OPEN_AI || proxyProvider === AZURE_AI_INFERENCE;
   const encoder$2 = new TextEncoder();
   if (proxyProvider === BEDROCK) (async () => {
   try {