190proof 1.0.48 → 1.0.50
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +3 -1
- package/dist/index.d.ts +3 -1
- package/dist/index.js +106 -7
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +106 -7
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -27032,6 +27032,8 @@ var GPTModel = /* @__PURE__ */ ((GPTModel2) => {
   GPTModel2["GPT4_0409"] = "gpt-4-turbo-2024-04-09";
   GPTModel2["GPT4O"] = "gpt-4o";
   GPTModel2["GPT4O_MINI"] = "gpt-4o-mini";
+  GPTModel2["O1_PREVIEW"] = "o1-preview";
+  GPTModel2["O1_MINI"] = "o1-mini";
   return GPTModel2;
 })(GPTModel || {});
 var GroqModel = /* @__PURE__ */ ((GroqModel2) => {
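The two new enum members are purely additive; existing model strings are unchanged. A minimal usage sketch, assuming GPTModel is exported from the package entry point (the import specifier below is illustrative):

// Hypothetical usage; assumes GPTModel is part of the public export surface.
import { GPTModel } from "190proof";

console.log(GPTModel.O1_PREVIEW); // "o1-preview"
console.log(GPTModel.O1_MINI);    // "o1-mini"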
@@ -31481,13 +31483,16 @@ async function callOpenAiWithRetries(identifier, openAiPayload, openAiConfig, re
   for (let i5 = 0; i5 <= retries; i5++) {
     try {
       const timerId = `timer:${identifier}:${Date.now()}:callOpenAi:${openAiConfig == null ? void 0 : openAiConfig.service}-${openAiPayload.model}-${openAiConfig == null ? void 0 : openAiConfig.orgId}`;
-
-        identifier,
-
-
-
-
-
+      if (openAiPayload.model === "o1-mini" /* O1_MINI */ || openAiPayload.model === "o1-preview" /* O1_PREVIEW */) {
+        return await callOpenAI(identifier, openAiPayload, openAiConfig);
+      } else {
+        return await callOpenAIStream(
+          identifier,
+          openAiPayload,
+          openAiConfig,
+          chunkTimeoutMs
+        );
+      }
     } catch (error) {
       console.error(error);
       console.error(
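This hunk routes the o1 models to a new non-streaming helper (callOpenAI, added in the next hunk) and keeps every other model on the existing callOpenAIStream path. A standalone sketch of that model check, using an illustrative helper name that is not part of the package:

// Illustrative helper mirroring the branch added in callOpenAiWithRetries:
// "o1-preview" and "o1-mini" bypass streaming, all other models stream.
const NON_STREAMING_MODELS = new Set(["o1-preview", "o1-mini"]);

function usesNonStreamingPath(model) {
  return NON_STREAMING_MODELS.has(model);
}

console.log(usesNonStreamingPath("o1-mini")); // true
console.log(usesNonStreamingPath("gpt-4o"));  // false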
@@ -31743,6 +31748,100 @@ async function callOpenAIStream(identifier, openAiPayload, openAiConfig, chunkTi
     throw new Error("Stream error: no response body");
   }
 }
+async function callOpenAI(identifier, openAiPayload, openAiConfig) {
+  const functionNames = openAiPayload.tools ? new Set(openAiPayload.tools.map((fn) => fn.function.name)) : null;
+  if (!openAiConfig) {
+    openAiConfig = {
+      service: "openai",
+      apiKey: process.env.OPENAI_API_KEY,
+      baseUrl: ""
+    };
+  }
+  let response;
+  if (openAiConfig.service === "azure") {
+    console.log(identifier, "Using Azure OpenAI service", openAiPayload.model);
+    const model = openAiPayload.model;
+    if (!openAiConfig.modelConfigMap) {
+      throw new Error(
+        "OpenAI config modelConfigMap is required when using Azure OpenAI service."
+      );
+    }
+    const azureConfig = openAiConfig.modelConfigMap[model];
+    let endpoint;
+    if (azureConfig.endpoint) {
+      endpoint = `${azureConfig.endpoint}/openai/deployments/${azureConfig.deployment}/chat/completions?api-version=${azureConfig.apiVersion}`;
+    } else {
+      throw new Error("Azure OpenAI endpoint is required in modelConfigMap.");
+    }
+    console.log(identifier, "Using endpoint", endpoint);
+    try {
+      const stringifiedPayload = JSON.stringify({
+        ...openAiPayload,
+        stream: false
+      });
+      const parsedPayload = JSON.parse(stringifiedPayload);
+    } catch (error) {
+      console.error(
+        identifier,
+        "OpenAI JSON parsing error:",
+        JSON.stringify(error)
+      );
+      throw error;
+    }
+    response = await fetch(endpoint, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+        "api-key": azureConfig.apiKey
+      },
+      body: JSON.stringify({
+        ...openAiPayload,
+        stream: false
+      })
+    });
+  } else {
+    console.log(identifier, "Using OpenAI service", openAiPayload.model);
+    const endpoint = `https://api.openai.com/v1/chat/completions`;
+    if (openAiConfig.orgId) {
+      console.log(identifier, "Using orgId", openAiConfig.orgId);
+    }
+    response = await fetch(endpoint, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+        Authorization: `Bearer ${openAiConfig.apiKey}`,
+        ...openAiConfig.orgId ? { "OpenAI-Organization": openAiConfig.orgId } : {}
+      },
+      body: JSON.stringify({
+        ...openAiPayload,
+        stream: false
+      })
+    });
+  }
+  if (!response.ok) {
+    const errorData = await response.json();
+    console.error(identifier, "OpenAI API error:", JSON.stringify(errorData));
+    throw new Error(`OpenAI API Error: ${errorData.error.message}`);
+  }
+  const data = await response.json();
+  if (!data.choices || !data.choices.length) {
+    if (data.error) {
+      console.error(identifier, "OpenAI error:", JSON.stringify(data.error));
+      throw new Error("OpenAI error: " + data.error.message);
+    }
+    throw new Error("OpenAI error: No choices returned.");
+  }
+  const choice = data.choices[0];
+  const functionCall = choice.function_call ? {
+    name: choice.function_call.name,
+    arguments: JSON.parse(choice.function_call.arguments)
+  } : null;
+  return {
+    role: "assistant",
+    content: choice.message.content || null,
+    function_call: functionCall
+  };
+}
 function truncatePayload(payload) {
   return JSON.stringify(
     {
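The added callOpenAI function makes a single non-streaming chat-completions request: when openAiConfig.service is "azure" it builds the URL from the model's entry in modelConfigMap (endpoint, deployment, apiVersion, apiKey), otherwise it posts to https://api.openai.com/v1/chat/completions with a Bearer key and an optional OpenAI-Organization header, forcing stream: false either way. A hedged sketch of a config object shaped the way this function reads it; the field names follow the diff, but every value is a placeholder and the exact typing lives in the package's .d.ts, not here:

// Hypothetical config sketch; field names follow what callOpenAI reads,
// all values below are placeholders.
const openAiConfig = {
  service: "azure",
  apiKey: process.env.OPENAI_API_KEY, // used only on the non-Azure path
  orgId: undefined, // optional; adds the OpenAI-Organization header when set
  modelConfigMap: {
    "o1-mini": {
      endpoint: "https://example-resource.openai.azure.com", // placeholder
      deployment: "o1-mini-deployment", // placeholder
      apiVersion: "2024-06-01", // placeholder
      apiKey: process.env.AZURE_OPENAI_API_KEY // placeholder
    }
  }
};

With a config like this, the Azure request URL becomes `${endpoint}/openai/deployments/${deployment}/chat/completions?api-version=${apiVersion}`, matching the string built inside callOpenAI.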