190proof 1.0.1 → 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +3 -32
- package/dist/index.d.ts +3 -32
- package/dist/index.js +11 -17
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +11 -14
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -27034,12 +27034,6 @@ var GroqModel = /* @__PURE__ */ ((GroqModel2) => {
   GroqModel2["LLAMA_3_70B_8192"] = "llama3-70b-8192";
   return GroqModel2;
 })(GroqModel || {});
-var Role = /* @__PURE__ */ ((Role2) => {
-  Role2["User"] = "user";
-  Role2["Assistant"] = "assistant";
-  Role2["System"] = "system";
-  return Role2;
-})(Role || {});

 // ../node_modules/@aws-sdk/client-bedrock-runtime/dist-es/BedrockRuntimeClient.js
 init_dist_es3();
@@ -31524,10 +31518,14 @@ async function callOpenAiWithRetries(identifier, openAiPayload, openAiConfig, re
 async function callOpenAIStream(identifier, openAiPayload, openAiConfig, chunkTimeoutMs) {
   const functionNames = openAiPayload.functions ? new Set(openAiPayload.functions.map((fn) => fn.name)) : null;
   if (!openAiConfig) {
+    const defaultOpenAIBaseUrl = (
+      // TODO: Remove this one we have per-provider configs
+      "https://gateway.ai.cloudflare.com/v1/932636fc124abb5171fd630afe668905/igpt"
+    );
     openAiConfig = {
       service: "openai",
       apiKey: process.env.OPENAI_API_KEY,
-      baseUrl:
+      baseUrl: defaultOpenAIBaseUrl
     };
   }
   let response;
@@ -31556,7 +31554,7 @@ async function callOpenAIStream(identifier, openAiPayload, openAiConfig, chunkTi
     });
   } else {
     console.log(identifier, "Using OpenAI service", openAiPayload.model);
-    const endpoint = `${openAiConfig
+    const endpoint = `${openAiConfig.baseUrl}/openai/chat/completions`;
     if (openAiConfig.orgId) {
       console.log(identifier, "Using orgId", openAiConfig.orgId);
     }
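Read together with the previous hunk, this change completes the truncated `baseUrl:` and `endpoint` expressions that shipped in 1.0.1: when no openAiConfig is supplied, 1.0.2 falls back to the Cloudflare AI Gateway base URL and appends the OpenAI chat-completions path to it. A minimal sketch of that fallback, using only values visible in the diff (the standalone snippet is illustrative, not part of the package):

```js
// Illustration only: the default config callOpenAIStream constructs in 1.0.2
// when no openAiConfig argument is provided (values taken from the diff above).
const defaultOpenAIBaseUrl = "https://gateway.ai.cloudflare.com/v1/932636fc124abb5171fd630afe668905/igpt";
const openAiConfig = {
  service: "openai",
  apiKey: process.env.OPENAI_API_KEY,
  baseUrl: defaultOpenAIBaseUrl
};
// The OpenAI branch then builds its endpoint from baseUrl,
// so requests go to the gateway's /openai/chat/completions route.
const endpoint = `${openAiConfig.baseUrl}/openai/chat/completions`;
```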
@@ -31757,7 +31755,6 @@ async function callAnthropic(identifier, AiPayload, AiConfig) {
     data = response2.data;
   }
   const answers = data.content;
-  console.log("Anthropic API answers:", JSON.stringify({ answers }));
   if (!answers[0]) {
     console.error(identifier, "Missing answer in Anthropic API:", data);
     throw new Error("Missing answer in Anthropic API");
@@ -31903,7 +31900,7 @@ function prepareOpenAIPayload(payload) {
     model: payload.model,
     messages: payload.messages.map((message) => ({
       role: message.role,
-      content: message.content
+      content: normalizeMessageContent(message.content)
       // TODO: Handle files
     })),
     functions: payload.functions
@@ -31918,7 +31915,7 @@ function prepareGroqPayload(payload) {
     model: payload.model,
     messages: payload.messages.map((message) => ({
       role: message.role,
-      content: message.content
+      content: normalizeMessageContent(message.content)
     })),
     functions: (_a3 = payload.functions) == null ? void 0 : _a3.map((fn) => ({
       type: "function",
@@ -31926,6 +31923,9 @@ function prepareGroqPayload(payload) {
     }))
   };
 }
+function normalizeMessageContent(content) {
+  return Array.isArray(content) ? content.map((c5) => c5.type === "text" ? c5.text : `[${c5.type}]`).join("\n") : content;
+}
 async function callGroq(identifier, payload) {
   const response = await axios_default.post(
     "https://api.groq.com/openai/v1/chat/completions",
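The normalizeMessageContent helper added here backs the prepareOpenAIPayload and prepareGroqPayload changes above: array-style message content is flattened into one newline-joined string, while plain strings pass through untouched. A small usage sketch; the multimodal part shapes below are assumptions for illustration, not something the package defines:

```js
// Copied from the diff above, shown standalone for illustration.
function normalizeMessageContent(content) {
  return Array.isArray(content) ? content.map((c5) => c5.type === "text" ? c5.text : `[${c5.type}]`).join("\n") : content;
}

// Plain string content is returned unchanged.
normalizeMessageContent("hello"); // -> "hello"

// Array-of-parts content (shape assumed here): text parts keep their text,
// any non-text part becomes a "[type]" placeholder, joined with newlines.
normalizeMessageContent([
  { type: "text", text: "describe this image" },
  { type: "image_url", image_url: { url: "https://example.com/cat.png" } }
]);
// -> "describe this image\n[image_url]"
```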
@@ -31989,9 +31989,6 @@ export {
   ClaudeModel,
   GPTModel,
   GroqModel,
-  Role,
-  callAnthropicWithRetries,
-  callOpenAiWithRetries,
   callWithRetries
 };
 /*! Bundled license information: