190proof 1.0.22 → 1.0.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +5 -2
- package/dist/index.d.ts +5 -2
- package/dist/index.js +101 -4
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +101 -4
- package/dist/index.mjs.map +1 -1
- package/package.json +2 -1
package/dist/index.mjs
CHANGED
@@ -27035,6 +27035,10 @@ var GroqModel = /* @__PURE__ */ ((GroqModel2) => {
   GroqModel2["LLAMA_3_70B_8192"] = "llama3-70b-8192";
   return GroqModel2;
 })(GroqModel || {});
+var GeminiModel = /* @__PURE__ */ ((GeminiModel2) => {
+  GeminiModel2["GEMINI_15_PRO"] = "gemini-1.5-pro-latest";
+  return GeminiModel2;
+})(GeminiModel || {});
 
 // ../node_modules/@aws-sdk/client-bedrock-runtime/dist-es/BedrockRuntimeClient.js
 init_dist_es3();
@@ -31425,6 +31429,7 @@ function isHeicImage(name, mime) {
 }
 
 // index.ts
+var { GoogleGenerativeAI } = __require("@google/generative-ai");
 var sharp = __require("sharp");
 var decode = __require("heic-decode");
 function parseStreamedResponse(identifier, paragraph, functionCallName, functionCallArgs, allowedFunctionNames) {
@@ -31889,6 +31894,92 @@ Before answering you can reason about the instructions and answer using <thinkin
   }
   return jiggedMessages;
 }
+async function prepareGoogleAIPayload(payload) {
+  var _a3;
+  const preparedPayload = {
+    model: payload.model,
+    messages: [],
+    tools: payload.functions ? {
+      functionDeclarations: payload.functions.map((fn) => ({
+        name: fn.name,
+        parameters: {
+          // Google puts their description in the parameters object rather than in a top-level field
+          description: fn.description,
+          ...fn.parameters
+        }
+      }))
+    } : void 0
+  };
+  for (const message of payload.messages) {
+    const googleAIContentParts = [];
+    if (message.content) {
+      googleAIContentParts.push({
+        text: message.content
+      });
+    }
+    for (const file of message.files || []) {
+      if (!((_a3 = file.mimeType) == null ? void 0 : _a3.startsWith("image"))) {
+        console.warn(
+          "Google AI API does not support non-image file types. Skipping file."
+        );
+        continue;
+      }
+      if (file.url) {
+        googleAIContentParts.push({
+          inlineData: {
+            mimeType: "image/png",
+            data: await getNormalizedBase64PNG(file.url, file.mimeType)
+          }
+        });
+      } else if (file.data) {
+        if (!["image/png", "image/jpeg", "image/gif", "image/webp"].includes(
+          file.mimeType
+        )) {
+          throw new Error(
+            "Invalid image mimeType. Supported types are: image/png, image/jpeg, image/gif, image/webp"
+          );
+        }
+        googleAIContentParts.push({
+          inlineData: {
+            mimeType: file.mimeType,
+            data: file.data
+          }
+        });
+      }
+    }
+    preparedPayload.messages.push({
+      role: message.role === "user" ? "user" : "model",
+      parts: googleAIContentParts
+    });
+  }
+  return preparedPayload;
+}
+async function callGoogleAI(identifier, payload) {
+  console.log(identifier, "Calling Google AI API");
+  const genAI = new GoogleGenerativeAI(process.env.GEMINI_API_KEY);
+  const model = genAI.getGenerativeModel({
+    model: payload.model,
+    tools: payload.tools
+  });
+  const history = payload.messages.slice(0, -1);
+  const lastMessage = payload.messages.slice(-1)[0];
+  const chat = model.startChat({
+    history
+  });
+  const result = await chat.sendMessage(lastMessage.parts);
+  const response = await result.response;
+  const text = response.text();
+  const functionCalls = response.functionCalls();
+  const parsedFunctionCalls = functionCalls == null ? void 0 : functionCalls.map((fc) => ({
+    name: fc.name,
+    arguments: fc.args
+  }));
+  return {
+    role: "assistant",
+    content: text || null,
+    function_call: (parsedFunctionCalls == null ? void 0 : parsedFunctionCalls[0]) || null
+  };
+}
 async function callWithRetries(identifier, aiPayload, aiConfig, retries = 5, chunkTimeoutMs = 15e3) {
   if (isAnthropicPayload(aiPayload)) {
     console.log(identifier, "Delegating call to Anthropic API");
@@ -31913,6 +32004,12 @@ async function callWithRetries(identifier, aiPayload, aiConfig, retries = 5, chu
       identifier,
       await prepareGroqPayload(aiPayload)
     );
+  } else if (isGoogleAIPayload(aiPayload)) {
+    console.log(identifier, "Delegating call to Google AI API");
+    return await callGoogleAI(
+      identifier,
+      await prepareGoogleAIPayload(aiPayload)
+    );
   } else {
     throw new Error("Invalid AI payload: Unknown model type.");
   }
@@ -32013,10 +32110,7 @@ async function prepareOpenAIPayload(payload) {
         openAIContentBlocks.push({
           type: "image_url",
           image_url: {
-            url:
-            file.url,
-            file.mimeType
-            )}`
+            url: file.url
           }
         });
       } else if (file.data) {
@@ -32060,6 +32154,9 @@ function prepareGroqPayload(payload) {
 function normalizeMessageContent(content) {
   return Array.isArray(content) ? content.map((c5) => c5.type === "text" ? c5.text : `[${c5.type}]`).join("\n") : content;
 }
+function isGoogleAIPayload(payload) {
+  return Object.values(GeminiModel).includes(payload.model);
+}
 async function callGroq(identifier, payload) {
   const response = await axios_default.post(
     "https://api.groq.com/openai/v1/chat/completions",
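
For context, a minimal sketch of how a consumer of this build might exercise the new Gemini path added above. It assumes the message shape used elsewhere in the bundle (role / content / files), that GEMINI_API_KEY is set in the environment, and that callWithRetries is reachable from the calling code; the identifier string and message contents are illustrative, and aiConfig is passed as an empty object because the Google branch shown in this diff does not reference it.

// Illustrative usage sketch; not part of the package diff.
const answer = await callWithRetries(
  "req-123", // identifier (made up)
  {
    // "gemini-1.5-pro-latest" is GeminiModel.GEMINI_15_PRO, so isGoogleAIPayload() matches
    model: "gemini-1.5-pro-latest",
    messages: [
      {
        role: "user",
        content: "Describe this image.",
        files: [{ mimeType: "image/png", data: "<base64 PNG data>" }]
      }
    ]
  },
  {} // aiConfig; unused by the Google branch shown above
);
// callGoogleAI returns { role: "assistant", content, function_call }
console.log(answer.content);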