190proof 1.0.23 → 1.0.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +5 -2
- package/dist/index.d.ts +5 -2
- package/dist/index.js +100 -4
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +100 -4
- package/dist/index.mjs.map +1 -1
- package/package.json +2 -1
package/dist/index.d.mts
CHANGED
@@ -14,6 +14,9 @@ declare enum GPTModel {
 declare enum GroqModel {
     LLAMA_3_70B_8192 = "llama3-70b-8192"
 }
+declare enum GeminiModel {
+    GEMINI_15_PRO = "gemini-1.5-pro-latest"
+}
 interface GenericMessage {
     role: "user" | "assistant" | "system";
     content: string;
@@ -56,11 +59,11 @@ interface AnthropicAIConfig {
 }
 interface FunctionDefinition {
     name: string;
-    description
+    description?: string;
     parameters: Record<string, any>;
 }
 interface GenericPayload {
-    model: GPTModel | ClaudeModel | GroqModel;
+    model: GPTModel | ClaudeModel | GroqModel | GeminiModel;
     messages: GenericMessage[];
     functions?: FunctionDefinition[];
     function_call?: "none" | "auto" | {
package/dist/index.d.ts
CHANGED
@@ -14,6 +14,9 @@ declare enum GPTModel {
 declare enum GroqModel {
     LLAMA_3_70B_8192 = "llama3-70b-8192"
 }
+declare enum GeminiModel {
+    GEMINI_15_PRO = "gemini-1.5-pro-latest"
+}
 interface GenericMessage {
     role: "user" | "assistant" | "system";
     content: string;
@@ -56,11 +59,11 @@ interface AnthropicAIConfig {
 }
 interface FunctionDefinition {
     name: string;
-    description
+    description?: string;
     parameters: Record<string, any>;
 }
 interface GenericPayload {
-    model: GPTModel | ClaudeModel | GroqModel;
+    model: GPTModel | ClaudeModel | GroqModel | GeminiModel;
     messages: GenericMessage[];
     functions?: FunctionDefinition[];
     function_call?: "none" | "auto" | {
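Taken together, the typings changes add a GeminiModel enum, make FunctionDefinition.description an optional string, and widen GenericPayload.model to accept Gemini models. A minimal consumer-side sketch against the new declarations, assuming GeminiModel and GenericPayload are exported from the package entry point (only the dist typings appear in this diff; everything else below is illustrative):

// Hypothetical usage sketch; import path and export names are assumptions.
import { GeminiModel, type GenericPayload } from "190proof";

const payload: GenericPayload = {
  // GenericPayload.model now also accepts GeminiModel values
  model: GeminiModel.GEMINI_15_PRO,
  messages: [{ role: "user", content: "Summarize this changelog in one sentence." }],
  functions: [
    {
      name: "set_summary",
      // description is now declared as optional (description?: string)
      parameters: { type: "object", properties: { summary: { type: "string" } } },
    },
  ],
};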
package/dist/index.js
CHANGED
@@ -27040,6 +27040,10 @@ var GroqModel = /* @__PURE__ */ ((GroqModel2) => {
   GroqModel2["LLAMA_3_70B_8192"] = "llama3-70b-8192";
   return GroqModel2;
 })(GroqModel || {});
+var GeminiModel = /* @__PURE__ */ ((GeminiModel2) => {
+  GeminiModel2["GEMINI_15_PRO"] = "gemini-1.5-pro-latest";
+  return GeminiModel2;
+})(GeminiModel || {});
 
 // ../node_modules/@aws-sdk/client-bedrock-runtime/dist-es/BedrockRuntimeClient.js
 init_dist_es3();
@@ -31430,6 +31434,7 @@ function isHeicImage(name, mime) {
 }
 
 // index.ts
+var { GoogleGenerativeAI } = require("@google/generative-ai");
 var sharp = require("sharp");
 var decode = require("heic-decode");
 function parseStreamedResponse(identifier, paragraph, functionCallName, functionCallArgs, allowedFunctionNames) {
@@ -31894,6 +31899,92 @@ Before answering you can reason about the instructions and answer using <thinkin
   }
   return jiggedMessages;
 }
+async function prepareGoogleAIPayload(payload) {
+  var _a3;
+  const preparedPayload = {
+    model: payload.model,
+    messages: [],
+    tools: payload.functions ? {
+      functionDeclarations: payload.functions.map((fn) => ({
+        name: fn.name,
+        parameters: {
+          // Google puts their description in the parameters object rather than in a top-level field
+          description: fn.description,
+          ...fn.parameters
+        }
+      }))
+    } : void 0
+  };
+  for (const message of payload.messages) {
+    const googleAIContentParts = [];
+    if (message.content) {
+      googleAIContentParts.push({
+        text: message.content
+      });
+    }
+    for (const file of message.files || []) {
+      if (!((_a3 = file.mimeType) == null ? void 0 : _a3.startsWith("image"))) {
+        console.warn(
+          "Google AI API does not support non-image file types. Skipping file."
+        );
+        continue;
+      }
+      if (file.url) {
+        googleAIContentParts.push({
+          inlineData: {
+            mimeType: "image/png",
+            data: await getNormalizedBase64PNG(file.url, file.mimeType)
+          }
+        });
+      } else if (file.data) {
+        if (!["image/png", "image/jpeg", "image/gif", "image/webp"].includes(
+          file.mimeType
+        )) {
+          throw new Error(
+            "Invalid image mimeType. Supported types are: image/png, image/jpeg, image/gif, image/webp"
+          );
+        }
+        googleAIContentParts.push({
+          inlineData: {
+            mimeType: file.mimeType,
+            data: file.data
+          }
+        });
+      }
+    }
+    preparedPayload.messages.push({
+      role: message.role === "user" ? "user" : "model",
+      parts: googleAIContentParts
+    });
+  }
+  return preparedPayload;
+}
+async function callGoogleAI(identifier, payload) {
+  console.log(identifier, "Calling Google AI API");
+  const genAI = new GoogleGenerativeAI(process.env.GEMINI_API_KEY);
+  const model = genAI.getGenerativeModel({
+    model: payload.model,
+    tools: payload.tools
+  });
+  const history = payload.messages.slice(0, -1);
+  const lastMessage = payload.messages.slice(-1)[0];
+  const chat = model.startChat({
+    history
+  });
+  const result = await chat.sendMessage(lastMessage.parts);
+  const response = await result.response;
+  const text = response.text();
+  const functionCalls = response.functionCalls();
+  const parsedFunctionCalls = functionCalls == null ? void 0 : functionCalls.map((fc) => ({
+    name: fc.name,
+    arguments: fc.args
+  }));
+  return {
+    role: "assistant",
+    content: text || null,
+    function_call: (parsedFunctionCalls == null ? void 0 : parsedFunctionCalls[0]) || null
+  };
+}
 async function callWithRetries(identifier, aiPayload, aiConfig, retries = 5, chunkTimeoutMs = 15e3) {
   if (isAnthropicPayload(aiPayload)) {
     console.log(identifier, "Delegating call to Anthropic API");
@@ -31918,6 +32009,12 @@ async function callWithRetries(identifier, aiPayload, aiConfig, retries = 5, chu
       identifier,
       await prepareGroqPayload(aiPayload)
     );
+  } else if (isGoogleAIPayload(aiPayload)) {
+    console.log(identifier, "Delegating call to Google AI API");
+    return await callGoogleAI(
+      identifier,
+      await prepareGoogleAIPayload(aiPayload)
+    );
   } else {
     throw new Error("Invalid AI payload: Unknown model type.");
   }
@@ -32019,10 +32116,6 @@ async function prepareOpenAIPayload(payload) {
           type: "image_url",
           image_url: {
             url: file.url
-            // url: `data:image/png;base64,${await getNormalizedBase64PNG(
-            //   file.url,
-            //   file.mimeType
-            // )}`,
           }
         });
       } else if (file.data) {
@@ -32066,6 +32159,9 @@ function prepareGroqPayload(payload) {
 function normalizeMessageContent(content) {
   return Array.isArray(content) ? content.map((c5) => c5.type === "text" ? c5.text : `[${c5.type}]`).join("\n") : content;
 }
+function isGoogleAIPayload(payload) {
+  return Object.values(GeminiModel).includes(payload.model);
+}
 async function callGroq(identifier, payload) {
   const response = await axios_default.post(
     "https://api.groq.com/openai/v1/chat/completions",
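The runtime side mirrors the typings: isGoogleAIPayload matches payloads whose model is a GeminiModel value, and callWithRetries then delegates to prepareGoogleAIPayload/callGoogleAI, which uses @google/generative-ai and reads process.env.GEMINI_API_KEY. A rough sketch of the new branch; identifiers not shown in the diff (and the exported surface of the package) are assumptions:

// Hypothetical routing sketch; only GeminiModel, isGoogleAIPayload,
// prepareGoogleAIPayload and callGoogleAI appear in the diff above.
const aiPayload = {
  // matched by isGoogleAIPayload via Object.values(GeminiModel).includes(...)
  model: GeminiModel.GEMINI_15_PRO,
  messages: [{ role: "user", content: "Hello, Gemini!" }],
};

if (isGoogleAIPayload(aiPayload)) {
  // callGoogleAI reads process.env.GEMINI_API_KEY and returns
  // { role: "assistant", content, function_call }
  const reply = await callGoogleAI("req-123", await prepareGoogleAIPayload(aiPayload));
}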