190proof 1.0.0 → 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.mts CHANGED
@@ -13,14 +13,9 @@ declare enum GPTModel {
  declare enum GroqModel {
  LLAMA_3_70B_8192 = "llama3-70b-8192"
  }
- declare enum Role {
- User = "user",
- Assistant = "assistant",
- System = "system"
- }
  interface GenericMessage {
- role: Role;
- content: string;
+ role: "user" | "assistant" | "system";
+ content: string | AnthropicContentBlock[];
  timestamp?: string;
  files?: File[];
  functionCalls?: FunctionCall[];
@@ -31,14 +26,6 @@ interface File {
  url?: string;
  data?: string;
  }
- interface OpenAIMessage {
- role: "user" | "assistant" | "system";
- content: string;
- }
- interface AnthropicAIMessage {
- role: "user" | "assistant" | "system";
- content: string | AnthropicContentBlock[];
- }
  type AnthropicContentBlock = AnthropicTextContentBlock | AnthropicImageContentBlock;
  interface AnthropicTextContentBlock {
  type: "text";
@@ -80,20 +67,6 @@ interface OpenAIConfig {
  interface AnthropicAIConfig {
  service: "anthropic" | "bedrock";
  }
- interface OpenAIPayload {
- model: GPTModel;
- messages: OpenAIMessage[];
- functions?: any[];
- function_call?: "none" | "auto" | {
- name: string;
- };
- temperature?: number;
- }
- interface AnthropicAIPayload {
- model: ClaudeModel;
- messages: AnthropicAIMessage[];
- functions?: any[];
- }
  interface GenericPayload {
  model: GPTModel | ClaudeModel | GroqModel;
  messages: GenericMessage[];
@@ -104,8 +77,6 @@ interface GenericPayload {
  temperature?: number;
  }

- declare function callOpenAiWithRetries(identifier: string, openAiPayload: OpenAIPayload, openAiConfig?: OpenAIConfig, retries?: number, chunkTimeoutMs?: number): Promise<ParsedResponseMessage>;
- declare function callAnthropicWithRetries(identifier: string, AiPayload: AnthropicAIPayload, AiConfig?: AnthropicAIConfig, attempts?: number): Promise<ParsedResponseMessage>;
  declare function callWithRetries(identifier: string, aiPayload: GenericPayload, aiConfig?: OpenAIConfig | AnthropicAIConfig, retries?: number, chunkTimeoutMs?: number): Promise<ParsedResponseMessage>;

- export { ClaudeModel, GPTModel, GroqModel, Role, callAnthropicWithRetries, callOpenAiWithRetries, callWithRetries };
+ export { ClaudeModel, GPTModel, GroqModel, callWithRetries };
package/dist/index.d.ts CHANGED
@@ -13,14 +13,9 @@ declare enum GPTModel {
  declare enum GroqModel {
  LLAMA_3_70B_8192 = "llama3-70b-8192"
  }
- declare enum Role {
- User = "user",
- Assistant = "assistant",
- System = "system"
- }
  interface GenericMessage {
- role: Role;
- content: string;
+ role: "user" | "assistant" | "system";
+ content: string | AnthropicContentBlock[];
  timestamp?: string;
  files?: File[];
  functionCalls?: FunctionCall[];
@@ -31,14 +26,6 @@ interface File {
  url?: string;
  data?: string;
  }
- interface OpenAIMessage {
- role: "user" | "assistant" | "system";
- content: string;
- }
- interface AnthropicAIMessage {
- role: "user" | "assistant" | "system";
- content: string | AnthropicContentBlock[];
- }
  type AnthropicContentBlock = AnthropicTextContentBlock | AnthropicImageContentBlock;
  interface AnthropicTextContentBlock {
  type: "text";
@@ -80,20 +67,6 @@ interface OpenAIConfig {
  interface AnthropicAIConfig {
  service: "anthropic" | "bedrock";
  }
- interface OpenAIPayload {
- model: GPTModel;
- messages: OpenAIMessage[];
- functions?: any[];
- function_call?: "none" | "auto" | {
- name: string;
- };
- temperature?: number;
- }
- interface AnthropicAIPayload {
- model: ClaudeModel;
- messages: AnthropicAIMessage[];
- functions?: any[];
- }
  interface GenericPayload {
  model: GPTModel | ClaudeModel | GroqModel;
  messages: GenericMessage[];
@@ -104,8 +77,6 @@ interface GenericPayload {
  temperature?: number;
  }

- declare function callOpenAiWithRetries(identifier: string, openAiPayload: OpenAIPayload, openAiConfig?: OpenAIConfig, retries?: number, chunkTimeoutMs?: number): Promise<ParsedResponseMessage>;
- declare function callAnthropicWithRetries(identifier: string, AiPayload: AnthropicAIPayload, AiConfig?: AnthropicAIConfig, attempts?: number): Promise<ParsedResponseMessage>;
  declare function callWithRetries(identifier: string, aiPayload: GenericPayload, aiConfig?: OpenAIConfig | AnthropicAIConfig, retries?: number, chunkTimeoutMs?: number): Promise<ParsedResponseMessage>;

- export { ClaudeModel, GPTModel, GroqModel, Role, callAnthropicWithRetries, callOpenAiWithRetries, callWithRetries };
+ export { ClaudeModel, GPTModel, GroqModel, callWithRetries };
package/dist/index.js CHANGED
@@ -27016,9 +27016,6 @@ __export(proof_exports, {
  ClaudeModel: () => ClaudeModel,
  GPTModel: () => GPTModel,
  GroqModel: () => GroqModel,
- Role: () => Role,
- callAnthropicWithRetries: () => callAnthropicWithRetries,
- callOpenAiWithRetries: () => callOpenAiWithRetries,
  callWithRetries: () => callWithRetries
  });
  module.exports = __toCommonJS(proof_exports);
@@ -27042,12 +27039,6 @@ var GroqModel = /* @__PURE__ */ ((GroqModel2) => {
  GroqModel2["LLAMA_3_70B_8192"] = "llama3-70b-8192";
  return GroqModel2;
  })(GroqModel || {});
- var Role = /* @__PURE__ */ ((Role2) => {
- Role2["User"] = "user";
- Role2["Assistant"] = "assistant";
- Role2["System"] = "system";
- return Role2;
- })(Role || {});

  // ../node_modules/@aws-sdk/client-bedrock-runtime/dist-es/BedrockRuntimeClient.js
  init_dist_es3();
@@ -31532,10 +31523,14 @@ async function callOpenAiWithRetries(identifier, openAiPayload, openAiConfig, re
  async function callOpenAIStream(identifier, openAiPayload, openAiConfig, chunkTimeoutMs) {
  const functionNames = openAiPayload.functions ? new Set(openAiPayload.functions.map((fn) => fn.name)) : null;
  if (!openAiConfig) {
+ const defaultOpenAIBaseUrl = (
+ // TODO: Remove this one we have per-provider configs
+ "https://gateway.ai.cloudflare.com/v1/932636fc124abb5171fd630afe668905/igpt"
+ );
  openAiConfig = {
  service: "openai",
  apiKey: process.env.OPENAI_API_KEY,
- baseUrl: "https://api.openai.com/v1"
+ baseUrl: defaultOpenAIBaseUrl
  };
  }
  let response;
@@ -31564,7 +31559,7 @@ async function callOpenAIStream(identifier, openAiPayload, openAiConfig, chunkTi
  });
  } else {
  console.log(identifier, "Using OpenAI service", openAiPayload.model);
- const endpoint = `${openAiConfig == null ? void 0 : openAiConfig.baseUrl}/chat/completions`;
+ const endpoint = `${openAiConfig.baseUrl}/openai/chat/completions`;
  if (openAiConfig.orgId) {
  console.log(identifier, "Using orgId", openAiConfig.orgId);
  }
@@ -31719,10 +31714,6 @@ async function callAnthropic(identifier, AiPayload, AiConfig) {
  if ((AiConfig == null ? void 0 : AiConfig.service) === "bedrock") {
  const AWS_REGION = "us-east-1";
  const MODEL_ID = "anthropic.claude-3-haiku-20240307-v1:0";
- const AWS_ACCESS_KEY_ID = "AKIAZI2LICXWZC3QQ4O2";
- const AWS_SECRET_ACCESS_KEY = "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";
- process.env.AWS_ACCESS_KEY_ID = AWS_ACCESS_KEY_ID;
- process.env.AWS_SECRET_ACCESS_KEY = AWS_SECRET_ACCESS_KEY;
  const client = new BedrockRuntimeClient({ region: AWS_REGION });
  const payload = {
  anthropic_version: "bedrock-2023-05-31",
@@ -31769,7 +31760,6 @@ async function callAnthropic(identifier, AiPayload, AiConfig) {
  data = response2.data;
  }
  const answers = data.content;
- console.log("Anthropic API answers:", JSON.stringify({ answers }));
  if (!answers[0]) {
  console.error(identifier, "Missing answer in Anthropic API:", data);
  throw new Error("Missing answer in Anthropic API");
@@ -31915,7 +31905,7 @@ function prepareOpenAIPayload(payload) {
  model: payload.model,
  messages: payload.messages.map((message) => ({
  role: message.role,
- content: message.content
+ content: normalizeMessageContent(message.content)
  // TODO: Handle files
  })),
  functions: payload.functions
@@ -31930,7 +31920,7 @@ function prepareGroqPayload(payload) {
  model: payload.model,
  messages: payload.messages.map((message) => ({
  role: message.role,
- content: message.content
+ content: normalizeMessageContent(message.content)
  })),
  functions: (_a3 = payload.functions) == null ? void 0 : _a3.map((fn) => ({
  type: "function",
@@ -31938,6 +31928,9 @@ function prepareGroqPayload(payload) {
  }))
  };
  }
+ function normalizeMessageContent(content) {
+ return Array.isArray(content) ? content.map((c5) => c5.type === "text" ? c5.text : `[${c5.type}]`).join("\n") : content;
+ }
  async function callGroq(identifier, payload) {
  const response = await axios_default.post(
  "https://api.groq.com/openai/v1/chat/completions",
@@ -32002,9 +31995,6 @@ async function callGroqWithRetries(identifier, payload, retries = 5) {
  ClaudeModel,
  GPTModel,
  GroqModel,
- Role,
- callAnthropicWithRetries,
- callOpenAiWithRetries,
  callWithRetries
  });
  /*! Bundled license information: