genlayer 0.0.26 → 0.0.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,7 @@
 
 
+## 0.0.27 (2024-05-08)
+
 ## 0.0.26 (2024-05-07)
 
 ## 0.0.25 (2024-05-06)
package/dist/index.js CHANGED
@@ -39896,7 +39896,7 @@ var {
 } = import_index.default;
 
 // package.json
-var version = "0.0.26";
+var version = "0.0.27";
 
 // src/lib/config/text.ts
 var CLI_DESCRIPTION = "GenLayer CLI is a development environment for the GenLayer ecosystem. It allows developers to interact with the protocol by creating accounts, sending transactions, and working with Intelligent Contracts by testing, debugging, and deploying them.";
@@ -42505,8 +42505,20 @@ var AVAILABLE_PLATFORMS = ["darwin", "win32", "linux"];
 var STARTING_TIMEOUT_WAIT_CYLCE = 2e3;
 var STARTING_TIMEOUT_ATTEMPTS = 120;
 var AI_PROVIDERS_CONFIG = {
-  ollama: { name: "Ollama (This will download and run a local instance of Llama 2)", envVar: "ollama", cliOptionValue: "ollama" },
-  openai: { name: "OpenAI (You will need to provide an OpenAI API key)", envVar: "OPENAIKEY", cliOptionValue: "openai" }
+  ollama: {
+    name: "Ollama (This will download and run a local instance of Llama 2)",
+    cliOptionValue: "ollama"
+  },
+  openai: {
+    name: "OpenAI (You will need to provide an OpenAI API key)",
+    envVar: "OPENAIKEY",
+    cliOptionValue: "openai"
+  },
+  heurist: {
+    name: 'Heurist (You will need to provide an API key. Get free API credits at https://dev-api-form.heurist.ai/ with referral code: "genlayer"):',
+    envVar: "HEURISTAIAPIKEY",
+    cliOptionValue: "heurist"
+  }
 };
 
 // src/lib/services/simulator.ts
@@ -44624,7 +44636,9 @@ function initAction(options) {
   const llmProvidersAnswer = yield inquirer_default.prompt(questions);
   const selectedLlmProviders = llmProvidersAnswer.selectedLlmProviders;
   const aiProvidersEnvVars = {};
-  const configurableAiProviders = selectedLlmProviders.filter((provider) => provider !== "ollama");
+  const configurableAiProviders = selectedLlmProviders.filter(
+    (provider) => AI_PROVIDERS_CONFIG[provider].envVar
+  );
   for (let i2 = 0; i2 < configurableAiProviders.length; i2++) {
     const provider = configurableAiProviders[i2];
     const providerConfig = AI_PROVIDERS_CONFIG[provider];
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "genlayer",
-  "version": "0.0.26",
+  "version": "0.0.27",
   "description": "GenLayer Command Line Tool",
   "main": "src/index.ts",
   "bin": {
@@ -100,7 +100,9 @@ export async function initAction(options: InitActionOptions) {
 
   // Gather the API Keys
   const aiProvidersEnvVars: Record<string, string> = {};
-  const configurableAiProviders = selectedLlmProviders.filter((provider: string) => provider !== "ollama");
+  const configurableAiProviders = selectedLlmProviders.filter(
+    (provider: AiProviders) => AI_PROVIDERS_CONFIG[provider].envVar,
+  );
   for (let i = 0; i < configurableAiProviders.length; i++) {
     const provider = configurableAiProviders[i];
     const providerConfig = AI_PROVIDERS_CONFIG[provider];
@@ -119,7 +121,7 @@ export async function initAction(options: InitActionOptions) {
     ];
 
     const apiKeyAnswer = await inquirer.prompt(questions);
-    aiProvidersEnvVars[providerConfig.envVar] = apiKeyAnswer[providerConfig.cliOptionValue];
+    aiProvidersEnvVars[providerConfig.envVar!] = apiKeyAnswer[providerConfig.cliOptionValue];
   }
 
   console.log("Configuring GenLayer Simulator environment...");
@@ -15,13 +15,25 @@ export type RunningPlatform = (typeof AVAILABLE_PLATFORMS)[number];
 export const STARTING_TIMEOUT_WAIT_CYLCE = 2000;
 export const STARTING_TIMEOUT_ATTEMPTS = 120;
 
-export type AiProviders = "ollama" | "openai";
-export type AiProvidersEnvVars = "ollama" | "OPENAIKEY";
+export type AiProviders = "ollama" | "openai" | "heurist";
+export type AiProvidersEnvVars = "ollama" | "OPENAIKEY" | "HEURISTAIAPIKEY";
 export type AiProvidersConfigType = {
-  [key in AiProviders]: {name: string; envVar: AiProvidersEnvVars; cliOptionValue: string};
+  [key in AiProviders]: {name: string; envVar?: AiProvidersEnvVars; cliOptionValue: string};
 };
 
 export const AI_PROVIDERS_CONFIG: AiProvidersConfigType = {
-  ollama: {name: "Ollama (This will download and run a local instance of Llama 2)", envVar: "ollama", cliOptionValue: "ollama"},
-  openai: {name: "OpenAI (You will need to provide an OpenAI API key)", envVar: "OPENAIKEY", cliOptionValue: "openai"},
+  ollama: {
+    name: "Ollama (This will download and run a local instance of Llama 2)",
+    cliOptionValue: "ollama",
+  },
+  openai: {
+    name: "OpenAI (You will need to provide an OpenAI API key)",
+    envVar: "OPENAIKEY",
+    cliOptionValue: "openai",
+  },
+  heurist: {
+    name: 'Heurist (You will need to provide an API key. Get free API credits at https://dev-api-form.heurist.ai/ with referral code: "genlayer"):',
+    envVar: "HEURISTAIAPIKEY",
+    cliOptionValue: "heurist",
+  },
 };
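
Taken together, these changes add Heurist as a third LLM provider and make envVar optional, so the init flow now decides which providers need an API key from the config itself rather than special-casing "ollama". A minimal TypeScript sketch of the resulting behavior follows; the trimmed config literal, the pickConfigurable helper, and the sample call are illustrative only and are not part of the package:

type AiProviders = "ollama" | "openai" | "heurist";
type AiProvidersEnvVars = "ollama" | "OPENAIKEY" | "HEURISTAIAPIKEY";

type AiProvidersConfig = {
  [key in AiProviders]: {name: string; envVar?: AiProvidersEnvVars; cliOptionValue: string};
};

// Same shape as AI_PROVIDERS_CONFIG above, with display names shortened;
// ollama carries no envVar because it runs locally and needs no API key.
const CONFIG: AiProvidersConfig = {
  ollama: {name: "Ollama", cliOptionValue: "ollama"},
  openai: {name: "OpenAI", envVar: "OPENAIKEY", cliOptionValue: "openai"},
  heurist: {name: "Heurist", envVar: "HEURISTAIAPIKEY", cliOptionValue: "heurist"},
};

// Hypothetical helper mirroring the new filter: keep only the providers whose
// config declares an envVar, i.e. the ones the init flow should prompt for a key.
function pickConfigurable(selected: AiProviders[]): AiProviders[] {
  return selected.filter((provider) => Boolean(CONFIG[provider].envVar));
}

// Ollama is skipped; OpenAI and Heurist would each be prompted for an API key.
console.log(pickConfigurable(["ollama", "openai", "heurist"])); // ["openai", "heurist"]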