genlayer 0.0.26 → 0.0.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +4 -0
- package/dist/index.js +26 -13
- package/package.json +1 -1
- package/src/commands/general/init.ts +6 -4
- package/src/lib/clients/system.ts +0 -1
- package/src/lib/config/simulator.ts +20 -8
- package/src/lib/services/simulator.ts +8 -3
package/CHANGELOG.md
CHANGED
package/dist/index.js
CHANGED
@@ -39896,7 +39896,7 @@ var {
 } = import_index.default;
 
 // package.json
-var version = "0.0.26";
+var version = "0.0.28";
 
 // src/lib/config/text.ts
 var CLI_DESCRIPTION = "GenLayer CLI is a development environment for the GenLayer ecosystem. It allows developers to interact with the protocol by creating accounts, sending transactions, and working with Intelligent Contracts by testing, debugging, and deploying them.";
@@ -42497,16 +42497,28 @@ var DEFAULT_RUN_SIMULATOR_COMMAND = (simulatorLocation) => ({
   linux: `x-terminal-emulator -e bash -c 'cd ${simulatorLocation} && docker compose build && docker compose up; echo "Press enter to exit"; read'`
 });
 var DEFAULT_PULL_OLLAMA_COMMAND = (simulatorLocation) => ({
-  darwin: `
-  win32: `
-  linux: `
+  darwin: `cd ${simulatorLocation} && docker exec ollama ollama pull llama3`,
+  win32: `cd /d ${simulatorLocation} && docker exec ollama ollama pull llama3`,
+  linux: `cd ${simulatorLocation} && docker exec ollama ollama pull llama3`
 });
 var AVAILABLE_PLATFORMS = ["darwin", "win32", "linux"];
 var STARTING_TIMEOUT_WAIT_CYLCE = 2e3;
 var STARTING_TIMEOUT_ATTEMPTS = 120;
 var AI_PROVIDERS_CONFIG = {
-  ollama: {
-
+  ollama: {
+    name: "Ollama (This will download and run a local instance of Llama 3)",
+    cliOptionValue: "ollama"
+  },
+  openai: {
+    name: "OpenAI (You will need to provide an OpenAI API key)",
+    envVar: "OPENAIKEY",
+    cliOptionValue: "openai"
+  },
+  heurist: {
+    name: 'Heurist (You will need to provide an API key. Get free API credits at https://dev-api-form.heurist.ai/ with referral code: "genlayer"):',
+    envVar: "HEURISTAIAPIKEY",
+    cliOptionValue: "heurist"
+  }
 };
 
 // src/lib/services/simulator.ts
@@ -44369,7 +44381,6 @@ function checkCommand(command, toolName) {
     if (stderr) {
       throw new MissingRequirementError(toolName);
     }
-    console.log(`${toolName} is installed.`);
   });
 }
 function executeCommand(cmdsByPlatform, toolName) {
@@ -44517,7 +44528,7 @@ function waitForSimulatorToBeReady() {
       return waitForSimulatorToBeReady(retries - 1);
     }
   } catch (error) {
-    if ((error.message.includes("ECONNREFUSED") || error.message.includes("socket hang up")) && retries > 0) {
+    if ((error.message.includes("ECONNRESET") || error.message.includes("ECONNREFUSED") || error.message.includes("socket hang up")) && retries > 0) {
       yield sleep(STARTING_TIMEOUT_WAIT_CYLCE * 2);
       return waitForSimulatorToBeReady(retries - 1);
     }
@@ -44536,8 +44547,8 @@ function initializeDatabase() {
     return { createResponse, tablesResponse };
   });
 }
-function createRandomValidators() {
-  return rpcClient.request({ method: "create_random_validators", params: [
+function createRandomValidators(numValidators) {
+  return rpcClient.request({ method: "create_random_validators", params: [numValidators, 1, 10] });
 }
 function deleteAllValidators() {
   return rpcClient.request({ method: "delete_all_validators", params: [] });
@@ -44624,7 +44635,9 @@ function initAction(options) {
     const llmProvidersAnswer = yield inquirer_default.prompt(questions);
     const selectedLlmProviders = llmProvidersAnswer.selectedLlmProviders;
     const aiProvidersEnvVars = {};
-    const configurableAiProviders = selectedLlmProviders.filter(
+    const configurableAiProviders = selectedLlmProviders.filter(
+      (provider) => AI_PROVIDERS_CONFIG[provider].envVar
+    );
     for (let i2 = 0; i2 < configurableAiProviders.length; i2++) {
       const provider = configurableAiProviders[i2];
       const providerConfig = AI_PROVIDERS_CONFIG[provider];
@@ -44677,7 +44690,7 @@ function initAction(options) {
       return;
     }
     if (selectedLlmProviders.includes("ollama")) {
-      console.log("Pulling
+      console.log("Pulling llama3 from Ollama...");
       yield pullOllamaModel();
     }
     console.log("Initializing the database...");
@@ -44695,7 +44708,7 @@ function initAction(options) {
     console.log("Initializing validators...");
     try {
       yield deleteAllValidators();
-      yield createRandomValidators();
+      yield createRandomValidators(Number(options.numValidators));
     } catch (error) {
       console.error("Unable to initialize the validators.");
       console.error(error);
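The retry-condition change above (in the bundled `waitForSimulatorToBeReady`) broadens the set of connection errors treated as transient to include `ECONNRESET`. The sketch below isolates that pattern; `isTransientError` and `retry` are illustrative helpers, not exports of this package.

```ts
// Illustrative sketch of the broadened retry condition: "ECONNRESET" is now
// treated as transient alongside "ECONNREFUSED" and "socket hang up".
// isTransientError and retry are illustrative helpers, not package exports.
const TRANSIENT_ERRORS = ["ECONNRESET", "ECONNREFUSED", "socket hang up"];

function isTransientError(error: unknown): boolean {
  const message = error instanceof Error ? error.message : String(error);
  return TRANSIENT_ERRORS.some((fragment) => message.includes(fragment));
}

async function retry<T>(fn: () => Promise<T>, attempts: number, waitMs: number): Promise<T> {
  try {
    return await fn();
  } catch (error) {
    if (isTransientError(error) && attempts > 0) {
      // Back off briefly, then try again with one fewer attempt remaining.
      await new Promise((resolve) => setTimeout(resolve, waitMs));
      return retry(fn, attempts - 1, waitMs);
    }
    throw error;
  }
}
```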
package/package.json
CHANGED
package/src/commands/general/init.ts
CHANGED

@@ -100,7 +100,9 @@ export async function initAction(options: InitActionOptions) {
 
   // Gather the API Keys
   const aiProvidersEnvVars: Record<string, string> = {};
-  const configurableAiProviders = selectedLlmProviders.filter(
+  const configurableAiProviders = selectedLlmProviders.filter(
+    (provider: AiProviders) => AI_PROVIDERS_CONFIG[provider].envVar,
+  );
   for (let i = 0; i < configurableAiProviders.length; i++) {
     const provider = configurableAiProviders[i];
     const providerConfig = AI_PROVIDERS_CONFIG[provider];

@@ -119,7 +121,7 @@ export async function initAction(options: InitActionOptions) {
     ];
 
     const apiKeyAnswer = await inquirer.prompt(questions);
-    aiProvidersEnvVars[providerConfig.envVar] = apiKeyAnswer[providerConfig.cliOptionValue];
+    aiProvidersEnvVars[providerConfig.envVar!] = apiKeyAnswer[providerConfig.cliOptionValue];
   }
 
   console.log("Configuring GenLayer Simulator environment...");

@@ -160,7 +162,7 @@ export async function initAction(options: InitActionOptions) {
 
   // Ollama doesn't need changes in configuration, we just run it
   if (selectedLlmProviders.includes("ollama")) {
-    console.log("Pulling
+    console.log("Pulling llama3 from Ollama...");
     await pullOllamaModel();
   }
 

@@ -186,7 +188,7 @@ export async function initAction(options: InitActionOptions) {
     //remove all validators
     await deleteAllValidators();
     // create random validators
-    await createRandomValidators();
+    await createRandomValidators(Number(options.numValidators));
   } catch (error) {
     console.error("Unable to initialize the validators.");
     console.error(error);
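The `initAction` changes above only prompt for API keys when a provider declares an `envVar` (Ollama does not). Below is a minimal sketch of that filtering pattern, assuming the `AI_PROVIDERS_CONFIG` shape shown in `src/lib/config/simulator.ts`; `promptForKey` is a hypothetical stand-in for the `inquirer` prompt used in the CLI, and the provider names are abbreviated.

```ts
// Sketch of the provider filtering introduced in initAction: only providers
// that declare an envVar require an API key (Ollama does not).
// promptForKey is a hypothetical stand-in for the inquirer prompt.
type AiProviders = "ollama" | "openai" | "heurist";
type AiProvidersEnvVars = "ollama" | "OPENAIKEY" | "HEURISTAIAPIKEY";

const AI_PROVIDERS_CONFIG: {
  [key in AiProviders]: {name: string; envVar?: AiProvidersEnvVars; cliOptionValue: string};
} = {
  ollama: {name: "Ollama", cliOptionValue: "ollama"},
  openai: {name: "OpenAI", envVar: "OPENAIKEY", cliOptionValue: "openai"},
  heurist: {name: "Heurist", envVar: "HEURISTAIAPIKEY", cliOptionValue: "heurist"},
};

async function collectApiKeys(
  selectedLlmProviders: AiProviders[],
  promptForKey: (provider: AiProviders) => Promise<string>,
): Promise<Record<string, string>> {
  const aiProvidersEnvVars: Record<string, string> = {};
  const configurableAiProviders = selectedLlmProviders.filter(
    (provider) => AI_PROVIDERS_CONFIG[provider].envVar,
  );
  for (const provider of configurableAiProviders) {
    aiProvidersEnvVars[AI_PROVIDERS_CONFIG[provider].envVar!] = await promptForKey(provider);
  }
  return aiProvidersEnvVars;
}
```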
package/src/lib/config/simulator.ts
CHANGED

@@ -6,22 +6,34 @@ export const DEFAULT_RUN_SIMULATOR_COMMAND = (simulatorLocation: string) => ({
   linux: `x-terminal-emulator -e bash -c 'cd ${simulatorLocation} && docker compose build && docker compose up; echo "Press enter to exit"; read'`,
 });
 export const DEFAULT_PULL_OLLAMA_COMMAND = (simulatorLocation: string) => ({
-  darwin: `
-  win32: `
-  linux: `
+  darwin: `cd ${simulatorLocation} && docker exec ollama ollama pull llama3`,
+  win32: `cd /d ${simulatorLocation} && docker exec ollama ollama pull llama3`,
+  linux: `cd ${simulatorLocation} && docker exec ollama ollama pull llama3`,
 });
 export const AVAILABLE_PLATFORMS = ["darwin", "win32", "linux"] as const;
 export type RunningPlatform = (typeof AVAILABLE_PLATFORMS)[number];
 export const STARTING_TIMEOUT_WAIT_CYLCE = 2000;
 export const STARTING_TIMEOUT_ATTEMPTS = 120;
 
-export type AiProviders = "ollama" | "openai";
-export type AiProvidersEnvVars = "ollama" | "OPENAIKEY";
+export type AiProviders = "ollama" | "openai" | "heurist";
+export type AiProvidersEnvVars = "ollama" | "OPENAIKEY" | "HEURISTAIAPIKEY";
 export type AiProvidersConfigType = {
-  [key in AiProviders]: {name: string; envVar
+  [key in AiProviders]: {name: string; envVar?: AiProvidersEnvVars; cliOptionValue: string};
 };
 
 export const AI_PROVIDERS_CONFIG: AiProvidersConfigType = {
-  ollama: {
-
+  ollama: {
+    name: "Ollama (This will download and run a local instance of Llama 3)",
+    cliOptionValue: "ollama",
+  },
+  openai: {
+    name: "OpenAI (You will need to provide an OpenAI API key)",
+    envVar: "OPENAIKEY",
+    cliOptionValue: "openai",
+  },
+  heurist: {
+    name: 'Heurist (You will need to provide an API key. Get free API credits at https://dev-api-form.heurist.ai/ with referral code: "genlayer"):',
+    envVar: "HEURISTAIAPIKEY",
+    cliOptionValue: "heurist",
+  },
 };
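The per-platform command maps above (e.g. `DEFAULT_PULL_OLLAMA_COMMAND`) are keyed by `RunningPlatform`. A rough sketch of how such a map can be resolved at runtime follows; `resolvePlatformCommand`, `pullOllamaModelSketch`, and `runCommand` are hypothetical stand-ins for the CLI's own command execution, not exports of this package.

```ts
// Sketch: resolving a platform-keyed command map such as DEFAULT_PULL_OLLAMA_COMMAND.
// runCommand is a hypothetical stand-in for the CLI's own command executor.
const AVAILABLE_PLATFORMS = ["darwin", "win32", "linux"] as const;
type RunningPlatform = (typeof AVAILABLE_PLATFORMS)[number];

function resolvePlatformCommand(commands: Record<RunningPlatform, string>): string {
  const platform = process.platform as RunningPlatform;
  if (!AVAILABLE_PLATFORMS.includes(platform)) {
    // process.platform can take values outside the supported trio (e.g. "freebsd").
    throw new Error(`Unsupported platform: ${process.platform}`);
  }
  return commands[platform];
}

// Example: pick the llama3 pull command for the current platform.
async function pullOllamaModelSketch(
  simulatorLocation: string,
  runCommand: (cmd: string) => Promise<void>,
): Promise<void> {
  await runCommand(
    resolvePlatformCommand({
      darwin: `cd ${simulatorLocation} && docker exec ollama ollama pull llama3`,
      win32: `cd /d ${simulatorLocation} && docker exec ollama ollama pull llama3`,
      linux: `cd ${simulatorLocation} && docker exec ollama ollama pull llama3`,
    }),
  );
}
```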
package/src/lib/services/simulator.ts
CHANGED

@@ -159,7 +159,12 @@ export async function waitForSimulatorToBeReady(
       return waitForSimulatorToBeReady(retries - 1);
     }
   } catch (error: any) {
-    if (
+    if (
+      (error.message.includes("ECONNRESET") ||
+        error.message.includes("ECONNREFUSED") ||
+        error.message.includes("socket hang up")) &&
+      retries > 0
+    ) {
       await sleep(STARTING_TIMEOUT_WAIT_CYLCE * 2);
       return waitForSimulatorToBeReady(retries - 1);
     }

@@ -184,8 +189,8 @@ export async function initializeDatabase(): Promise<InitializeDatabaseResultType
   return {createResponse, tablesResponse};
 }
 
-export function createRandomValidators(): Promise<any> {
-  return rpcClient.request({method: "create_random_validators", params: [
+export function createRandomValidators(numValidators: number): Promise<any> {
+  return rpcClient.request({method: "create_random_validators", params: [numValidators, 1, 10]});
 }
 
 export function deleteAllValidators(): Promise<any> {