@constellation-network/node-pilot 0.0.8 → 0.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -15
- package/bin/dev.js +1 -6
- package/bin/run.js +1 -1
- package/dist/base-command.d.ts +9 -0
- package/dist/base-command.js +20 -0
- package/dist/checks/check-hardware.js +3 -3
- package/dist/checks/check-layers.js +7 -7
- package/dist/checks/check-network.d.ts +2 -0
- package/dist/checks/check-network.js +46 -11
- package/dist/checks/check-node-ctl.js +4 -4
- package/dist/checks/check-project.js +13 -8
- package/dist/checks/check-wallet.d.ts +3 -0
- package/dist/checks/check-wallet.js +37 -0
- package/dist/clm.d.ts +1 -0
- package/dist/clm.js +3 -0
- package/dist/commands/config/get.d.ts +6 -0
- package/dist/commands/config/get.js +57 -11
- package/dist/commands/config/set.d.ts +0 -1
- package/dist/commands/config/set.js +13 -11
- package/dist/commands/config.js +17 -22
- package/dist/commands/info.js +3 -2
- package/dist/commands/logs.d.ts +1 -1
- package/dist/commands/logs.js +7 -3
- package/dist/commands/restart.d.ts +5 -2
- package/dist/commands/restart.js +25 -5
- package/dist/commands/shutdown.js +3 -3
- package/dist/commands/status.js +4 -0
- package/dist/commands/test.js +11 -3
- package/dist/config-store.d.ts +43 -30
- package/dist/config-store.js +71 -33
- package/dist/helpers/config-helper.js +2 -2
- package/dist/helpers/env-templates.d.ts +4 -3
- package/dist/helpers/env-templates.js +28 -20
- package/dist/helpers/key-file-helper.d.ts +2 -0
- package/dist/helpers/key-file-helper.js +51 -16
- package/dist/helpers/project-helper.d.ts +2 -2
- package/dist/helpers/project-helper.js +37 -38
- package/dist/helpers/prompt-helper.d.ts +0 -1
- package/dist/helpers/prompt-helper.js +15 -15
- package/dist/services/archiver-service.d.ts +17 -0
- package/dist/services/archiver-service.js +104 -0
- package/dist/services/cluster-service.d.ts +10 -6
- package/dist/services/cluster-service.js +45 -45
- package/dist/services/docker-service.d.ts +9 -0
- package/dist/{helpers/docker-helper.js → services/docker-service.js} +8 -5
- package/dist/services/fastforward-service.js +3 -3
- package/dist/services/get-random-node.js +1 -1
- package/dist/{helpers/github-helper.d.ts → services/github-service.d.ts} +1 -1
- package/dist/{helpers/github-helper.js → services/github-service.js} +1 -1
- package/dist/services/node-service.js +14 -14
- package/dist/test.d.ts +1 -0
- package/dist/test.js +50 -0
- package/dist/types.d.ts +6 -0
- package/install-dependencies.sh +0 -2
- package/oclif.manifest.json +23 -4
- package/package.json +9 -8
- package/projects/hypergraph/Dockerfile +27 -18
- package/projects/hypergraph/docker-compose.yml +14 -12
- package/projects/hypergraph/networks/integrationnet/gl0.env +4 -0
- package/projects/hypergraph/networks/integrationnet/gl1.env +4 -0
- package/projects/hypergraph/networks/integrationnet/network.env +8 -0
- package/projects/hypergraph/networks/{integrationnet.env → integrationnet/source-nodes.env} +1 -9
- package/projects/hypergraph/networks/mainnet/gl0.env +4 -0
- package/projects/hypergraph/networks/mainnet/gl1.env +4 -0
- package/projects/hypergraph/networks/mainnet/network.env +8 -0
- package/projects/hypergraph/networks/{mainnet.env → mainnet/source-nodes.env} +0 -8
- package/projects/hypergraph/networks/testnet/gl0.env +5 -0
- package/projects/hypergraph/networks/testnet/gl1.env +4 -0
- package/projects/hypergraph/networks/testnet/network.env +8 -0
- package/projects/hypergraph/networks/{testnet.env → testnet/source-nodes.env} +0 -8
- package/projects/hypergraph/scripts/check-version.sh +31 -0
- package/projects/hypergraph/scripts/docker-build.sh +12 -1
- package/projects/hypergraph/scripts/install.sh +30 -25
- package/projects/hypergraph/seedlist +268 -0
- package/scripts/autoheal.sh +8 -0
- package/scripts/services/io.constellationnetwork.nodepilot.Updater.plist +16 -0
- package/scripts/services/node-pilot-autoheal.service +14 -0
- package/scripts/services/node-pilot-updater-hypergraph.service +15 -0
- package/scripts/updater.sh +13 -0
- package/dist/helpers/docker-helper.d.ts +0 -7
- package/projects/hypergraph/layers/gl1.env +0 -3
- package/projects/scripts/docker-cleanup.sh +0 -64
package/dist/helpers/env-templates.d.ts

```diff
@@ -1,4 +1,5 @@
-import {
+import { EnvCombinedInfo, EnvInfo } from "../config-store.js";
 import { TessellationLayer } from "../types.js";
-export declare function getLayerEnvFileContent(layer: TessellationLayer,
-export declare function getKeyFileContent(
+export declare function getLayerEnvFileContent(layer: TessellationLayer, env: EnvCombinedInfo): string;
+export declare function getKeyFileContent(env: EnvInfo): string;
+export declare function getObjectToEnvContent(obj: object): string;
```
package/dist/helpers/env-templates.js

```diff
@@ -1,33 +1,41 @@
-export function getLayerEnvFileContent(layer,
+export function getLayerEnvFileContent(layer, env) {
     return `
+DEBUG=${process.env.DEBUG || ''}
+NODE_PILOT_SESSION=${Date.now()}
+
 # Node
-
-
+CL_ARCHIVE_NODE=${env.CL_ARCHIVE_NODE || ''}
+CL_EXTERNAL_IP=${env.CL_EXTERNAL_IP}
+CL_DOCKER_JAVA_OPTS='${env.CL_DOCKER_JAVA_OPTS}'
 CL_KEYSTORE='/app/key.p12'
-CL_KEYALIAS='${
-CL_PASSWORD='${
+CL_KEYALIAS='${env.CL_KEYALIAS}'
+CL_PASSWORD='${env.CL_PASSWORD}'
 CL_TESSELATION_LAYER=${layer}
 
 # NETWORK
-
-
-
-
-
-
-
-
+CL_LB=${env.CL_LB}
+CL_APP_ENV=${env.CL_APP_ENV}
+CL_COLLATERAL=${env.CL_COLLATERAL}
+CL_L0_PEER_HTTP_PORT=${env.CL_L0_PEER_HTTP_PORT}
+CL_L0_PEER_HTTP_HOST=${env.CL_L0_PEER_HTTP_HOST}
+CL_L0_PEER_ID=${env.CL_L0_PEER_ID}
+CL_GLOBAL_L0_PEER_HTTP_PORT=${env.CL_GLOBAL_L0_PEER_HTTP_PORT}
+CL_GLOBAL_L0_PEER_HOST=${env.CL_GLOBAL_L0_PEER_HOST}
+CL_GLOBAL_L0_PEER_ID=${env.CL_GLOBAL_L0_PEER_ID}
 
 # LAYER
-CL_PUBLIC_HTTP_PORT=${
-CL_P2P_HTTP_PORT=${
-CL_CLI_HTTP_PORT=${
+CL_PUBLIC_HTTP_PORT=${env.CL_PUBLIC_HTTP_PORT}
+CL_P2P_HTTP_PORT=${env.CL_P2P_HTTP_PORT}
+CL_CLI_HTTP_PORT=${env.CL_CLI_HTTP_PORT}
 `;
 }
-export function getKeyFileContent(
+export function getKeyFileContent(env) {
     return `
-export CL_KEYSTORE='${
-export CL_KEYALIAS='${
-export CL_PASSWORD='${
+export CL_KEYSTORE='${env.CL_KEYSTORE}'
+export CL_KEYALIAS='${env.CL_KEYALIAS}'
+export CL_PASSWORD='${env.CL_PASSWORD}'
 `;
 }
+export function getObjectToEnvContent(obj) {
+    return Object.entries(obj).map(([k, v]) => `${k}='${v}'`).join('\n') + '\n';
+}
```
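The new `getObjectToEnvContent` helper simply serializes a flat object into shell-style `KEY='value'` lines. A minimal sketch of what it produces, with illustrative values and an illustrative import path (real callers pass the key-file env plus node metadata, as `promptSaveBackup` does below):

```js
import { getObjectToEnvContent } from "./package/dist/helpers/env-templates.js";

// Sample input only; values are placeholders, not taken from a real node.
const content = getObjectToEnvContent({
    CL_KEYALIAS: 'alias',
    CL_KEYSTORE: 'key.p12',
    CL_PASSWORD: '****'
});
console.log(content);
// CL_KEYALIAS='alias'
// CL_KEYSTORE='key.p12'
// CL_PASSWORD='****'
```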
package/dist/helpers/key-file-helper.d.ts

```diff
@@ -1,9 +1,11 @@
 export declare const keyFileHelper: {
+    changePassword(newPassword: string): Promise<void>;
     generate(): Promise<void>;
     getAddress(): Promise<string>;
     getId(): Promise<string>;
     importKeyFile(): Promise<void>;
     promptForKeyFile(): Promise<void>;
     promptIfNoKeyFile(): Promise<void>;
+    promptSaveBackup(env: object): Promise<void>;
     showKeyFileInfo(prompt4ShowPassword?: boolean): Promise<void>;
 };
```
package/dist/helpers/key-file-helper.js

```diff
@@ -7,12 +7,27 @@ import { clm } from "../clm.js";
 import { configStore } from "../config-store.js";
 import { shellService } from "../services/shell-service.js";
 import { configHelper } from "./config-helper.js";
-import {
+import { getObjectToEnvContent } from "./env-templates.js";
 export const keyFileHelper = {
+    async changePassword(newPassword) {
+        const { projectDir } = configStore.getProjectInfo();
+        const keyFilePath = path.join(projectDir, "key.p12");
+        if (!fs.existsSync(keyFilePath)) {
+            clm.error('No key file found. Please generate a new key file first.');
+        }
+        const { CL_PASSWORD: currentPassword } = configStore.getEnvInfo();
+        if (currentPassword === newPassword) {
+            clm.error('The new password is the same as the current password. Please try again with a different password.');
+        }
+        await shellService.runCommand(`keytool -importkeystore -srckeystore ${keyFilePath} -srcstoretype PKCS12 -srcstorepass '${currentPassword}' -destkeystore ${path.join(projectDir, "temp.p12")} -deststoretype PKCS12 -deststorepass '${newPassword}' -destkeypass '${newPassword}'`);
+        await this.promptSaveBackup({ CL_KEYALIAS: 'alias', CL_KEYSTORE: keyFilePath, CL_PASSWORD: newPassword });
+    },
     async generate() {
         const { projectDir } = configStore.getProjectInfo();
         const keyFilePath = path.join(projectDir, "key.p12");
+        let modifier = '';
         if (fs.existsSync(keyFilePath)) {
+            modifier = 'new ';
             const answer = await input({ default: 'n', message: 'A key file already exists. Do you want to overwrite it? (y/n): ' });
             if (answer.toLowerCase() === 'y') {
                 fs.rmSync(keyFilePath, { force: true });
@@ -22,31 +37,26 @@ export const keyFileHelper = {
                 process.exit(0);
             }
         }
-        const keyPassword = await password({ message:
+        const keyPassword = await password({ message: `Enter the ${modifier}key file password:`, validate: value => value.length > 0 });
         const env = {
             CL_KEYALIAS: "alias", CL_KEYSTORE: keyFilePath, CL_PASSWORD: keyPassword
         };
         await shellService.runCommand(`java -jar ${projectDir}/dist/keytool.jar generate`, env);
-        configStore.
+        configStore.setEnvInfo(env);
         const dagAddress = await this.getAddress();
         const nodeId = await this.getId();
         configStore.setProjectInfo({ dagAddress, nodeId });
         clm.postStep('Key file generated successfully.\n');
-
-        if (answer.toLowerCase() === 'y') {
-            fs.cpSync(keyFilePath, path.join(os.homedir(), 'key.p12'));
-            fs.writeFileSync(path.join(os.homedir(), 'key-env.sh'), getKeyFileContent({ ...env, CL_KEYSTORE: 'key.p12' }));
-            clm.postStep(`A copy of the Key file has been saved to your home directory - ${chalk.cyan(path.join(os.homedir(), 'key.p12'))}`);
-        }
+        await this.promptSaveBackup(env);
     },
     async getAddress() {
         const { projectDir } = configStore.getProjectInfo();
-        const env = configStore.
+        const env = configStore.getEnvInfo();
         return shellService.runCommandWithOutput(`java -jar ${projectDir}/dist/wallet.jar show-address`, env);
     },
     async getId() {
         const { projectDir } = configStore.getProjectInfo();
-        const env = configStore.
+        const env = configStore.getEnvInfo();
         return shellService.runCommandWithOutput(`java -jar ${projectDir}/dist/wallet.jar show-id`, env);
     },
     async importKeyFile() {
@@ -72,17 +82,17 @@ export const keyFileHelper = {
         // prompt for password
         const keyPassword = await password({ message: 'Enter the key file password:' });
         const keyAlias = await input({ message: 'Enter the key file alias:' });
-        configStore.
+        configStore.setEnvInfo({ CL_KEYALIAS: keyAlias, CL_KEYSTORE: keyStorePath, CL_PASSWORD: keyPassword });
         try {
             const dagAddress = await this.getAddress();
             const nodeId = await this.getId();
             configStore.setProjectInfo({ dagAddress, nodeId });
         }
         catch {
-            clm.warn('Failed to unlock the key file. Please check your key file information and try again.');
             fs.rmSync(keyStorePath);
-
-
+            clm.error('Failed to unlock the key file. Please check your key file information and try again.');
+            // await this.promptForKeyFile();
+            // return;
         }
         clm.postStep('Key file imported successfully.\n');
     },
@@ -108,6 +118,8 @@ export const keyFileHelper = {
             await this.importKeyFile();
             await this.showKeyFileInfo(false);
         }
+        configStore.setProjectFlag('duplicateNodeIdChecked', false);
+        configStore.setProjectFlag('seedListChecked', false);
     },
     async promptIfNoKeyFile() {
         const { projectDir } = configStore.getProjectInfo();
@@ -117,12 +129,35 @@ export const keyFileHelper = {
         }
         await this.promptForKeyFile();
     },
+    async promptSaveBackup(env) {
+        const { projectDir } = configStore.getProjectInfo();
+        const keyFilePath = path.join(projectDir, "key.p12");
+        const { dagAddress, nodeId } = configStore.getProjectInfo();
+        const answer = await input({ default: 'y', message: 'Would you like to save a backup of the key file to your home directory? (y/n): ' });
+        if (answer.toLowerCase() === 'y') {
+            const homeKeyPath = path.join(os.homedir(), 'key.p12');
+            const homeKeyInfoPath = path.join(os.homedir(), 'key-info');
+            if (fs.existsSync(homeKeyPath)) {
+                const backupUniqueName = new Date().toISOString().replaceAll(':', '-');
+                const backupKeyName = `key-${backupUniqueName}.p12`;
+                const backupKeyPath = path.join(os.homedir(), backupKeyName);
+                fs.renameSync(homeKeyPath, backupKeyPath);
+                clm.postStep(`An existing key file was found in your home directory and has been renamed to ${chalk.cyan(backupKeyName)}`);
+                if (fs.existsSync(homeKeyInfoPath)) {
+                    fs.renameSync(homeKeyInfoPath, path.join(os.homedir(), `key-info-${backupUniqueName}`));
+                }
+            }
+            fs.cpSync(keyFilePath, homeKeyPath);
+            fs.writeFileSync(path.join(os.homedir(), 'key-info'), getObjectToEnvContent({ ...env, CL_KEYSTORE: 'key.p12', CL_PASSWORD: '****', NODE_ADDRESS: dagAddress, NODE_ID: nodeId }));
+            clm.postStep(`A copy of the Key file has been saved to your home directory - ${chalk.cyan(homeKeyPath)}`);
+        }
+    },
     async showKeyFileInfo(prompt4ShowPassword = true) {
         clm.preStep('Current key file information:');
         const { dagAddress, nodeId } = configStore.getProjectInfo();
         configHelper.showEnvInfo('Node ID', nodeId);
         configHelper.showEnvInfo('DAG Address', dagAddress);
-        const { CL_KEYALIAS, CL_KEYSTORE, CL_PASSWORD } = configStore.
+        const { CL_KEYALIAS, CL_KEYSTORE, CL_PASSWORD } = configStore.getEnvInfo();
         configHelper.showEnvInfo('CL_KEYSTORE', CL_KEYSTORE || '');
         configHelper.showEnvInfo('CL_KEYALIAS', CL_KEYALIAS || '');
         configHelper.showEnvInfo('CL_PASSWORD', '*********');
```
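For orientation, the backup naming used by `promptSaveBackup` above: an existing `~/key.p12` is renamed with an ISO timestamp (colons replaced so the name is filename-safe) before the new copy is written. A standalone sketch of just that naming step, with an illustrative output path:

```js
import os from "node:os";
import path from "node:path";

// Same scheme as promptSaveBackup: ISO timestamp with ':' replaced by '-'.
const backupUniqueName = new Date().toISOString().replaceAll(':', '-');
const backupKeyPath = path.join(os.homedir(), `key-${backupUniqueName}.p12`);
console.log(backupKeyPath);
// e.g. /home/nodeadmin/key-2025-01-01T12-34-56.789Z.p12
```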
package/dist/helpers/project-helper.d.ts

```diff
@@ -1,10 +1,10 @@
 import { TessellationLayer } from "../types.js";
 export declare const projectHelper: {
     generateLayerEnvFiles(layers?: TessellationLayer[]): Promise<void>;
-
-    importNetworkEnvFiles(): Promise<void>;
+    importEnvFiles(): void;
     installEmbedded(name: string): Promise<void>;
     installFromGithub(_repo: string): Promise<never>;
     installHypergraph(): Promise<void>;
+    installProject(name: string, projectFolder: string): Promise<void>;
     selectProject(): Promise<void>;
 };
```
package/dist/helpers/project-helper.js

```diff
@@ -5,50 +5,46 @@ import path from "node:path";
 import { fileURLToPath } from 'node:url';
 import { clm } from "../clm.js";
 import { configStore } from "../config-store.js";
+import { githubService } from "../services/github-service.js";
 import { configHelper } from "./config-helper.js";
 import { getLayerEnvFileContent } from "./env-templates.js";
-import { githubHelper } from "./github-helper.js";
 export const projectHelper = {
     async generateLayerEnvFiles(layers) {
         const { layersToRun, projectDir } = configStore.getProjectInfo();
-        const { type } = configStore.getNetworkInfo();
-        const
+        const { type: network } = configStore.getNetworkInfo();
+        const envInfo = configStore.getEnvInfo();
+        const envNetworkInfo = configStore.getEnvNetworkInfo(network);
         layers = layers || layersToRun;
-        for (const
-        const filePath = path.join(projectDir, `${
-        const
-        const fileContents = getLayerEnvFileContent(
+        for (const layer of layers) {
+            const filePath = path.join(projectDir, `${layer}.env`);
+            const envLayerInfo = configStore.getEnvLayerInfo(network, layer);
+            const fileContents = getLayerEnvFileContent(layer, { ...envInfo, ...envNetworkInfo, ...envLayerInfo });
             clm.debug(`Writing layer env file: ${filePath}`);
             fs.writeFileSync(filePath, fileContents);
         }
     },
-
-        const { projectDir } = configStore.getProjectInfo();
-        const possibleLayers = ['gl0', 'gl1', 'ml0', 'cl1', 'dl1'];
-        for (const n of possibleLayers) {
-            const filePath = path.join(projectDir, 'layers', `${n}.env`);
-            if (fs.existsSync(filePath)) {
-                configStore.setEnvLayerInfo(n, configHelper.parseEnvFile(filePath));
-            }
-        }
-    },
-    async importNetworkEnvFiles() {
+    importEnvFiles() {
         const { projectDir } = configStore.getProjectInfo();
         const possibleNetworks = ['mainnet', 'testnet', 'integrationnet'];
+        const possibleLayers = ['gl0', 'gl1', 'ml0', 'cl1', 'dl1'];
         const supportedTypes = [];
-        const
-
-        const filePath = path.join(projectDir, 'networks', `${n}.env`);
+        for (const network of possibleNetworks) {
+            const filePath = path.join(projectDir, 'networks', network, 'network.env');
             if (fs.existsSync(filePath)) {
-                supportedTypes.push(
-
+                supportedTypes.push(network);
+                configStore.setEnvNetworkInfo(network, configHelper.parseEnvFile(filePath));
+            }
+            for (const layer of possibleLayers) {
+                const filePath = path.join(projectDir, 'networks', network, `${layer}.env`);
+                if (fs.existsSync(filePath)) {
+                    configStore.setEnvLayerInfo(network, layer, configHelper.parseEnvFile(filePath));
+                }
             }
         }
         if (supportedTypes.length === 0) {
             clm.error('No supported networks found in the project folder.');
         }
         configStore.setNetworkInfo({ supportedTypes });
-        configStore.setNetworkEnvInfo(networkEnvInfo);
         // eslint-disable-next-line no-warning-comments
         // TODO: verify all required env variables are present
     },
@@ -57,10 +53,7 @@ export const projectHelper = {
         if (!fs.existsSync(projectFolder)) {
             clm.error(`Project folder not found: ${projectFolder}`);
         }
-        await
-        const { projectDir } = configStore.getProjectInfo();
-        clm.debug(`Installing project from ${projectFolder} to ${projectDir}`);
-        fs.cpSync(projectFolder, projectDir, { recursive: true });
+        await this.installProject(name, projectFolder);
     },
     // curl -s https://api.github.com/repos/Constellation-Labs/pacaswap-metagraph/releases/latest | jq -r '.assets[] | select(.name | contains("node-pilot"))'
     // use .tag_name for the release version
@@ -75,15 +68,21 @@ export const projectHelper = {
         fs.mkdirSync(path.join(gl0DataDir, 'incremental_snapshot'), { recursive: true });
         fs.mkdirSync(path.join(gl0DataDir, 'snapshot_info'));
         fs.mkdirSync(path.join(gl0DataDir, 'tmp'));
-
-
-
-
-
-
-
-
-
+        this.importEnvFiles();
+    },
+    async installProject(name, projectFolder) {
+        if (!configStore.hasProjects()) {
+            // On first install, copy scripts
+            const scriptsFolder = path.resolve(path.dirname(fileURLToPath(import.meta.url)), `../../scripts`);
+            const projectDir = path.join(configStore.getAppDir(), 'scripts');
+            clm.debug(`Installing node pilot scripts from ${scriptsFolder} to ${projectDir}`);
+            fs.mkdirSync(projectDir, { recursive: true });
+            fs.cpSync(scriptsFolder, projectDir, { recursive: true });
+        }
+        await configStore.applyNewProjectStore(name);
+        const { projectDir } = configStore.getProjectInfo();
+        clm.debug(`Installing project from ${projectFolder} to ${projectDir}`);
+        fs.cpSync(projectFolder, projectDir, { recursive: true });
     },
     async selectProject() {
         // prompt user to install hypergraph or metagraph
@@ -124,7 +123,7 @@ export const projectHelper = {
         const m = repo.trim().match(ghRepoRegex);
         const userRepo = `${m[1]}/${m[2]}`; // owner/repo
         clm.preStep(`Installing from Github repository: ${chalk.cyan(userRepo)}`);
-        if (await
+        if (await githubService.hasAssetInRelease('node-pilot', userRepo)) {
             await this.installFromGithub(userRepo);
         }
         else {
```
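`importEnvFiles` replaces the old flat `networks/<network>.env` and `layers/<layer>.env` files with the per-network layout visible in the file list above (`networks/<network>/network.env` plus one env file per layer). A sketch of the paths it now scans; the project directory is a placeholder:

```js
import path from "node:path";

const projectDir = '/path/to/project'; // placeholder
for (const network of ['mainnet', 'testnet', 'integrationnet']) {
    // network-wide variables
    console.log(path.join(projectDir, 'networks', network, 'network.env'));
    // per-layer variables, imported only if the file exists
    for (const layer of ['gl0', 'gl1', 'ml0', 'cl1', 'dl1']) {
        console.log(path.join(projectDir, 'networks', network, `${layer}.env`));
    }
}
```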
package/dist/helpers/prompt-helper.js

```diff
@@ -3,22 +3,16 @@ import chalk from "chalk";
 import { clm } from "../clm.js";
 import { configStore } from "../config-store.js";
 export const promptHelper = {
-    async configureAutoRestart() {
-        const answer = await input({
-            default: 'y',
-            message: 'Do you want to enable auto-restart? (y/n): '
-        });
-        configStore.setProjectInfo({ autoRestart: answer === 'y' });
-    },
     async configureJavaMemoryArguments() {
         const { memory } = configStore.getSystemInfo();
         const { layersToRun, name } = configStore.getProjectInfo();
+        const { type: currentNetwork } = configStore.getNetworkInfo();
         const xmx = Number(memory);
         if (xmx === 8 && layersToRun.length > 1) {
             clm.warn('Minimum 8GB memory detected. Only a single layer will be allowed to run');
             await promptHelper.doYouWishToContinue();
             configStore.setProjectInfo({ layersToRun: [layersToRun[0]] });
-            configStore.setEnvLayerInfo(layersToRun[0], { CL_DOCKER_JAVA_OPTS: '-Xms1024M -Xmx7G -Xss256K' });
+            configStore.setEnvLayerInfo(currentNetwork, layersToRun[0], { CL_DOCKER_JAVA_OPTS: '-Xms1024M -Xmx7G -Xss256K' });
         }
         else if (name === 'hypergraph') {
             // prompt to use all detected memory
@@ -31,11 +25,16 @@ export const promptHelper = {
             answer--;
             const subLayerMem = layersToRun.length > 1 ? Math.floor(answer / 3) : 0;
             const mainLayerMem = answer - subLayerMem;
-
-
-
-            clm.postStep
-
+            const { supportedTypes } = configStore.getNetworkInfo();
+            for (const type of supportedTypes) {
+                const network = type.toUpperCase();
+                const logMethod = type === currentNetwork ? clm.postStep : clm.debug;
+                logMethod(`${network}:: ${layersToRun[0]} memory allocation: ${mainLayerMem}GB`);
+                configStore.setEnvLayerInfo(type, layersToRun[0], { CL_DOCKER_JAVA_OPTS: `-Xms1024M -Xmx${mainLayerMem}G -Xss256K` });
+                if (subLayerMem) {
+                    logMethod(`${network}:: ${layersToRun[1]} memory allocation: ${subLayerMem}GB`);
+                    configStore.setEnvLayerInfo(type, layersToRun[1], { CL_DOCKER_JAVA_OPTS: `-Xms1024M -Xmx${subLayerMem}G -Xss256K` });
+                }
             }
         }
     },
@@ -67,7 +66,7 @@ export const promptHelper = {
         }
         if (supportedTypes.length === 1) {
             configStore.setNetworkInfo({ type: supportedTypes[0], version: "latest" });
-            configStore.
+            // configStore.setEnvNetworkInfo(configStore.getNetworkEnvInfo(supportedTypes[0]));
             return;
         }
         const networkType = await select({
@@ -79,6 +78,7 @@ export const promptHelper = {
             message: 'Select network type:'
         });
         configStore.setNetworkInfo({ type: networkType, version: "latest" });
-        configStore.
+        configStore.setProjectFlag('duplicateNodeIdChecked', false);
+        configStore.setProjectFlag('seedListChecked', false);
     }
 };
```
package/dist/services/archiver-service.d.ts

```diff
@@ -0,0 +1,17 @@
+export declare const archiverService: {
+    checkLogsForMissingSnapshots(): Promise<void>;
+    getArchiveSnapshotInfo(): Promise<{
+        clusterOrdinal: number;
+        distance: number;
+        endOrdinal: number;
+        startOrdinal: number;
+        total: number;
+        url: string;
+    }>;
+    getDownloadedSnapshotRange(): {
+        chunkOrdinal: number;
+        endOrdinal: number;
+        startOrdinal: number;
+    };
+    syncToLatestSnapshot(): Promise<void>;
+};
```
package/dist/services/archiver-service.js

```diff
@@ -0,0 +1,104 @@
+import chalk from "chalk";
+import fs from "node:fs";
+import path from "node:path";
+import { clm } from "../clm.js";
+import { configStore } from "../config-store.js";
+import { clusterService } from "./cluster-service.js";
+import { FastforwardService } from "./fastforward-service.js";
+import { shellService } from "./shell-service.js";
+// http://5.161.243.241:7777/hash.txt
+const remoteIndexMap = {
+    integrationnet: "http://5.161.243.241:7777",
+    mainnet: "http://128.140.33.142:7777",
+    testnet: "http://65.108.87.84:7777"
+};
+export const archiverService = {
+    async checkLogsForMissingSnapshots() {
+        const { projectDir } = configStore.getProjectInfo();
+        const dataDir = path.join(projectDir, 'app-data', 'gl0-logs', 'app.log');
+        const result = await shellService.runCommandWithOutput(`grep -i 'Global snapshot not found for ordinal' ${dataDir}`).catch(() => '');
+        let oldestOrdinal = Number.MAX_SAFE_INTEGER;
+        for (const line of result.split('\n')) {
+            const number = line.match(/\d+/);
+            if (number) {
+                oldestOrdinal = Math.min(oldestOrdinal, Number(number[0]));
+            }
+        }
+        console.log(`Oldest ordinal: ${oldestOrdinal}`);
+    },
+    async getArchiveSnapshotInfo() {
+        const { type } = configStore.getNetworkInfo();
+        const clusterOrdinal = await clusterService.getSourceNodeLatestOrdinal('gl0');
+        return fetch(remoteIndexMap[type] + '/hash.txt')
+            .then(res => res.text())
+            .then(txt => {
+            const lines = txt.trim().split('\n');
+            const lastLine = lines.at(-1);
+            const filename = lastLine.split(' ')[1];
+            const parseName = filename.split('.')[0].split('-');
+            const startOrdinal = Number(parseName.at(1)?.slice(1));
+            const endOrdinal = parseName.length < 4 ? startOrdinal + 20_000 - 1 : Number(parseName.at(3)?.slice(1));
+            const distance = clusterOrdinal - endOrdinal;
+            const total = endOrdinal - startOrdinal + 1;
+            clm.debug(`Cluster Ordinal: ${chalk.yellow(clusterOrdinal)}, Archive End Ordinal: ${chalk.yellow(endOrdinal)}, Total Archive Snapshots: ${chalk.yellow(total)}, Distance: ${chalk.yellow(distance)}`);
+            return { clusterOrdinal, distance, endOrdinal, startOrdinal, total, url: remoteIndexMap[type] + '/' + filename };
+        });
+    },
+    getDownloadedSnapshotRange() {
+        const { projectDir } = configStore.getProjectInfo();
+        const dataDir = path.join(projectDir, 'app-data', 'gl0-data', 'incremental_snapshot', 'ordinal');
+        const result = { chunkOrdinal: 0, endOrdinal: 0, startOrdinal: 0 };
+        if (!fs.existsSync(dataDir)) {
+            return result;
+        }
+        // get last filename in directory
+        let files = fs.readdirSync(dataDir);
+        if (files.length === 0)
+            return result;
+        const latestChunk = files.sort().at(-1);
+        if (!latestChunk)
+            return result;
+        result.chunkOrdinal = Number(latestChunk);
+        files = fs.readdirSync(path.join(dataDir, latestChunk));
+        if (files.length === 0)
+            return result;
+        const filesSorted = files.sort();
+        const firstFile = filesSorted.at(0);
+        const lastFile = filesSorted.at(-1);
+        result.startOrdinal = Number(firstFile);
+        result.endOrdinal = Number(lastFile);
+        return result;
+    },
+    async syncToLatestSnapshot() {
+        const { clusterOrdinal, distance: archiveDistance, endOrdinal: remoteArchiveEndOrdinal, startOrdinal: remoteArchiveStartOrdinal, total, url } = await this.getArchiveSnapshotInfo();
+        if (archiveDistance > 1000) {
+            clm.preStep('Archive is far behind cluster. Initiating fast forward...');
+            await FastforwardService.synctoLatestSnapshot();
+            return;
+        }
+        const { endOrdinal: localEndOrdinal, startOrdinal: localStartOrdinal } = this.getDownloadedSnapshotRange();
+        const localDistanceFromCluster = clusterOrdinal - localEndOrdinal;
+        // if archive can improve local's snapshot range
+        const needToSync = remoteArchiveStartOrdinal < localStartOrdinal || remoteArchiveEndOrdinal > localEndOrdinal;
+        if (!needToSync) {
+            clm.step(`Already near latest ordinal. Skipping sync. Distance: ${localDistanceFromCluster}`);
+            return;
+        }
+        const requiredOldestOrdinal = clusterOrdinal - 10_000;
+        const archiveStartOrdinalMeetsOldestRequirement = remoteArchiveStartOrdinal <= requiredOldestOrdinal;
+        if (!archiveStartOrdinalMeetsOldestRequirement) {
+            clm.preStep('Archive is not in the optimal range, but proceeding with available data.');
+        }
+        const { projectDir } = configStore.getProjectInfo();
+        const dataDir = path.join(projectDir, 'app-data', 'gl0-data');
+        fs.mkdirSync(dataDir, { recursive: true });
+        clm.preStep(`Downloading latest snapshot archive ${chalk.yellow(remoteArchiveStartOrdinal)}-${chalk.yellow(remoteArchiveEndOrdinal)}; distance from cluster: ${chalk.yellow(archiveDistance)}\nCurrent oldest local ordinal: ${chalk.yellow(localStartOrdinal)}, Latest cluster ordinal: ${chalk.yellow(clusterOrdinal)}`);
+        // await shellService.runCommand(`curl -L ${url} -o ${dataDir}/snapshot.tar.gz`);
+        await shellService.runCommand(`wget --progress=bar:force -O ${dataDir}/snapshot.tar.gz ${url}`);
+        clm.preStep(`Extracting snapshot...`);
+        await shellService.runCommand(`tar -xf ${dataDir}/snapshot.tar.gz -C ${dataDir}`);
+        await shellService.runCommand(`rm ${dataDir}/snapshot.tar.gz`);
+        clm.postStep(`Total snapshots downloaded: ${chalk.yellow(total)}, Synced to ordinal: ${chalk.yellow(remoteArchiveEndOrdinal)}, Cluster Ordinal: ${chalk.yellow(clusterOrdinal)}, Distance from cluster: ${chalk.yellow(archiveDistance)}`);
+        clm.postStep(`Snapshot downloaded and extracted successfully.`);
+    }
+};
```
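A minimal sketch of the filename parsing that `getArchiveSnapshotInfo` performs on the last line of `hash.txt`. The sample line and the `…-o<start>-…-o<end>.tar.gz` naming are assumptions inferred from the parsing logic above, not confirmed archive conventions:

```js
// One space separates the hash from the filename, matching lastLine.split(' ')[1].
const lastLine = 'abc123 snapshot-o1500000-to-o1519999.tar.gz';
const filename = lastLine.split(' ')[1];             // "snapshot-o1500000-to-o1519999.tar.gz"
const parseName = filename.split('.')[0].split('-'); // ["snapshot", "o1500000", "to", "o1519999"]
const startOrdinal = Number(parseName.at(1)?.slice(1)); // 1500000
// With fewer than 4 segments, a fixed 20,000-snapshot chunk is assumed.
const endOrdinal = parseName.length < 4 ? startOrdinal + 20_000 - 1 : Number(parseName.at(3)?.slice(1)); // 1519999
console.log({ startOrdinal, endOrdinal, total: endOrdinal - startOrdinal + 1 }); // total: 20000
```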
package/dist/services/cluster-service.d.ts

```diff
@@ -1,10 +1,14 @@
-import { ClusterConsensusInfo, ClusterInfo, NodeInfo } from "../types.js";
+import { ClusterConsensusInfo, ClusterInfo, NodeInfo, TessellationLayer } from "../types.js";
 export declare const clusterService: {
     fastForwardSnapshot(): Promise<void>;
-    getClusterInfo(): Promise<ClusterInfo[]>;
-
-
-
+    getClusterInfo(layer?: TessellationLayer): Promise<ClusterInfo[]>;
+    getClusterNodeInfo(layer?: TessellationLayer): Promise<NodeInfo>;
+    getLatestConsensusInfo(layer?: TessellationLayer): Promise<ClusterConsensusInfo>;
+    getLayer0(): "gl0" | "ml0";
     getReleaseVersion(): Promise<string>;
-    getSourceNodeInfo(): Promise<NodeInfo>;
+    getSourceNodeInfo(layer: TessellationLayer): Promise<NodeInfo>;
+    getSourceNodeLatestOrdinal(layer: TessellationLayer): Promise<number>;
+    getSourceNodeOrdinalHash(layer: TessellationLayer, ordinal: number): Promise<string>;
+    makeClusterRequest(path: string, layer?: TessellationLayer): Promise<any>;
+    makeSourceNodeRequest(path: string, layer: TessellationLayer): Promise<any>;
 };
```
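A hypothetical usage sketch of the now layer-aware cluster-service API declared above; the call pattern mirrors how archiver-service calls `getSourceNodeLatestOrdinal('gl0')`, and the import path is illustrative:

```js
import { clusterService } from "./package/dist/services/cluster-service.js";

// Ask the gl0 source node for its latest ordinal, then fetch that ordinal's hash.
const latestOrdinal = await clusterService.getSourceNodeLatestOrdinal('gl0');
const hash = await clusterService.getSourceNodeOrdinalHash('gl0', latestOrdinal);
console.log(`gl0 ordinal ${latestOrdinal}: ${hash}`);
```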