@cybermem/cli 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -0
- package/dist/commands/__tests__/backup.test.js +75 -0
- package/dist/commands/__tests__/restore.test.js +70 -0
- package/dist/commands/backup.js +52 -0
- package/dist/commands/deploy.js +242 -0
- package/dist/commands/init.js +65 -0
- package/dist/commands/restore.js +62 -0
- package/dist/index.js +30 -0
- package/dist/templates/ansible/inventory/hosts.ini +3 -0
- package/dist/templates/ansible/playbooks/deploy-cybermem.yml +71 -0
- package/dist/templates/ansible/playbooks/stop-cybermem.yml +17 -0
- package/dist/templates/charts/cybermem/Chart.yaml +6 -0
- package/dist/templates/charts/cybermem/templates/dashboard-deployment.yaml +29 -0
- package/dist/templates/charts/cybermem/templates/dashboard-service.yaml +20 -0
- package/dist/templates/charts/cybermem/templates/openmemory-deployment.yaml +40 -0
- package/dist/templates/charts/cybermem/templates/openmemory-pvc.yaml +10 -0
- package/dist/templates/charts/cybermem/templates/openmemory-service.yaml +13 -0
- package/dist/templates/charts/cybermem/values-vps.yaml +18 -0
- package/dist/templates/charts/cybermem/values.yaml +42 -0
- package/dist/templates/docker-compose.yml +236 -0
- package/dist/templates/envs/local.example +27 -0
- package/dist/templates/envs/rpi.example +27 -0
- package/dist/templates/envs/vps.example +25 -0
- package/dist/templates/mcp-responder/Dockerfile +6 -0
- package/dist/templates/mcp-responder/server.js +22 -0
- package/dist/templates/monitoring/db_exporter/Dockerfile +19 -0
- package/dist/templates/monitoring/db_exporter/exporter.py +313 -0
- package/dist/templates/monitoring/db_exporter/requirements.txt +2 -0
- package/dist/templates/monitoring/grafana/dashboards/cybermem.json +1088 -0
- package/dist/templates/monitoring/grafana/provisioning/dashboards/default.yml +12 -0
- package/dist/templates/monitoring/grafana/provisioning/datasources/prometheus.yml +9 -0
- package/dist/templates/monitoring/log_exporter/Dockerfile +13 -0
- package/dist/templates/monitoring/log_exporter/exporter.py +274 -0
- package/dist/templates/monitoring/log_exporter/requirements.txt +1 -0
- package/dist/templates/monitoring/postgres_exporter/queries.yml +22 -0
- package/dist/templates/monitoring/prometheus/prometheus.yml +22 -0
- package/dist/templates/monitoring/traefik/dynamic/.gitkeep +0 -0
- package/dist/templates/monitoring/traefik/traefik.yml +32 -0
- package/dist/templates/monitoring/vector/vector.toml/vector.yaml +77 -0
- package/dist/templates/monitoring/vector/vector.yaml +106 -0
- package/dist/templates/openmemory/Dockerfile +19 -0
- package/package.json +50 -0
- package/templates/ansible/inventory/hosts.ini +3 -0
- package/templates/ansible/playbooks/deploy-cybermem.yml +71 -0
- package/templates/ansible/playbooks/stop-cybermem.yml +17 -0
- package/templates/charts/cybermem/Chart.yaml +6 -0
- package/templates/charts/cybermem/templates/dashboard-deployment.yaml +29 -0
- package/templates/charts/cybermem/templates/dashboard-service.yaml +20 -0
- package/templates/charts/cybermem/templates/openmemory-deployment.yaml +40 -0
- package/templates/charts/cybermem/templates/openmemory-pvc.yaml +10 -0
- package/templates/charts/cybermem/templates/openmemory-service.yaml +13 -0
- package/templates/charts/cybermem/values-vps.yaml +18 -0
- package/templates/charts/cybermem/values.yaml +42 -0
- package/templates/docker-compose.yml +236 -0
- package/templates/envs/local.example +27 -0
- package/templates/envs/rpi.example +27 -0
- package/templates/envs/vps.example +25 -0
- package/templates/mcp-responder/Dockerfile +6 -0
- package/templates/mcp-responder/server.js +22 -0
- package/templates/monitoring/db_exporter/Dockerfile +19 -0
- package/templates/monitoring/db_exporter/exporter.py +313 -0
- package/templates/monitoring/db_exporter/requirements.txt +2 -0
- package/templates/monitoring/grafana/dashboards/cybermem.json +1088 -0
- package/templates/monitoring/grafana/provisioning/dashboards/default.yml +12 -0
- package/templates/monitoring/grafana/provisioning/datasources/prometheus.yml +9 -0
- package/templates/monitoring/log_exporter/Dockerfile +13 -0
- package/templates/monitoring/log_exporter/exporter.py +274 -0
- package/templates/monitoring/log_exporter/requirements.txt +1 -0
- package/templates/monitoring/postgres_exporter/queries.yml +22 -0
- package/templates/monitoring/prometheus/prometheus.yml +22 -0
- package/templates/monitoring/traefik/dynamic/.gitkeep +0 -0
- package/templates/monitoring/traefik/traefik.yml +32 -0
- package/templates/monitoring/vector/vector.toml/vector.yaml +77 -0
- package/templates/monitoring/vector/vector.yaml +106 -0
- package/templates/openmemory/Dockerfile +19 -0
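
Most of the package is deployment templates (Docker Compose, a Helm chart, Ansible playbooks, and a Prometheus/Grafana/Vector monitoring stack), shipped twice: once under package/templates and again under package/dist/templates. The CLI surface itself is small. Judging from the command registrations in dist/index.js at the end of this diff, and from the usage hints the commands print (which refer to npx @cybermem/mcp even though the package is published as @cybermem/cli), invocations look roughly like this sketch:

    npx @cybermem/mcp                                    # deploy locally via Docker Compose (default command)
    npx @cybermem/mcp --rpi --host pi@raspberrypi.local --remote-access
    npx @cybermem/mcp backup
    npx @cybermem/mcp restore cybermem-backup-<timestamp>.tar.gz
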
package/README.md
ADDED

package/dist/commands/__tests__/backup.test.js
ADDED
@@ -0,0 +1,75 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const execa_1 = __importDefault(require("execa"));
+const fs_1 = __importDefault(require("fs"));
+const backup_1 = require("../backup");
+// Mock dependencies
+jest.mock('execa');
+jest.mock('fs');
+jest.mock('path', () => ({
+    ...jest.requireActual('path'),
+    resolve: jest.fn((...args) => args.join('/')),
+    basename: jest.fn((p) => p.split('/').pop() || ''),
+    dirname: jest.fn((p) => p.split('/').slice(0, -1).join('/') || '/'),
+}));
+// Mock console to keep output clean
+const originalConsoleLog = console.log;
+const originalConsoleError = console.error;
+describe('backup command', () => {
+    beforeAll(() => {
+        console.log = jest.fn();
+        console.error = jest.fn();
+    });
+    afterAll(() => {
+        console.log = originalConsoleLog;
+        console.error = originalConsoleError;
+    });
+    beforeEach(() => {
+        jest.clearAllMocks();
+        // Default process.cwd
+        jest.spyOn(process, 'cwd').mockReturnValue('/mock/cwd');
+    });
+    it('should create a backup successfully when openmemory container exists', async () => {
+        // Mock docker inspect success
+        execa_1.default.mockResolvedValueOnce({ exitCode: 0 });
+        // Mock docker run success
+        execa_1.default.mockResolvedValueOnce({ exitCode: 0 });
+        // Mock fs.existsSync to true (file created)
+        fs_1.default.existsSync.mockReturnValue(true);
+        // Mock fs.statSync
+        fs_1.default.statSync.mockReturnValue({ size: 5 * 1024 * 1024 }); // 5MB
+        await (0, backup_1.backup)({});
+        // Expect docker inspect check
+        expect(execa_1.default).toHaveBeenCalledWith('docker', ['inspect', 'cybermem-openmemory']);
+        // Expect docker run tar
+        const tarCall = execa_1.default.mock.calls[1];
+        expect(tarCall[0]).toBe('docker');
+        expect(tarCall[1]).toContain('run');
+        expect(tarCall[1]).toContain('tar');
+        expect(tarCall[1]).toContain('czf');
+        // Check volumes from
+        expect(tarCall[1]).toContain('--volumes-from');
+        expect(tarCall[1]).toContain('cybermem-openmemory');
+    });
+    it('should fail if openmemory container does not exist', async () => {
+        // Mock inspect failure
+        execa_1.default.mockRejectedValueOnce(new Error('No such container'));
+        const exitSpy = jest.spyOn(process, 'exit').mockImplementation((() => { }));
+        await (0, backup_1.backup)({});
+        expect(console.error).toHaveBeenCalledWith(expect.stringContaining('not found'));
+        expect(exitSpy).toHaveBeenCalledWith(1);
+    });
+    it('should error if backup file is not created', async () => {
+        execa_1.default.mockResolvedValue({ exitCode: 0 }); // inspect OK
+        execa_1.default.mockResolvedValue({ exitCode: 0 }); // run OK
+        // Mock file check false
+        fs_1.default.existsSync.mockReturnValue(false);
+        const exitSpy = jest.spyOn(process, 'exit').mockImplementation((() => { }));
+        await (0, backup_1.backup)({});
+        expect(console.error).toHaveBeenCalledWith(expect.stringContaining('Backup failed'));
+        expect(exitSpy).toHaveBeenCalledWith(1);
+    });
+});

package/dist/commands/__tests__/restore.test.js
ADDED
@@ -0,0 +1,70 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const execa_1 = __importDefault(require("execa"));
+const fs_1 = __importDefault(require("fs"));
+const restore_1 = require("../restore");
+jest.mock('execa');
+jest.mock('fs');
+jest.mock('path', () => ({
+    ...jest.requireActual('path'),
+    resolve: jest.fn((...args) => args.join('/')),
+    basename: jest.fn((p) => p.split('/').pop() || ''),
+    dirname: jest.fn((p) => p.split('/').slice(0, -1).join('/') || '/'),
+}));
+const originalConsoleLog = console.log;
+const originalConsoleError = console.error;
+describe('restore command', () => {
+    beforeAll(() => {
+        console.log = jest.fn();
+        console.error = jest.fn();
+    });
+    afterAll(() => {
+        console.log = originalConsoleLog;
+        console.error = originalConsoleError;
+    });
+    beforeEach(() => {
+        jest.clearAllMocks();
+        jest.spyOn(process, 'cwd').mockReturnValue('/mock/cwd');
+    });
+    it('should restore successfully', async () => {
+        // Mock file exists
+        fs_1.default.existsSync.mockReturnValue(true);
+        // Mock docker stop (success)
+        execa_1.default.mockResolvedValueOnce({ exitCode: 0 });
+        // Mock docker run tar (success)
+        execa_1.default.mockResolvedValueOnce({ exitCode: 0 });
+        // Mock docker start (success)
+        execa_1.default.mockResolvedValueOnce({ exitCode: 0 });
+        await (0, restore_1.restore)('backup.tar.gz', {});
+        // Check docker stop
+        expect(execa_1.default).toHaveBeenCalledWith('docker', ['stop', 'cybermem-openmemory']);
+        // Check docker run tar
+        const tarCall = execa_1.default.mock.calls[1];
+        expect(tarCall[0]).toBe('docker');
+        expect(tarCall[1]).toContain('run');
+        expect(tarCall[1]).toContain('tar xzf /backup/backup.tar.gz -C / && chown -R 1001:1001 /data');
+        // Check docker start
+        expect(execa_1.default).toHaveBeenCalledWith('docker', ['start', 'cybermem-openmemory']);
+    });
+    it('should ignore docker stop error (if container not running)', async () => {
+        fs_1.default.existsSync.mockReturnValue(true);
+        // Mock docker stop FAIL
+        execa_1.default.mockRejectedValueOnce(new Error('No such container'));
+        // Mock succeeding calls
+        execa_1.default.mockResolvedValue({ exitCode: 0 });
+        await (0, restore_1.restore)('backup.tar.gz', {});
+        // Should still proceed to restore
+        expect(execa_1.default).toHaveBeenCalledWith('docker', expect.arrayContaining(['run']));
+        expect(execa_1.default).toHaveBeenCalledWith('docker', ['start', 'cybermem-openmemory']);
+    });
+    it('should fail if backup file missing', async () => {
+        fs_1.default.existsSync.mockReturnValue(false);
+        const exitSpy = jest.spyOn(process, 'exit').mockImplementation((() => { }));
+        await (0, restore_1.restore)('mia.tar.gz', {});
+        expect(console.error).toHaveBeenCalledWith(expect.stringContaining('not found'));
+        expect(exitSpy).toHaveBeenCalledWith(1);
+    });
+});

package/dist/commands/backup.js
ADDED
@@ -0,0 +1,52 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.backup = backup;
+const chalk_1 = __importDefault(require("chalk"));
+const execa_1 = __importDefault(require("execa"));
+const fs_1 = __importDefault(require("fs"));
+const path_1 = __importDefault(require("path"));
+async function backup(options) {
+    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
+    const filename = `cybermem-backup-${timestamp}.tar.gz`;
+    const outputPath = path_1.default.resolve(process.cwd(), filename);
+    console.log(chalk_1.default.blue(`📦 Creating backup: ${filename}...`));
+    try {
+        // Check if container exists
+        try {
+            await (0, execa_1.default)('docker', ['inspect', 'cybermem-openmemory']);
+        }
+        catch (e) {
+            console.error(chalk_1.default.red('Error: cybermem-openmemory container not found. Is CyberMem installed?'));
+            process.exit(1);
+        }
+        // Use a transient alpine container to tar the /data volume
+        // We mount the current working directory to /backup in the container
+        // And we use --volumes-from to access the data volume of the running service
+        const cmd = [
+            'run', '--rm',
+            '--volumes-from', 'cybermem-openmemory',
+            '-v', `${process.cwd()}:/backup`,
+            'alpine',
+            'tar', 'czf', `/backup/${filename}`, '-C', '/', 'data'
+        ];
+        console.log(chalk_1.default.gray(`Running: docker ${cmd.join(' ')}`));
+        await (0, execa_1.default)('docker', cmd, { stdio: 'inherit' });
+        if (fs_1.default.existsSync(outputPath)) {
+            const stats = fs_1.default.statSync(outputPath);
+            const sizeMb = (stats.size / 1024 / 1024).toFixed(2);
+            console.log(chalk_1.default.green(`\n✅ Backup created successfully!`));
+            console.log(`File: ${chalk_1.default.bold(outputPath)}`);
+            console.log(`Size: ${sizeMb} MB`);
+        }
+        else {
+            throw new Error('Backup file not found after generation.');
+        }
+    }
+    catch (error) {
+        console.error(chalk_1.default.red('Backup failed:'), error);
+        process.exit(1);
+    }
+}
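
For reference, the docker invocation the backup command assembles above is roughly equivalent to running the following by hand (a sketch; the real filename is timestamped by the command):

    docker run --rm --volumes-from cybermem-openmemory -v "$PWD:/backup" alpine \
        tar czf /backup/cybermem-backup-<timestamp>.tar.gz -C / data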

package/dist/commands/deploy.js
ADDED
@@ -0,0 +1,242 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.deploy = deploy;
+const chalk_1 = __importDefault(require("chalk"));
+const crypto_1 = __importDefault(require("crypto"));
+const execa_1 = __importDefault(require("execa"));
+const fs_1 = __importDefault(require("fs"));
+const inquirer_1 = __importDefault(require("inquirer"));
+const os_1 = __importDefault(require("os"));
+const path_1 = __importDefault(require("path"));
+async function deploy(options) {
+    // Determine target from flags
+    let target = 'local';
+    if (options.rpi)
+        target = 'rpi';
+    if (options.vps)
+        target = 'vps';
+    const useTailscale = options.remoteAccess;
+    console.log(chalk_1.default.blue(`Deploying CyberMem (${target})...`));
+    try {
+        // Resolve Template Directory (Support both Dev and Prod)
+        let templateDir = path_1.default.resolve(__dirname, '../../templates');
+        if (!fs_1.default.existsSync(templateDir)) {
+            templateDir = path_1.default.resolve(__dirname, '../../../templates');
+        }
+        if (!fs_1.default.existsSync(templateDir)) {
+            templateDir = path_1.default.resolve(process.cwd(), 'packages/cli/templates');
+        }
+        if (!fs_1.default.existsSync(templateDir)) {
+            // Fallback for different build structures
+            templateDir = path_1.default.resolve(__dirname, '../templates');
+        }
+        if (!fs_1.default.existsSync(templateDir)) {
+            throw new Error(`Templates not found at ${templateDir}. Please ensure package is built correctly.`);
+        }
+        if (target === 'local') {
+            const composeFile = path_1.default.join(templateDir, 'docker-compose.yml');
+            const internalEnvExample = path_1.default.join(templateDir, 'envs/local.example');
+            if (!fs_1.default.existsSync(composeFile)) {
+                console.error(chalk_1.default.red(`Internal Error: Template not found at ${composeFile}`));
+                process.exit(1);
+            }
+            // Home Directory Config
+            const homeDir = os_1.default.homedir();
+            const configDir = path_1.default.join(homeDir, '.cybermem');
+            const envFile = path_1.default.join(configDir, '.env');
+            const dataDir = path_1.default.join(configDir, 'data');
+            // 1. Ensure ~/.cybermem exists
+            if (!fs_1.default.existsSync(configDir)) {
+                fs_1.default.mkdirSync(configDir, { recursive: true });
+                fs_1.default.mkdirSync(dataDir, { recursive: true });
+            }
+            // 2. Local Mode: Simplified setup without mandatory API key
+            if (!fs_1.default.existsSync(envFile)) {
+                console.log(chalk_1.default.yellow(`Initializing local configuration in ${configDir}...`));
+                const envContent = fs_1.default.readFileSync(internalEnvExample, 'utf-8');
+                fs_1.default.writeFileSync(envFile, envContent);
+                console.log(chalk_1.default.green(`Created .env at ${envFile}`));
+            }
+            console.log(chalk_1.default.blue('Starting CyberMem services in Local Mode...'));
+            await (0, execa_1.default)('docker-compose', [
+                '-f', composeFile,
+                '--env-file', envFile,
+                '--project-name', 'cybermem',
+                'up', '-d', '--remove-orphans'
+            ], {
+                stdio: 'inherit',
+                env: {
+                    ...process.env,
+                    DATA_DIR: dataDir,
+                    CYBERMEM_ENV_PATH: envFile,
+                    OM_API_KEY: ''
+                }
+            });
+            console.log(chalk_1.default.green('\n🎉 CyberMem Installed!'));
+            console.log('');
+            console.log(chalk_1.default.bold('Next Steps:'));
+            console.log(` 1. Open ${chalk_1.default.underline('http://localhost:3000/client-connect')} to connect your MCP clients`);
+            console.log(` 2. Default password: ${chalk_1.default.bold('admin')} (you'll be prompted to change it)`);
+            console.log('');
+            console.log(chalk_1.default.dim('Local mode is active: No API key required for connections from this laptop.'));
+        }
+        else if (target === 'rpi') {
+            const composeFile = path_1.default.join(templateDir, 'docker-compose.yml');
+            const internalEnvExample = path_1.default.join(templateDir, 'envs/rpi.example');
+            let sshHost = options.host;
+            if (!sshHost) {
+                const answers = await inquirer_1.default.prompt([
+                    {
+                        type: 'input',
+                        name: 'host',
+                        message: 'Enter SSH Host (e.g. pi@raspberrypi.local):',
+                        validate: (input) => input.includes('@') ? true : 'Format must be user@host'
+                    }
+                ]);
+                sshHost = answers.host;
+            }
+            console.log(chalk_1.default.blue(`Remote deploying to ${sshHost}...`));
+            // 1. Create remote directory
+            await (0, execa_1.default)('ssh', [sshHost, 'mkdir -p ~/.cybermem/data']);
+            // 1.5 Check and fix Docker architecture (64-bit kernel with 32-bit Docker)
+            console.log(chalk_1.default.blue('Checking Docker architecture...'));
+            try {
+                const { stdout: kernelArch } = await (0, execa_1.default)('ssh', [sshHost, 'uname -m']);
+                const { stdout: dockerArch } = await (0, execa_1.default)('ssh', [sshHost, 'docker version --format "{{.Server.Arch}}" 2>/dev/null || echo "unknown"']);
+                if (kernelArch.trim() === 'aarch64' && dockerArch.trim() !== 'arm64') {
+                    console.log(chalk_1.default.yellow(`⚠️ Docker is ${dockerArch.trim()}, kernel is aarch64. Installing arm64 Docker...`));
+                    const installCmd = `
+            sudo systemctl stop docker docker.socket 2>/dev/null || true
+            curl -fsSL https://download.docker.com/linux/static/stable/aarch64/docker-27.5.1.tgz -o /tmp/docker.tgz
+            sudo tar -xzf /tmp/docker.tgz -C /usr/local/bin --strip-components=1
+            sudo /usr/local/bin/dockerd &
+            sleep 5
+            docker version --format "{{.Server.Arch}}"
+          `;
+                    const { stdout } = await (0, execa_1.default)('ssh', [sshHost, installCmd], { shell: true });
+                    if (stdout.includes('arm64')) {
+                        console.log(chalk_1.default.green('✅ Docker arm64 installed successfully'));
+                    }
+                    else {
+                        console.log(chalk_1.default.yellow('⚠️ Docker arm64 install may need manual verification'));
+                    }
+                }
+                else if (dockerArch.trim() === 'arm64') {
+                    console.log(chalk_1.default.green(`✅ Docker is already arm64`));
+                }
+                else {
+                    console.log(chalk_1.default.gray(`Docker arch: ${dockerArch.trim()}, kernel: ${kernelArch.trim()}`));
+                }
+            }
+            catch (e) {
+                console.log(chalk_1.default.yellow(`⚠️ Docker arch check skipped: ${e.message}`));
+            }
+            // 2. Initial Env Setup (if missing)
+            try {
+                await (0, execa_1.default)('ssh', [sshHost, '[ -f ~/.cybermem/.env ]']);
+                console.log(chalk_1.default.gray('Remote .env exists, skipping generation.'));
+            }
+            catch (e) {
+                console.log(chalk_1.default.yellow('Generating remote .env...'));
+                let envContent = fs_1.default.readFileSync(internalEnvExample, 'utf-8');
+                const newKey = `sk-${crypto_1.default.randomBytes(16).toString('hex')}`;
+                // Replace OM_API_KEY with generated key
+                if (envContent.includes('OM_API_KEY=')) {
+                    envContent = envContent.replace(/OM_API_KEY=.*/, `OM_API_KEY=${newKey}`);
+                }
+                else {
+                    envContent += `\nOM_API_KEY=${newKey}\n`;
+                }
+                const tempEnv = path_1.default.join(os_1.default.tmpdir(), 'cybermem-rpi.env');
+                fs_1.default.writeFileSync(tempEnv, envContent);
+                await (0, execa_1.default)('scp', [tempEnv, `${sshHost}:~/.cybermem/.env`]);
+                fs_1.default.unlinkSync(tempEnv);
+            }
+            // 3. Copy Docker Compose
+            console.log(chalk_1.default.blue('Uploading templates...'));
+            await (0, execa_1.default)('scp', [composeFile, `${sshHost}:~/.cybermem/docker-compose.yml`]);
+            // 4. Run Docker Compose Remotely
+            console.log(chalk_1.default.blue('Starting services on RPi...'));
+            // DOCKER_DEFAULT_PLATFORM=linux/arm64 forces arm64 images on RPi with 64-bit kernel but 32-bit Docker
+            const remoteCmd = `
+        export CYBERMEM_ENV_PATH=~/.cybermem/.env
+        export DATA_DIR=~/.cybermem/data
+        export DOCKER_DEFAULT_PLATFORM=linux/arm64
+        docker-compose -f ~/.cybermem/docker-compose.yml up -d --remove-orphans
+      `;
+            await (0, execa_1.default)('ssh', [sshHost, remoteCmd], { stdio: 'inherit' });
+            console.log(chalk_1.default.green('\n✅ RPi deployment successful!'));
+            const hostIp = sshHost.split('@')[1];
+            console.log(chalk_1.default.bold('Access Points (LAN):'));
+            console.log(` - Dashboard: ${chalk_1.default.underline(`http://${hostIp}:3000`)} (admin/admin)`);
+            console.log(` - OpenMemory: ${chalk_1.default.underline(`http://${hostIp}:8080`)}`);
+            // Tailscale Funnel setup
+            if (useTailscale) {
+                console.log(chalk_1.default.blue('\n🔗 Setting up Remote Access (Tailscale Funnel)...'));
+                try {
+                    try {
+                        await (0, execa_1.default)('ssh', [sshHost, 'which tailscale']);
+                    }
+                    catch (e) {
+                        console.log(chalk_1.default.yellow(' Tailscale not found. Installing...'));
+                        await (0, execa_1.default)('ssh', [sshHost, 'curl -fsSL https://tailscale.com/install.sh | sh'], { stdio: 'inherit' });
+                    }
+                    console.log(chalk_1.default.blue(' Ensuring Tailscale is up...'));
+                    try {
+                        await (0, execa_1.default)('ssh', [sshHost, 'tailscale status']);
+                    }
+                    catch (e) {
+                        console.log(chalk_1.default.yellow(' ⚠️ Tailscale authentication required. Please follow the prompts:'));
+                        await (0, execa_1.default)('ssh', [sshHost, 'sudo tailscale up'], { stdio: 'inherit' });
+                    }
+                    console.log(chalk_1.default.blue(' Configuring HTTPS Funnel (requires sudo access)...'));
+                    console.log(chalk_1.default.gray(' You may be prompted for your RPi password.'));
+                    await (0, execa_1.default)('ssh', ['-t', sshHost, 'sudo tailscale serve reset'], { stdio: 'inherit' }).catch(() => { });
+                    await (0, execa_1.default)('ssh', ['-t', sshHost, 'sudo tailscale serve --bg --set-path /cybermem http://127.0.0.1:8626'], { stdio: 'inherit' });
+                    await (0, execa_1.default)('ssh', ['-t', sshHost, 'sudo tailscale serve --bg http://127.0.0.1:3000'], { stdio: 'inherit' });
+                    await (0, execa_1.default)('ssh', ['-t', sshHost, 'sudo tailscale funnel --bg 443'], { stdio: 'inherit' });
+                    const { stdout } = await (0, execa_1.default)('ssh', [sshHost, "tailscale status --json | jq -r '.Self.DNSName' | sed 's/\\.$//'"]);
+                    const dnsName = stdout.trim();
+                    console.log(chalk_1.default.green('\n🌐 Remote Access Active (HTTPS):'));
+                    console.log(` - Dashboard: ${chalk_1.default.underline(`https://${dnsName}/`)}`);
+                    console.log(` - MCP API: ${chalk_1.default.underline(`https://${dnsName}/cybermem/mcp`)}`);
+                }
+                catch (e) {
+                    console.log(chalk_1.default.red('\n❌ Remote Access setup failed:'));
+                    console.error(e);
+                    console.log(chalk_1.default.gray('Manual setup: curl -fsSL https://tailscale.com/install.sh | sh && sudo tailscale up'));
+                }
+            }
+            else {
+                console.log(chalk_1.default.gray('\n💡 For remote access, re-run with: npx @cybermem/mcp --rpi --remote-access'));
+            }
+        }
+        else if (target === 'vps') {
+            console.log(chalk_1.default.yellow('VPS deployment is similar to RPi.'));
+            console.log(chalk_1.default.blue('\n📋 VPS Deployment Steps:'));
+            console.log('1. Run: npx @cybermem/mcp --rpi --host user@your-vps-ip');
+            console.log('2. For HTTPS, choose one of:');
+            console.log(chalk_1.default.gray(' a) Tailscale Funnel: --remote-access flag'));
+            console.log(chalk_1.default.gray(' b) Caddy (recommended for public VPS):'));
+            console.log(chalk_1.default.gray(' - Install Caddy: sudo apt install caddy'));
+            console.log(chalk_1.default.gray(' - Configure /etc/caddy/Caddyfile:'));
+            console.log(chalk_1.default.cyan(`
+  cybermem.yourdomain.com {
+      reverse_proxy localhost:3000
+  }
+  api.cybermem.yourdomain.com {
+      reverse_proxy localhost:8080
+  }
+  `));
+            console.log(chalk_1.default.gray(' - Restart: sudo systemctl restart caddy'));
+            console.log(chalk_1.default.green('\n📚 Full docs: https://cybermem.dev/docs#https'));
+        }
+    }
+    catch (error) {
+        console.error(chalk_1.default.red('Deployment failed:'), error);
+        process.exit(1);
+    }
+}
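
The local target above boils down to one docker-compose call with its state kept under ~/.cybermem; a rough hand-run equivalent, with <templateDir> standing in for the template directory the CLI resolves inside the installed package:

    DATA_DIR="$HOME/.cybermem/data" CYBERMEM_ENV_PATH="$HOME/.cybermem/.env" OM_API_KEY="" \
        docker-compose -f <templateDir>/docker-compose.yml \
            --env-file "$HOME/.cybermem/.env" \
            --project-name cybermem up -d --remove-orphans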

package/dist/commands/init.js
ADDED
@@ -0,0 +1,65 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.initCommand = void 0;
+const chalk_1 = __importDefault(require("chalk"));
+const commander_1 = require("commander");
+const fs_1 = __importDefault(require("fs"));
+const inquirer_1 = __importDefault(require("inquirer"));
+const path_1 = __importDefault(require("path"));
+exports.initCommand = new commander_1.Command('init')
+    .description('Initialize CyberMem configuration')
+    .action(async () => {
+    console.log(chalk_1.default.bold.blue('🤖 CyberMem Setup Wizard'));
+    const { target } = await inquirer_1.default.prompt([
+        {
+            type: 'list',
+            name: 'target',
+            message: 'Where do you want to deploy?',
+            choices: [
+                { name: 'Local (Docker Compose)', value: 'local' },
+                { name: 'Raspberry Pi (Ansible)', value: 'rpi' },
+                { name: 'VPS (Kubernetes/Helm)', value: 'vps' }
+            ]
+        }
+    ]);
+    console.log(chalk_1.default.green(`Selected target: ${target}`));
+    if (target === 'local') {
+        const { confirm } = await inquirer_1.default.prompt([
+            {
+                type: 'confirm',
+                name: 'confirm',
+                message: 'This will create docker-compose.yml and .env in current directory. Continue?',
+                default: true
+            }
+        ]);
+        if (confirm) {
+            const templateDir = path_1.default.resolve(__dirname, '../../templates');
+            const envSrc = path_1.default.join(templateDir, 'envs/local.example');
+            if (!fs_1.default.existsSync(envSrc)) {
+                console.error(chalk_1.default.red(`Template not found at ${envSrc}.`));
+                return;
+            }
+            // Generate .env
+            let envContent = fs_1.default.readFileSync(envSrc, 'utf-8');
+            const crypto = require('crypto');
+            const newKey = `sk-${crypto.randomBytes(16).toString('hex')}`;
+            // Replace placeholder or key
+            if (envContent.includes('key-change-me') || envContent.includes('OM_API_KEY=')) {
+                envContent = envContent.replace(/OM_API_KEY=.*/, `OM_API_KEY=${newKey}`);
+            }
+            else {
+                envContent += `\nOM_API_KEY=${newKey}\n`;
+            }
+            fs_1.default.writeFileSync('.env', envContent);
+            console.log(chalk_1.default.gray('Created .env with generated API Key'));
+            console.log(chalk_1.default.gray('(Docker Compose configuration is managed internally by the CLI)'));
+            console.log(chalk_1.default.green('\nInitialization complete! Run "cybermem deploy" to start.'));
+        }
+    }
+    else {
+        console.log(chalk_1.default.yellow('Target init not fully implemented in CLI yet.'));
+    }
+});
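
Despite the confirmation prompt mentioning docker-compose.yml, the init action only writes .env into the current directory; as its final log line notes, the compose file stays inside the package. The generated key line takes this shape (illustrative value; 32 hex characters from crypto.randomBytes(16)):

    OM_API_KEY=sk-9f2c4e6a8b0d1f3a5c7e9b1d3f5a7c9e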

package/dist/commands/restore.js
ADDED
@@ -0,0 +1,62 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.restore = restore;
+const chalk_1 = __importDefault(require("chalk"));
+const execa_1 = __importDefault(require("execa"));
+const fs_1 = __importDefault(require("fs"));
+const path_1 = __importDefault(require("path"));
+async function restore(file, options) {
+    if (!file) {
+        console.error(chalk_1.default.red('Error: Please specify the backup file to restore.'));
+        console.log(`Usage: npx @cybermem/mcp restore <file>`);
+        process.exit(1);
+    }
+    const backupPath = path_1.default.resolve(process.cwd(), file);
+    if (!fs_1.default.existsSync(backupPath)) {
+        console.error(chalk_1.default.red(`Error: File not found at ${backupPath}`));
+        process.exit(1);
+    }
+    console.log(chalk_1.default.blue(`♻️ Restoring from: ${path_1.default.basename(backupPath)}...`));
+    console.log(chalk_1.default.yellow('⚠️ This will overwrite current data!'));
+    try {
+        // 1. Stop the OpenMemory service to safely write to DB
+        console.log(chalk_1.default.blue('Stopping OpenMemory service...'));
+        try {
+            await (0, execa_1.default)('docker', ['stop', 'cybermem-openmemory']);
+        }
+        catch (e) {
+            console.log(chalk_1.default.gray('Container not running (or not found), proceeding...'));
+        }
+        // 2. Restore data using transient alpine container
+        console.log(chalk_1.default.blue('Extracting data to volume...'));
+        // We handle both absolute paths (by mounting dir) or relative
+        const dir = path_1.default.dirname(backupPath);
+        const filename = path_1.default.basename(backupPath);
+        const cmd = [
+            'run', '--rm',
+            '--volumes-from', 'cybermem-openmemory', // Access the volume even if container is stopped
+            '-v', `${dir}:/backup`,
+            'alpine',
+            'sh', '-c',
+            // Extract to root / (since backup was relative to /data we need to be careful how it was packed)
+            // In backup we did: tar czf ... -C / data
+            // So it contains "data/..."
+            // Extracting to / will put it in /data
+            `tar xzf /backup/${filename} -C / && chown -R 1001:1001 /data`
+        ];
+        await (0, execa_1.default)('docker', cmd, { stdio: 'inherit' });
+        // 3. Restart the service
+        console.log(chalk_1.default.blue('Restarting OpenMemory service...'));
+        await (0, execa_1.default)('docker', ['start', 'cybermem-openmemory']);
+        console.log(chalk_1.default.green(`\n✅ Restore completed successfully!`));
+        console.log('Your memory has been recovered.');
+    }
+    catch (error) {
+        console.error(chalk_1.default.red('Restore failed:'), error);
+        console.log(chalk_1.default.yellow('Suggestion: Check if Docker is running and "cybermem-openmemory" container exists.'));
+        process.exit(1);
+    }
+}
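
As with backup, the restore path reduces to a stop, one transient alpine container, and a start; roughly, assuming the archive sits in the current directory:

    docker stop cybermem-openmemory
    docker run --rm --volumes-from cybermem-openmemory -v "$PWD:/backup" alpine \
        sh -c 'tar xzf /backup/<file> -C / && chown -R 1001:1001 /data'
    docker start cybermem-openmemory
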
package/dist/index.js
ADDED
@@ -0,0 +1,30 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const commander_1 = require("commander");
+const backup_1 = require("./commands/backup");
+const deploy_1 = require("./commands/deploy");
+const restore_1 = require("./commands/restore");
+const program = new commander_1.Command();
+program
+    .name('mcp')
+    .description('CyberMem - Deploy your AI memory server in one command')
+    .version('1.0.0');
+// Default Command: Deploy
+program
+    .command('deploy', { isDefault: true })
+    .description('Deploy CyberMem (Default)')
+    .option('--rpi', 'Deploy to Raspberry Pi (default: local)')
+    .option('--vps', 'Deploy to VPS/Cloud server')
+    .option('-h, --host <host>', 'SSH Host (user@ip) for remote deployment')
+    .option('--remote-access', 'Enable Tailscale Funnel for HTTPS remote access')
+    .action(deploy_1.deploy);
+program
+    .command('backup')
+    .description('Backup CyberMem data to a tarball')
+    .action(backup_1.backup);
+program
+    .command('restore')
+    .description('Restore CyberMem data from a backup file')
+    .argument('<file>', 'Backup file to restore')
+    .action(restore_1.restore);
+program.parse(process.argv);