@aifabrix/server-setup 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/README.md +68 -0
  2. package/assets/builder/builder-server/nginx-builder-server.conf.template +26 -0
  3. package/assets/cron-backup.sh +25 -0
  4. package/assets/setup-dev-server-no-node.sh +227 -0
  5. package/dist/backup-db.d.ts +13 -0
  6. package/dist/backup-db.js +125 -0
  7. package/dist/backup-db.spec.d.ts +5 -0
  8. package/dist/backup-db.spec.js +260 -0
  9. package/dist/backup-schedule.d.ts +17 -0
  10. package/dist/backup-schedule.js +60 -0
  11. package/dist/backup.d.ts +15 -0
  12. package/dist/backup.js +184 -0
  13. package/dist/backup.spec.d.ts +4 -0
  14. package/dist/backup.spec.js +199 -0
  15. package/dist/cli.d.ts +6 -0
  16. package/dist/cli.js +170 -0
  17. package/dist/config.d.ts +17 -0
  18. package/dist/config.js +9 -0
  19. package/dist/config.spec.d.ts +4 -0
  20. package/dist/config.spec.js +41 -0
  21. package/dist/install.d.ts +19 -0
  22. package/dist/install.js +74 -0
  23. package/dist/local-pubkey.d.ts +13 -0
  24. package/dist/local-pubkey.js +35 -0
  25. package/dist/local-pubkey.spec.d.ts +4 -0
  26. package/dist/local-pubkey.spec.js +64 -0
  27. package/dist/restore.d.ts +17 -0
  28. package/dist/restore.js +101 -0
  29. package/dist/restore.spec.d.ts +4 -0
  30. package/dist/restore.spec.js +215 -0
  31. package/dist/ssh-cert.d.ts +18 -0
  32. package/dist/ssh-cert.js +92 -0
  33. package/dist/ssh-cert.spec.d.ts +4 -0
  34. package/dist/ssh-cert.spec.js +101 -0
  35. package/dist/ssh.d.ts +27 -0
  36. package/dist/ssh.js +122 -0
  37. package/dist/ssh.spec.d.ts +4 -0
  38. package/dist/ssh.spec.js +31 -0
  39. package/dist/ubuntu.d.ts +7 -0
  40. package/dist/ubuntu.js +33 -0
  41. package/dist/ubuntu.spec.d.ts +4 -0
  42. package/dist/ubuntu.spec.js +56 -0
  43. package/package.json +48 -0
@@ -0,0 +1,260 @@
1
+ /**
2
+ * Unit tests for backup-db: createBackupDbFromJson and copyBuilderDbAsBackup.
3
+ * Uses temp dirs and in-memory SQLite where possible; no real secrets.
4
+ */
5
+ import * as fs from 'fs';
6
+ import * as path from 'path';
7
+ import Database from 'better-sqlite3';
8
+ import { createBackupDbFromJson, copyBuilderDbAsBackup, } from './backup-db.js';
9
+ const TEST_TMP_BASE = path.join(process.cwd(), 'tmp');
10
+ const config = {
11
+ dataDir: path.join(TEST_TMP_BASE, 'test-data'),
12
+ createdAt: new Date().toISOString(),
13
+ source: 'json',
14
+ };
15
+ describe('backup-db', () => {
16
+ let tmpDir;
17
+ beforeEach(() => {
18
+ fs.mkdirSync(TEST_TMP_BASE, { recursive: true });
19
+ tmpDir = fs.mkdtempSync(path.join(TEST_TMP_BASE, 'af-backup-db-'));
20
+ });
21
+ afterEach(() => {
22
+ fs.rmSync(tmpDir, { recursive: true, force: true });
23
+ });
24
+ describe('createBackupDbFromJson', () => {
25
+ it('creates empty DB when no json files provided', () => {
26
+ const outPath = path.join(tmpDir, 'out.db');
27
+ createBackupDbFromJson(outPath, {}, config);
28
+ expect(fs.existsSync(outPath)).toBe(true);
29
+ const db = new Database(outPath, { readonly: true });
30
+ const users = db.prepare('SELECT COUNT(*) as n FROM users').get();
31
+ expect(users.n).toBe(0);
32
+ db.close();
33
+ });
34
+ it('imports users from users.json shape', () => {
35
+ const outPath = path.join(tmpDir, 'out.db');
36
+ const usersJson = JSON.stringify({
37
+ users: [
38
+ { id: '01', name: 'Alice', email: 'a@b.com', createdAt: '2025-01-01T00:00:00Z' },
39
+ ],
40
+ });
41
+ createBackupDbFromJson(outPath, { users: usersJson }, config);
42
+ const db = new Database(outPath, { readonly: true });
43
+ const row = db.prepare('SELECT id, name, email FROM users').get();
44
+ expect(row).toEqual({ id: '01', name: 'Alice', email: 'a@b.com' });
45
+ db.close();
46
+ });
47
+ it('imports users with cert_valid_not_after and groups (snake_case created_at)', () => {
48
+ const outPath = path.join(tmpDir, 'out.db');
49
+ const usersJson = JSON.stringify({
50
+ users: [
51
+ {
52
+ id: '02',
53
+ name: 'Bob',
54
+ email: 'b@b.com',
55
+ created_at: '2024-06-15T00:00:00Z',
56
+ certValidNotAfter: '2026-01-01',
57
+ groups: ['admin'],
58
+ },
59
+ ],
60
+ });
61
+ createBackupDbFromJson(outPath, { users: usersJson }, config);
62
+ const db = new Database(outPath, { readonly: true });
63
+ const row = db.prepare('SELECT id, cert_valid_not_after, groups FROM users').get();
64
+ expect(row.id).toBe('02');
65
+ expect(row.cert_valid_not_after).toBe('2026-01-01');
66
+ expect(JSON.parse(row.groups)).toEqual(['admin']);
67
+ db.close();
68
+ });
69
+ it('imports users when root is array (no .users key)', () => {
70
+ const outPath = path.join(tmpDir, 'out.db');
71
+ const usersJson = JSON.stringify([
72
+ { id: '03', name: 'C', email: 'c@b.com', created_at: '2025-01-01' },
73
+ ]);
74
+ createBackupDbFromJson(outPath, { users: usersJson }, config);
75
+ const db = new Database(outPath, { readonly: true });
76
+ const row = db.prepare('SELECT id, name FROM users').get();
77
+ expect(row).toEqual({ id: '03', name: 'C' });
78
+ db.close();
79
+ });
80
+ it('imports pins from tokens.json shape', () => {
81
+ const outPath = path.join(tmpDir, 'out.db');
82
+ const tokensJson = JSON.stringify({
83
+ pins: [
84
+ {
85
+ userId: '01',
86
+ pinHash: 'abc123',
87
+ expiresAt: '2025-12-31T23:59:59Z',
88
+ consumed: false,
89
+ createdAt: '2025-01-01T00:00:00Z',
90
+ },
91
+ ],
92
+ });
93
+ createBackupDbFromJson(outPath, { tokens: tokensJson }, config);
94
+ const db = new Database(outPath, { readonly: true });
95
+ const row = db.prepare('SELECT user_id, pin_hash, consumed FROM pin_tokens').get();
96
+ expect(row.user_id).toBe('01');
97
+ expect(row.pin_hash).toBe('abc123');
98
+ expect(row.consumed).toBe(0);
99
+ db.close();
100
+ });
101
+ it('imports pins with snake_case and consumed true', () => {
102
+ const outPath = path.join(tmpDir, 'out.db');
103
+ const tokensJson = JSON.stringify({
104
+ pins: [
105
+ {
106
+ user_id: '02',
107
+ pin_hash: 'def',
108
+ expires_at: '2025-12-31T00:00:00Z',
109
+ consumed: true,
110
+ created_at: '2025-01-01T00:00:00Z',
111
+ },
112
+ ],
113
+ });
114
+ createBackupDbFromJson(outPath, { tokens: tokensJson }, config);
115
+ const db = new Database(outPath, { readonly: true });
116
+ const row = db.prepare('SELECT user_id, consumed FROM pin_tokens').get();
117
+ expect(row.user_id).toBe('02');
118
+ expect(row.consumed).toBe(1);
119
+ db.close();
120
+ });
121
+ it('imports pins when root is array (no .pins key)', () => {
122
+ const outPath = path.join(tmpDir, 'pins-array.db');
123
+ const tokensJson = JSON.stringify([
124
+ { userId: '03', pinHash: 'x', expiresAt: '2025-12-31', consumed: false, createdAt: '2025-01-01' },
125
+ ]);
126
+ createBackupDbFromJson(outPath, { tokens: tokensJson }, config);
127
+ const db = new Database(outPath, { readonly: true });
128
+ const row = db.prepare('SELECT user_id FROM pin_tokens').get();
129
+ expect(row.user_id).toBe('03');
130
+ db.close();
131
+ });
132
+ it('imports secrets from secrets.json shape (camelCase)', () => {
133
+ const outPath = path.join(tmpDir, 'out.db');
134
+ const secretsJson = JSON.stringify({
135
+ secrets: {
136
+ mykey: { iv: 'iv1', authTag: 'tag1', cipher: 'cipher1' },
137
+ },
138
+ });
139
+ createBackupDbFromJson(outPath, { secrets: secretsJson }, config);
140
+ const db = new Database(outPath, { readonly: true });
141
+ const row = db.prepare('SELECT key, iv, auth_tag, cipher FROM secrets').get();
142
+ expect(row.key).toBe('mykey');
143
+ expect(row.iv).toBe('iv1');
144
+ expect(row.auth_tag).toBe('tag1');
145
+ expect(row.cipher).toBe('cipher1');
146
+ db.close();
147
+ });
148
+ it('imports secrets from secrets.json shape (snake_case auth_tag)', () => {
149
+ const outPath = path.join(tmpDir, 'out2.db');
150
+ const secretsJson = JSON.stringify({
151
+ secrets: {
152
+ key2: { iv: 'iv2', auth_tag: 'tag2', cipher: 'cipher2' },
153
+ },
154
+ });
155
+ createBackupDbFromJson(outPath, { secrets: secretsJson }, config);
156
+ const db = new Database(outPath, { readonly: true });
157
+ const row = db.prepare('SELECT key, iv, auth_tag, cipher FROM secrets').get();
158
+ expect(row.key).toBe('key2');
159
+ expect(row.auth_tag).toBe('tag2');
160
+ db.close();
161
+ });
162
+ it('imports ssh_keys from byUser shape', () => {
163
+ const outPath = path.join(tmpDir, 'out.db');
164
+ const sshKeysJson = JSON.stringify({
165
+ byUser: {
166
+ '01': [
167
+ { publicKey: 'ssh-rsa AAAA...', label: 'laptop', fingerprint: 'fp1' },
168
+ ],
169
+ },
170
+ });
171
+ createBackupDbFromJson(outPath, { sshKeys: sshKeysJson }, config);
172
+ const db = new Database(outPath, { readonly: true });
173
+ const row = db.prepare('SELECT user_id, public_key, label, fingerprint FROM ssh_keys').get();
174
+ expect(row.user_id).toBe('01');
175
+ expect(row.public_key).toBe('ssh-rsa AAAA...');
176
+ expect(row.label).toBe('laptop');
177
+ expect(row.fingerprint).toBe('fp1');
178
+ db.close();
179
+ });
180
+ it('imports ssh_keys with snake_case and optional fields', () => {
181
+ const outPath = path.join(tmpDir, 'ssh2.db');
182
+ const sshKeysJson = JSON.stringify({
183
+ byUser: {
184
+ '02': [
185
+ { public_key: 'ssh-ed25519 BBBB...', created_at: '2025-01-01T00:00:00Z' },
186
+ ],
187
+ },
188
+ });
189
+ createBackupDbFromJson(outPath, { sshKeys: sshKeysJson }, config);
190
+ const db = new Database(outPath, { readonly: true });
191
+ const row = db.prepare('SELECT user_id, public_key, label, fingerprint, created_at FROM ssh_keys').get();
192
+ expect(row.user_id).toBe('02');
193
+ expect(row.public_key).toBe('ssh-ed25519 BBBB...');
194
+ expect(row.label).toBeNull();
195
+ expect(row.fingerprint).toBeNull();
196
+ expect(row.created_at).toBe('2025-01-01T00:00:00Z');
197
+ db.close();
198
+ });
199
+ it('imports ssh_keys when byUser missing uses empty object', () => {
200
+ const outPath = path.join(tmpDir, 'ssh-empty.db');
201
+ createBackupDbFromJson(outPath, { sshKeys: '{}' }, config);
202
+ const db = new Database(outPath, { readonly: true });
203
+ const count = db.prepare('SELECT COUNT(*) as n FROM ssh_keys').get();
204
+ expect(count.n).toBe(0);
205
+ db.close();
206
+ });
207
+ it('imports ssh_keys when entries not array treats as empty list', () => {
208
+ const outPath = path.join(tmpDir, 'ssh-not-array.db');
209
+ const sshKeysJson = JSON.stringify({ byUser: { '01': 'not-array' } });
210
+ createBackupDbFromJson(outPath, { sshKeys: sshKeysJson }, config);
211
+ const db = new Database(outPath, { readonly: true });
212
+ const count = db.prepare('SELECT COUNT(*) as n FROM ssh_keys').get();
213
+ expect(count.n).toBe(0);
214
+ db.close();
215
+ });
216
+ });
217
+ describe('copyBuilderDbAsBackup', () => {
218
+ it('copies users and pin_tokens from source db to backup', () => {
219
+ const sourcePath = path.join(tmpDir, 'source.db');
220
+ const outPath = path.join(tmpDir, 'backup.db');
221
+ const schema = `
222
+ CREATE TABLE users (id TEXT PRIMARY KEY, name TEXT NOT NULL, email TEXT NOT NULL, created_at TEXT NOT NULL, cert_valid_not_after TEXT, groups TEXT);
223
+ CREATE TABLE pin_tokens (id INTEGER PRIMARY KEY AUTOINCREMENT, user_id TEXT NOT NULL, pin_hash TEXT NOT NULL, expires_at TEXT NOT NULL, consumed INTEGER NOT NULL DEFAULT 0, created_at TEXT NOT NULL);
224
+ CREATE TABLE secrets (key TEXT PRIMARY KEY, iv TEXT NOT NULL, auth_tag TEXT NOT NULL, cipher TEXT NOT NULL);
225
+ CREATE TABLE ssh_keys (id INTEGER PRIMARY KEY AUTOINCREMENT, user_id TEXT NOT NULL, public_key TEXT NOT NULL, label TEXT, fingerprint TEXT, created_at TEXT, UNIQUE(user_id, fingerprint));
226
+ `;
227
+ const src = new Database(sourcePath);
228
+ src.exec(schema);
229
+ src.prepare("INSERT INTO users (id, name, email, created_at) VALUES ('u1', 'U1', 'u1@x.com', '2025-01-01')").run();
230
+ src.prepare("INSERT INTO pin_tokens (user_id, pin_hash, expires_at, consumed, created_at) VALUES ('u1', 'h', '2025-12-31', 0, '2025-01-01')").run();
231
+ src.close();
232
+ copyBuilderDbAsBackup(sourcePath, outPath);
233
+ const backup = new Database(outPath, { readonly: true });
234
+ const user = backup.prepare('SELECT id, name FROM users').get();
235
+ expect(user).toEqual({ id: 'u1', name: 'U1' });
236
+ const pin = backup.prepare('SELECT user_id FROM pin_tokens').get();
237
+ expect(pin.user_id).toBe('u1');
238
+ backup.close();
239
+ });
240
+ it('skips empty tables and tolerates missing tables', () => {
241
+ const sourcePath = path.join(tmpDir, 'partial.db');
242
+ const outPath = path.join(tmpDir, 'backup-partial.db');
243
+ const schema = `
244
+ CREATE TABLE users (id TEXT PRIMARY KEY, name TEXT NOT NULL, email TEXT NOT NULL, created_at TEXT NOT NULL, cert_valid_not_after TEXT, groups TEXT);
245
+ CREATE TABLE pin_tokens (id INTEGER PRIMARY KEY AUTOINCREMENT, user_id TEXT NOT NULL, pin_hash TEXT NOT NULL, expires_at TEXT NOT NULL, consumed INTEGER NOT NULL DEFAULT 0, created_at TEXT NOT NULL);
246
+ `;
247
+ const src = new Database(sourcePath);
248
+ src.exec(schema);
249
+ src.prepare("INSERT INTO users (id, name, email, created_at) VALUES ('u1', 'U1', 'u1@x.com', '2025-01-01')").run();
250
+ src.close();
251
+ copyBuilderDbAsBackup(sourcePath, outPath);
252
+ const backup = new Database(outPath, { readonly: true });
253
+ const user = backup.prepare('SELECT id FROM users').get();
254
+ expect(user.id).toBe('u1');
255
+ const pinCount = backup.prepare('SELECT COUNT(*) as n FROM pin_tokens').get();
256
+ expect(pinCount.n).toBe(0);
257
+ backup.close();
258
+ });
259
+ });
260
+ });
@@ -0,0 +1,17 @@
1
+ /**
2
+ * Install cron backup on server (or locally): upload shell script and add cron entry.
3
+ */
4
+ import { type SSHConnectionOptions } from './ssh.js';
5
+ export interface BackupScheduleOptions extends SSHConnectionOptions {
6
+ dataDir?: string;
7
+ backupDir?: string;
8
+ keepDays?: number;
9
+ }
10
+ export interface BackupScheduleLocalOptions {
11
+ dataDir?: string;
12
+ backupDir?: string;
13
+ keepDays?: number;
14
+ }
15
+ export declare function runBackupScheduleInstall(options: BackupScheduleOptions): Promise<void>;
16
+ /** Install cron backup on this machine (no SSH). Call requireUbuntu() before this. */
17
+ export declare function runBackupScheduleInstallLocal(options?: BackupScheduleLocalOptions): void;
@@ -0,0 +1,60 @@
1
+ /**
2
+ * Install cron backup on server (or locally): upload shell script and add cron entry.
3
+ */
4
+ import * as path from 'path';
5
+ import * as fs from 'fs';
6
+ import { execSync } from 'child_process';
7
+ import { fileURLToPath } from 'url';
8
+ import { createSSHClient, writeFile, exec, close } from './ssh.js';
9
+ const scriptDir = path.dirname(fileURLToPath(import.meta.url));
10
+ const ASSETS_DIR = path.resolve(scriptDir, '..', 'assets');
11
+ const DEFAULT_DATA_DIR = '/opt/aifabrix/builder-server/data';
12
+ const DEFAULT_BACKUP_DIR = '/opt/aifabrix/backups';
13
+ export async function runBackupScheduleInstall(options) {
14
+ const dataDir = options.dataDir ?? DEFAULT_DATA_DIR;
15
+ const backupDir = options.backupDir ?? DEFAULT_BACKUP_DIR;
16
+ const keepDays = options.keepDays ?? 7;
17
+ const scriptPath = path.join(ASSETS_DIR, 'cron-backup.sh');
18
+ const scriptBody = fs.readFileSync(scriptPath, 'utf8');
19
+ const conn = await createSSHClient(options);
20
+ try {
21
+ const remoteScript = '/usr/local/bin/aifabrix-cron-backup.sh';
22
+ await writeFile(conn, remoteScript, scriptBody);
23
+ await exec(conn, `sudo chmod 755 ${remoteScript}`);
24
+ await exec(conn, `sudo mkdir -p ${backupDir}`);
25
+ const cronLine = `0 2 * * * root DATA_DIR=${dataDir} BACKUP_DIR=${backupDir} KEEP_DAYS=${keepDays} ${remoteScript}\n`;
26
+ const tmpCron = '/tmp/aifabrix-backup-cron';
27
+ await writeFile(conn, tmpCron, cronLine);
28
+ await exec(conn, `sudo mv ${tmpCron} /etc/cron.d/aifabrix-backup && sudo chmod 644 /etc/cron.d/aifabrix-backup`);
29
+ }
30
+ finally {
31
+ close(conn);
32
+ }
33
+ }
34
+ /** Install cron backup on this machine (no SSH). Call requireUbuntu() before this. */
35
+ export function runBackupScheduleInstallLocal(options = {}) {
36
+ const dataDir = options.dataDir ?? DEFAULT_DATA_DIR;
37
+ const backupDir = options.backupDir ?? DEFAULT_BACKUP_DIR;
38
+ const keepDays = options.keepDays ?? 7;
39
+ const scriptPath = path.join(ASSETS_DIR, 'cron-backup.sh');
40
+ const scriptBody = fs.readFileSync(scriptPath, 'utf8');
41
+ const remoteScript = '/usr/local/bin/aifabrix-cron-backup.sh';
42
+ const tmpScript = `/tmp/aifabrix-cron-backup-${Date.now()}.sh`;
43
+ fs.writeFileSync(tmpScript, scriptBody);
44
+ try {
45
+ execSync(`sudo cp ${tmpScript} ${remoteScript} && sudo chmod 755 ${remoteScript}`);
46
+ execSync(`sudo mkdir -p ${backupDir}`);
47
+ const cronLine = `0 2 * * * root DATA_DIR=${dataDir} BACKUP_DIR=${backupDir} KEEP_DAYS=${keepDays} ${remoteScript}\n`;
48
+ const tmpCron = '/tmp/aifabrix-backup-cron';
49
+ fs.writeFileSync(tmpCron, cronLine);
50
+ execSync(`sudo mv ${tmpCron} /etc/cron.d/aifabrix-backup && sudo chmod 644 /etc/cron.d/aifabrix-backup`);
51
+ }
52
+ finally {
53
+ try {
54
+ fs.unlinkSync(tmpScript);
55
+ }
56
+ catch {
57
+ // ignore
58
+ }
59
+ }
60
+ }
@@ -0,0 +1,15 @@
1
+ /**
2
+ * On-demand backup: SSH to server, fetch config + builder.db or JSON + keys, build zip locally.
3
+ */
4
+ import { type SSHConnectionOptions } from './ssh.js';
5
+ export interface BackupOptions extends SSHConnectionOptions {
6
+ dataDir?: string;
7
+ outputPath?: string;
8
+ }
9
+ export interface BackupLocalOptions {
10
+ dataDir?: string;
11
+ outputPath?: string;
12
+ }
13
+ export declare function runBackup(options: BackupOptions): Promise<string>;
14
+ /** Run backup from local DATA_DIR (no SSH). Call requireUbuntu() before this. */
15
+ export declare function runBackupLocal(options?: BackupLocalOptions): Promise<string>;
package/dist/backup.js ADDED
@@ -0,0 +1,184 @@
1
+ /**
2
+ * On-demand backup: SSH to server, fetch config + builder.db or JSON + keys, build zip locally.
3
+ */
4
+ import * as path from 'path';
5
+ import * as fs from 'fs';
6
+ import archiver from 'archiver';
7
+ import { createSSHClient, readFile, exec, close } from './ssh.js';
8
+ import { DATA_DIR_DEFAULT, BUILDER_DB, BACKUP_DB, CONFIG_JSON, KEY_FILES, JSON_FILES, } from './config.js';
9
+ import { createBackupDbFromJson } from './backup-db.js';
10
+ function defaultOutputPath() {
11
+ const now = new Date();
12
+ const y = now.getFullYear();
13
+ const m = String(now.getMonth() + 1).padStart(2, '0');
14
+ const d = String(now.getDate()).padStart(2, '0');
15
+ const h = String(now.getHours()).padStart(2, '0');
16
+ const min = String(now.getMinutes()).padStart(2, '0');
17
+ return `./aifabrix-backup-${y}${m}${d}-${h}${min}.zip`;
18
+ }
19
+ export async function runBackup(options) {
20
+ const dataDir = options.dataDir ?? DATA_DIR_DEFAULT;
21
+ const outputPath = options.outputPath ?? defaultOutputPath();
22
+ const resolvedOut = path.resolve(process.cwd(), outputPath);
23
+ const conn = await createSSHClient(options);
24
+ try {
25
+ const config = {
26
+ dataDir,
27
+ createdAt: new Date().toISOString(),
28
+ source: 'builder.db',
29
+ };
30
+ const tmpDir = path.join(process.cwd(), `.af-backup-${Date.now()}`);
31
+ fs.mkdirSync(tmpDir, { recursive: true });
32
+ const dbPath = path.join(tmpDir, BACKUP_DB);
33
+ try {
34
+ const hasBuilderDb = await exec(conn, `test -f ${dataDir}/${BUILDER_DB} && echo 1 || echo 0`);
35
+ const useSqlite = hasBuilderDb.stdout.trim() === '1';
36
+ if (useSqlite) {
37
+ const buf = await readFile(conn, `${dataDir}/${BUILDER_DB}`);
38
+ fs.writeFileSync(dbPath, buf);
39
+ config.source = 'builder.db';
40
+ }
41
+ else {
42
+ const jsonFiles = {};
43
+ for (const f of JSON_FILES) {
44
+ try {
45
+ const buf = await readFile(conn, `${dataDir}/${f}`);
46
+ const str = buf.toString('utf8');
47
+ if (f === 'users.json')
48
+ jsonFiles.users = str;
49
+ else if (f === 'tokens.json')
50
+ jsonFiles.tokens = str;
51
+ else if (f === 'secrets.json')
52
+ jsonFiles.secrets = str;
53
+ else if (f === 'ssh-public-keys.json')
54
+ jsonFiles.sshKeys = str;
55
+ }
56
+ catch {
57
+ if (f === 'users.json')
58
+ jsonFiles.users = '{"users":[]}';
59
+ else if (f === 'tokens.json')
60
+ jsonFiles.tokens = '{"pins":[]}';
61
+ else if (f === 'secrets.json')
62
+ jsonFiles.secrets = '{"secrets":{}}';
63
+ else if (f === 'ssh-public-keys.json')
64
+ jsonFiles.sshKeys = '{"byUser":{}}';
65
+ }
66
+ }
67
+ createBackupDbFromJson(dbPath, jsonFiles, config);
68
+ config.source = 'json';
69
+ }
70
+ fs.writeFileSync(path.join(tmpDir, CONFIG_JSON), JSON.stringify(config, null, 2));
71
+ for (const k of KEY_FILES) {
72
+ try {
73
+ const buf = await readFile(conn, `${dataDir}/${k}`);
74
+ fs.writeFileSync(path.join(tmpDir, k), buf, { mode: 0o600 });
75
+ }
76
+ catch {
77
+ // key file may not exist
78
+ }
79
+ }
80
+ const archive = archiver('zip', { zlib: { level: 6 } });
81
+ const out = fs.createWriteStream(resolvedOut);
82
+ const pipe = new Promise((resolve, reject) => {
83
+ out.on('close', () => resolve());
84
+ archive.on('error', reject);
85
+ });
86
+ archive.pipe(out);
87
+ archive.file(path.join(tmpDir, BACKUP_DB), { name: BACKUP_DB });
88
+ archive.file(path.join(tmpDir, CONFIG_JSON), { name: CONFIG_JSON });
89
+ for (const k of KEY_FILES) {
90
+ const fp = path.join(tmpDir, k);
91
+ if (fs.existsSync(fp))
92
+ archive.file(fp, { name: k });
93
+ }
94
+ await archive.finalize();
95
+ await pipe;
96
+ }
97
+ finally {
98
+ fs.rmSync(tmpDir, { recursive: true, force: true });
99
+ }
100
+ return resolvedOut;
101
+ }
102
+ finally {
103
+ close(conn);
104
+ }
105
+ }
106
+ /** Run backup from local DATA_DIR (no SSH). Call requireUbuntu() before this. */
107
+ export async function runBackupLocal(options = {}) {
108
+ const dataDir = options.dataDir ?? DATA_DIR_DEFAULT;
109
+ const outputPath = options.outputPath ?? defaultOutputPath();
110
+ const resolvedOut = path.resolve(process.cwd(), outputPath);
111
+ const config = {
112
+ dataDir,
113
+ createdAt: new Date().toISOString(),
114
+ source: 'builder.db',
115
+ };
116
+ const tmpDir = path.join(process.cwd(), `.af-backup-${Date.now()}`);
117
+ fs.mkdirSync(tmpDir, { recursive: true });
118
+ const dbPath = path.join(tmpDir, BACKUP_DB);
119
+ try {
120
+ const builderDbPath = path.join(dataDir, BUILDER_DB);
121
+ const useSqlite = fs.existsSync(builderDbPath);
122
+ if (useSqlite) {
123
+ fs.copyFileSync(builderDbPath, dbPath);
124
+ config.source = 'builder.db';
125
+ }
126
+ else {
127
+ const jsonFiles = {};
128
+ for (const f of JSON_FILES) {
129
+ const fp = path.join(dataDir, f);
130
+ try {
131
+ const str = fs.readFileSync(fp, 'utf8');
132
+ if (f === 'users.json')
133
+ jsonFiles.users = str;
134
+ else if (f === 'tokens.json')
135
+ jsonFiles.tokens = str;
136
+ else if (f === 'secrets.json')
137
+ jsonFiles.secrets = str;
138
+ else if (f === 'ssh-public-keys.json')
139
+ jsonFiles.sshKeys = str;
140
+ }
141
+ catch {
142
+ if (f === 'users.json')
143
+ jsonFiles.users = '{"users":[]}';
144
+ else if (f === 'tokens.json')
145
+ jsonFiles.tokens = '{"pins":[]}';
146
+ else if (f === 'secrets.json')
147
+ jsonFiles.secrets = '{"secrets":{}}';
148
+ else if (f === 'ssh-public-keys.json')
149
+ jsonFiles.sshKeys = '{"byUser":{}}';
150
+ }
151
+ }
152
+ createBackupDbFromJson(dbPath, jsonFiles, config);
153
+ config.source = 'json';
154
+ }
155
+ fs.writeFileSync(path.join(tmpDir, CONFIG_JSON), JSON.stringify(config, null, 2));
156
+ for (const k of KEY_FILES) {
157
+ const fp = path.join(dataDir, k);
158
+ if (fs.existsSync(fp)) {
159
+ fs.copyFileSync(fp, path.join(tmpDir, k));
160
+ fs.chmodSync(path.join(tmpDir, k), 0o600);
161
+ }
162
+ }
163
+ const archive = archiver('zip', { zlib: { level: 6 } });
164
+ const out = fs.createWriteStream(resolvedOut);
165
+ const pipe = new Promise((resolve, reject) => {
166
+ out.on('close', () => resolve());
167
+ archive.on('error', reject);
168
+ });
169
+ archive.pipe(out);
170
+ archive.file(path.join(tmpDir, BACKUP_DB), { name: BACKUP_DB });
171
+ archive.file(path.join(tmpDir, CONFIG_JSON), { name: CONFIG_JSON });
172
+ for (const k of KEY_FILES) {
173
+ const fp = path.join(tmpDir, k);
174
+ if (fs.existsSync(fp))
175
+ archive.file(fp, { name: k });
176
+ }
177
+ await archive.finalize();
178
+ await pipe;
179
+ return resolvedOut;
180
+ }
181
+ finally {
182
+ fs.rmSync(tmpDir, { recursive: true, force: true });
183
+ }
184
+ }
@@ -0,0 +1,4 @@
1
+ /**
2
+ * Tests for runBackup with mocked SSH (no real connections).
3
+ */
4
+ export {};