@aifabrix/server-setup 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +68 -0
- package/assets/builder/builder-server/nginx-builder-server.conf.template +26 -0
- package/assets/cron-backup.sh +25 -0
- package/assets/setup-dev-server-no-node.sh +227 -0
- package/dist/backup-db.d.ts +13 -0
- package/dist/backup-db.js +125 -0
- package/dist/backup-db.spec.d.ts +5 -0
- package/dist/backup-db.spec.js +260 -0
- package/dist/backup-schedule.d.ts +17 -0
- package/dist/backup-schedule.js +60 -0
- package/dist/backup.d.ts +15 -0
- package/dist/backup.js +184 -0
- package/dist/backup.spec.d.ts +4 -0
- package/dist/backup.spec.js +199 -0
- package/dist/cli.d.ts +6 -0
- package/dist/cli.js +170 -0
- package/dist/config.d.ts +17 -0
- package/dist/config.js +9 -0
- package/dist/config.spec.d.ts +4 -0
- package/dist/config.spec.js +41 -0
- package/dist/install.d.ts +19 -0
- package/dist/install.js +74 -0
- package/dist/local-pubkey.d.ts +13 -0
- package/dist/local-pubkey.js +35 -0
- package/dist/local-pubkey.spec.d.ts +4 -0
- package/dist/local-pubkey.spec.js +64 -0
- package/dist/restore.d.ts +17 -0
- package/dist/restore.js +101 -0
- package/dist/restore.spec.d.ts +4 -0
- package/dist/restore.spec.js +215 -0
- package/dist/ssh-cert.d.ts +18 -0
- package/dist/ssh-cert.js +92 -0
- package/dist/ssh-cert.spec.d.ts +4 -0
- package/dist/ssh-cert.spec.js +101 -0
- package/dist/ssh.d.ts +27 -0
- package/dist/ssh.js +122 -0
- package/dist/ssh.spec.d.ts +4 -0
- package/dist/ssh.spec.js +31 -0
- package/dist/ubuntu.d.ts +7 -0
- package/dist/ubuntu.js +33 -0
- package/dist/ubuntu.spec.d.ts +4 -0
- package/dist/ubuntu.spec.js +56 -0
- package/package.json +48 -0
package/README.md
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
# @aifabrix/server-setup (af-server)
|
|
2
|
+
|
|
3
|
+
CLI to install, backup, and restore AI Fabrix builder-server over SSH. Runs on your PC or CI; the server does **not** need Node.js or the aifabrix Builder.
|
|
4
|
+
|
|
5
|
+
- **Backup** = configuration + database only (no workspace or developer code).
|
|
6
|
+
- **Restore** = push backup zip (builder.db + keys) back to server DATA_DIR.
|
|
7
|
+
|
|
8
|
+
## Commands
|
|
9
|
+
|
|
10
|
+
Omit `user@host` to run **locally** on the current machine (Ubuntu only). With `user@host`, commands run over SSH.
|
|
11
|
+
|
|
12
|
+
```bash
|
|
13
|
+
# Install server (Docker, nginx, SSL proxy, sync user, cron).
|
|
14
|
+
af-server install [ user@host ] [ -d DATA_DIR ] [ --dev-domain DOMAIN ] [ --ssl-dir PATH ] [ -i SSH_KEY ]
|
|
15
|
+
|
|
16
|
+
# On-demand backup: fetch config + DB + keys from server (or local DATA_DIR), save zip.
|
|
17
|
+
af-server backup [ user@host ] [ -d DATA_DIR ] [ -o output.zip ] [ -i SSH_KEY ]
|
|
18
|
+
|
|
19
|
+
# Install cron backup (daily 02:00, delete backups older than 7 days).
|
|
20
|
+
af-server backup [ user@host ] --schedule [ --backup-dir PATH ] [ --keep-days N ] [ -i SSH_KEY ]
|
|
21
|
+
|
|
22
|
+
# Restore backup zip to DATA_DIR.
|
|
23
|
+
af-server restore backup.zip [ user@host ] [ -d DATA_DIR ] [ --force ] [ -i SSH_KEY ]
|
|
24
|
+
|
|
25
|
+
# ssh-cert: install = append your local SSH public key for passwordless auth; request = stub for future SSH CA.
|
|
26
|
+
af-server ssh-cert request [ --user ID ] [ -i SSH_KEY ]
|
|
27
|
+
af-server ssh-cert install [ user@host ] [ -i SSH_KEY ]
|
|
28
|
+
```
|
|
29
|
+
|
|
30
|
+
**ssh-cert install:** Appends your **local SSH public key** (from `~/.ssh/id_ed25519.pub` or `id_rsa.pub`, or from `-i key` → `key.pub`) to the server user's `~/.ssh/authorized_keys`. After that, install/backup/restore work without password when using that key or ssh-agent. First run may use password or `-i`. **request** remains a stub for future SSH CA integration.
|
|
31
|
+
|
|
32
|
+
## Prerequisites
|
|
33
|
+
|
|
34
|
+
- Node.js >= 18
|
|
35
|
+
- SSH access to the server (key-based auth recommended; use `-i path/to/key`)
|
|
36
|
+
|
|
37
|
+
The server should have **builder-server** data in either:
|
|
38
|
+
|
|
39
|
+
- **SQLite** (`DATA_DIR/builder.db`) — after JSON→SQLite migration (see repo plan 006); backup/restore and cron backup use this.
|
|
40
|
+
- **JSON files** — legacy; on-demand backup still works (exports to SQLite inside the zip).
|
|
41
|
+
|
|
42
|
+
## Backup contents
|
|
43
|
+
|
|
44
|
+
- `config.json` — DATA_DIR, createdAt, source (builder.db or json)
|
|
45
|
+
- `backup.db` — SQLite (either copy of builder.db or export from JSON)
|
|
46
|
+
- `ca.crt`, `ca.key`, `secrets-encryption.key` — for restore
|
|
47
|
+
|
|
48
|
+
**Security:** Backups contain secrets. Store them encrypted at rest and restrict filesystem access. Never log key material.
|
|
49
|
+
|
|
50
|
+
## Cron backup (--schedule)
|
|
51
|
+
|
|
52
|
+
Installs a shell script on the server that runs daily at 02:00. Requires `builder.db` (SQLite) and `zip` on the server. Deletes backups older than N days (default 7, configurable via `--keep-days`). Backup dir default: `/opt/aifabrix/backups`.
|
|
53
|
+
|
|
54
|
+
## Build and run locally
|
|
55
|
+
|
|
56
|
+
```bash
|
|
57
|
+
cd server-setup
|
|
58
|
+
npm install
|
|
59
|
+
npm run build
|
|
60
|
+
node dist/cli.js --help
|
|
61
|
+
# or: npm link && af-server --help
|
|
62
|
+
```
|
|
63
|
+
|
|
64
|
+
## References
|
|
65
|
+
|
|
66
|
+
- [SETUP.md](../SETUP.md) — server installation and manual prerequisites
|
|
67
|
+
- [builder/builder-server/README.md](../builder/builder-server/README.md) — builder-server data dir and mounts
|
|
68
|
+
- [AI Fabrix Builder CLI](https://github.com/esystemsdev/aifabrix-builder/blob/main/docs/CLI-REFERENCE.md) — for onboarding and certs (talks to builder-server API)
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
# Nginx snippet for builder-server onboarding API (https://DEV_DOMAIN_PLACEHOLDER).
# Generated from template by af-server install (substitutes DEV_DOMAIN, SSL_DIR, BUILDER_SERVER_PORT, DATA_DIR).
# SSL cert and key from SSL_DIR_PLACEHOLDER (wildcard.crt, wildcard.key).
# Client cert: ssl_client_certificate uses DATA_DIR_PLACEHOLDER/ca.crt (Builder CA; created by builder-server on first run).
# Reload nginx after placing: sudo nginx -t && sudo systemctl reload nginx.

server {
    listen 443 ssl;
    server_name DEV_DOMAIN_PLACEHOLDER;

    ssl_certificate SSL_DIR_PLACEHOLDER/wildcard.crt;
    ssl_certificate_key SSL_DIR_PLACEHOLDER/wildcard.key;

    # Request (but do not require) a client certificate signed by the Builder CA;
    # requests without a cert still reach the backend, which decides per route.
    ssl_client_certificate DATA_DIR_PLACEHOLDER/ca.crt;
    ssl_verify_client optional;

    location / {
        proxy_pass http://127.0.0.1:BUILDER_SERVER_PORT_PLACEHOLDER;
        proxy_http_version 1.1;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        # proxy_set_header overwrites any client-supplied X-Client-Cert, so the
        # backend can trust this header (empty when no certificate was presented).
        # NOTE(review): $ssl_client_cert is deprecated in nginx in favor of
        # $ssl_client_escaped_cert (multi-line PEM values break HTTP headers) --
        # confirm what encoding builder-server expects before switching.
        proxy_set_header X-Client-Cert $ssl_client_cert;
    }
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
#!/bin/sh
# Cron backup for builder-server: copy builder.db + key files into a zip in BACKUP_DIR.
# Retention is age-based: backups older than KEEP_DAYS days are deleted (default 7).
# Requires: DATA_DIR and BACKUP_DIR set (or defaults below); zip(1) installed.
# Server must have builder.db (SQLite); no Node on server.
# Backups contain key material -- never log their contents.

set -e
DATA_DIR="${DATA_DIR:-/opt/aifabrix/builder-server/data}"
BACKUP_DIR="${BACKUP_DIR:-/opt/aifabrix/backups}"
KEEP_DAYS="${KEEP_DAYS:-7}"
STAMP=$(date +%Y%m%d-%H%M)
ZIP_NAME="aifabrix-backup-${STAMP}.zip"
TMP_DIR=$(mktemp -d)
# Remove the staging dir on any exit, including errors.
trap 'rm -rf "$TMP_DIR"' EXIT

mkdir -p "$BACKUP_DIR"
cp -a "$DATA_DIR/builder.db" "$TMP_DIR/backup.db" 2>/dev/null || { echo "builder.db not found"; exit 1; }
# Stage only the key files that actually exist so zip is never asked to
# archive missing entries (the old fallback masked real zip failures).
FILES="backup.db config.json"
for f in ca.crt ca.key secrets-encryption.key; do
  if [ -f "$DATA_DIR/$f" ]; then
    cp -a "$DATA_DIR/$f" "$TMP_DIR/"
    FILES="$FILES $f"
  fi
done
echo "{\"dataDir\":\"$DATA_DIR\",\"createdAt\":\"$(date -Iseconds)\",\"source\":\"builder.db\"}" > "$TMP_DIR/config.json"
cd "$TMP_DIR"
# Single zip invocation over the staged list; with set -e a real failure
# (e.g. disk full) now aborts the job instead of being silently swallowed.
# $FILES is intentionally word-split: it holds fixed, space-free names.
zip -q -r "$BACKUP_DIR/$ZIP_NAME" $FILES
# Retention: delete backups older than KEEP_DAYS days.
find "$BACKUP_DIR" -name 'aifabrix-backup-*.zip' -mtime +"$KEEP_DAYS" -delete 2>/dev/null || true
|
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
#!/bin/sh
# Idempotent dev server setup script (no Node/Builder on server). Safe to run multiple times.
# Run via af-server install user@host. REPO_ROOT must be set to the dir containing builder/builder-server/nginx-builder-server.conf.template.
# Optional env: DEV_DOMAIN, SSL_DIR, DATA_DIR, SETUP_ADMIN_USER, SYNC_USER, BUILDER_SERVER_PORT, NGINX_CONF_DIR, SETUP_HOSTNAME, INSTALL_PORTAINER=1, SKIP_DOCKER_TLS=1.

set -e

# Default REPO_ROOT to the parent of this script's directory.
REPO_ROOT="${REPO_ROOT:-$(cd "$(dirname "$0")/.." && pwd)}"
DATA_DIR="${DATA_DIR:-/opt/aifabrix/builder-server/data}"
DEV_DOMAIN="${DEV_DOMAIN:-dev.aifabrix.dev}"
SSL_DIR="${SSL_DIR:-/opt/aifabrix/ssl}"
SETUP_ADMIN_USER="${SETUP_ADMIN_USER:-serveradmin}"
SYNC_USER="${SYNC_USER:-aifabrix-sync}"
BUILDER_SERVER_PORT="${BUILDER_SERVER_PORT:-3000}"
NGINX_CONF_DIR="${NGINX_CONF_DIR:-/etc/nginx/conf.d}"

# Sanitize user-controlled env to prevent path/command injection
# Accepts multi-char names that start and end alphanumeric, or a single
# alphanumeric char (the second grep covers the one-char case the first
# pattern cannot match).
sanitize_domain() {
  echo "$1" | grep -q '^[a-zA-Z0-9][a-zA-Z0-9.-]*[a-zA-Z0-9]$' || echo "$1" | grep -q '^[a-zA-Z0-9]$'
}
# Rejects paths containing '..' or common shell metacharacters; these values
# are later interpolated into sed programs and file paths.
sanitize_path() {
  case "$1" in
    *'..'*|*';'*|*'|'*|*'&'*|*'$'*|*'`'*) return 1 ;;
    *) return 0 ;;
  esac
}
if ! sanitize_domain "$DEV_DOMAIN"; then
  echo "Invalid DEV_DOMAIN (use only letters, digits, dots, hyphens)."
  exit 1
fi
if ! sanitize_path "$SSL_DIR" || ! sanitize_path "$DATA_DIR" || ! sanitize_path "$NGINX_CONF_DIR"; then
  echo "Invalid path in SSL_DIR, DATA_DIR, or NGINX_CONF_DIR (no .. or shell metacharacters)."
  exit 1
fi
# Same metacharacter screen for user names (used below in paths and sudoers).
case "$SETUP_ADMIN_USER" in
  *'..'*|*';'*|*'|'*|*'&'*|*'$'*|*'`'*) echo "Invalid SETUP_ADMIN_USER."; exit 1 ;;
esac
case "$SYNC_USER" in
  *'..'*|*';'*|*'|'*|*'&'*|*'$'*|*'`'*) echo "Invalid SYNC_USER."; exit 1 ;;
esac

# Abort unless running as root: the script writes system config, installs
# packages, and manages services.
require_sudo() {
  if [ "$(id -u)" -ne 0 ]; then
    echo "This script requires root. Run with sudo or as root."
    exit 1
  fi
}
require_sudo
|
|
49
|
+
|
|
50
|
+
# --- Hostname (optional) ---
# Only act when SETUP_HOSTNAME is provided and differs from the current name.
if [ -n "$SETUP_HOSTNAME" ]; then
  current=$(hostname 2>/dev/null || true)
  if [ "$current" != "$SETUP_HOSTNAME" ]; then
    hostnamectl set-hostname "$SETUP_HOSTNAME" 2>/dev/null || true
  fi
fi

# --- System updates ---
apt-get update -qq
DEBIAN_FRONTEND=noninteractive apt-get upgrade -y -qq

# --- Docker ---
if ! command -v docker >/dev/null 2>&1; then
  apt-get install -y docker.io
fi
systemctl enable docker 2>/dev/null || true
systemctl start docker 2>/dev/null || true

# --- Admin user: docker group ---
if getent group docker >/dev/null 2>&1; then
  # -w: match the whole group name; plain -q would also match substrings
  # (e.g. a group named "docker-users") and wrongly skip the usermod.
  if ! id -nG "$SETUP_ADMIN_USER" 2>/dev/null | grep -qw docker; then
    usermod -aG docker "$SETUP_ADMIN_USER" 2>/dev/null || true
  fi
fi

# --- Admin user: sudoers ---
SUDOERS_FILE="/etc/sudoers.d/$SETUP_ADMIN_USER"
if [ ! -f "$SUDOERS_FILE" ]; then
  echo "$SETUP_ADMIN_USER ALL=(ALL) NOPASSWD:ALL" > "$SUDOERS_FILE"
  chmod 440 "$SUDOERS_FILE"
fi

# --- PAM mkhomedir ---
# Auto-create home directories on first login.
if ! grep -q mkhomedir /etc/pam.d/common-session 2>/dev/null; then
  pam-auth-update --enable mkhomedir || true
fi

# --- Optional: Portainer ---
if [ "$INSTALL_PORTAINER" = "1" ] && ! docker ps -a --format '{{.Names}}' 2>/dev/null | grep -q '^portainer$'; then
  docker volume create portainer_data 2>/dev/null || true
  docker run -d --name portainer --restart=always \
    -p 9443:9443 \
    -v /var/run/docker.sock:/var/run/docker.sock \
    -v portainer_data:/data \
    portainer/portainer-ce:latest 2>/dev/null || true
fi

# --- Nginx ---
if ! command -v nginx >/dev/null 2>&1; then
  apt-get install -y nginx
  systemctl enable nginx
  systemctl start nginx
fi

NGINX_CONF="$NGINX_CONF_DIR/$DEV_DOMAIN.conf"
# NOTE(review): template path assumes a repo-style layout under REPO_ROOT --
# confirm it matches where the package actually ships the template on disk.
NGINX_TEMPLATE="$REPO_ROOT/builder/builder-server/nginx-builder-server.conf.template"
if [ ! -f "$NGINX_CONF" ] && [ -f "$NGINX_TEMPLATE" ]; then
  # Substitute placeholders. Inputs were sanitized above, so '|' (the sed
  # delimiter) cannot appear in any of them.
  sed -e "s|DEV_DOMAIN_PLACEHOLDER|$DEV_DOMAIN|g" \
    -e "s|SSL_DIR_PLACEHOLDER|$SSL_DIR|g" \
    -e "s|BUILDER_SERVER_PORT_PLACEHOLDER|$BUILDER_SERVER_PORT|g" \
    -e "s|DATA_DIR_PLACEHOLDER|$DATA_DIR|g" \
    "$NGINX_TEMPLATE" > "$NGINX_CONF"
  # Reload only if the full nginx config validates (SSL files may be missing);
  # warn instead of failing silently so the operator knows a reload is pending.
  if nginx -t 2>/dev/null; then
    systemctl reload nginx
  else
    echo "Warning: nginx -t failed; wrote $NGINX_CONF but did not reload. Check $SSL_DIR/wildcard.crt and wildcard.key."
  fi
elif [ ! -f "$NGINX_CONF" ]; then
  echo "Warning: SSL prereqs required. Place $NGINX_CONF (see SETUP.md) and ensure $SSL_DIR/wildcard.crt and $SSL_DIR/wildcard.key exist."
fi
|
|
119
|
+
|
|
120
|
+
# --- Mutagen ---
# Install the mutagen binary (file sync over SSH) if missing.
if ! command -v mutagen >/dev/null 2>&1; then
  MUTAGEN_VERSION="0.17.2"
  MUTAGEN_URL="https://github.com/mutagen-io/mutagen/releases/download/v${MUTAGEN_VERSION}/mutagen_linux_amd64_v${MUTAGEN_VERSION}.tar.gz"
  # Best-effort: needs wget; any failure is ignored and setup continues.
  if command -v wget >/dev/null 2>&1; then
    (cd /tmp && wget -q -O - "$MUTAGEN_URL" | tar xzf - && mv mutagen /usr/local/bin/ 2>/dev/null) || true
  fi
fi
if command -v mutagen >/dev/null 2>&1; then
  mutagen daemon start 2>/dev/null || true
  # Install a systemd unit so the daemon also starts after reboot.
  if [ -d /etc/systemd/system ]; then
    MUTAGEN_SVC="/etc/systemd/system/mutagen.service"
    if [ ! -f "$MUTAGEN_SVC" ]; then
      cat > "$MUTAGEN_SVC" << 'MUTAGEN_EOF'
[Unit]
Description=Mutagen daemon
After=network.target

[Service]
ExecStart=/usr/local/bin/mutagen daemon run
Restart=on-failure

[Install]
WantedBy=multi-user.target
MUTAGEN_EOF
      systemctl daemon-reload
      systemctl enable mutagen 2>/dev/null || true
    fi
  fi
fi

# --- Docker TLS (daemon.json) ---
# Only writes a fresh daemon.json; never overwrites an existing one.
# NOTE(review): a "hosts" key in daemon.json conflicts with the -H flag in the
# stock systemd docker.service on Ubuntu -- confirm the unit is adjusted, or
# Docker may fail to restart after this file is picked up.
if [ "$SKIP_DOCKER_TLS" != "1" ] && [ -d /etc/docker ]; then
  if [ ! -f /etc/docker/daemon.json ]; then
    cat > /etc/docker/daemon.json << 'DOCKER_EOF'
{
  "tls": true,
  "tlsverify": true,
  "tlscacert": "/etc/docker/ca.pem",
  "tlscert": "/etc/docker/server-cert.pem",
  "tlskey": "/etc/docker/server-key.pem",
  "hosts": ["tcp://0.0.0.0:2376", "unix:///var/run/docker.sock"]
}
DOCKER_EOF
    echo "Docker TLS daemon.json created. Ensure /etc/docker/ca.pem, server-cert.pem, server-key.pem exist (see SETUP.md Docker API TLS)."
  fi
fi

# --- Builder-server data dir and container ---
mkdir -p "$DATA_DIR"
# assumes 1001:65533 is the UID:GID the builder-server container runs as -- TODO confirm
chown 1001:65533 "$DATA_DIR"
if command -v docker >/dev/null 2>&1; then
  if ! docker ps -a --format '{{.Names}}' 2>/dev/null | grep -q '^builder-server$'; then
    # Container creation is a manual step; print the commands instead of running them.
    echo "Builder-server container not found. Build and run manually (see builder/builder-server/README.md):"
    echo "  docker build -t builder-server:latest -f builder/builder-server/Dockerfile ."
    echo "  docker run -d --name builder-server --restart unless-stopped -p ${BUILDER_SERVER_PORT}:3000 -v $DATA_DIR:/data -e PORT=3000 builder-server:latest"
  else
    docker start builder-server 2>/dev/null || true
  fi
fi

# --- Sync user for Mutagen SSH ---
# System user whose authorized_keys are managed from a file under DATA_DIR
# (applied by the cron job installed below).
SYNC_USER_HOME="${DATA_DIR}/.sync-home"
MANAGED_KEYS_FILE="${DATA_DIR}/sync-authorized-keys"
if ! getent passwd "$SYNC_USER" >/dev/null 2>&1; then
  useradd --system --home-dir "$SYNC_USER_HOME" --create-home --shell /bin/bash "$SYNC_USER"
fi
mkdir -p "$SYNC_USER_HOME/.ssh"
chown -R "$SYNC_USER:$SYNC_USER" "$SYNC_USER_HOME"
chmod 700 "$SYNC_USER_HOME/.ssh"
if [ ! -f "$SYNC_USER_HOME/.ssh/authorized_keys" ]; then
  touch "$SYNC_USER_HOME/.ssh/authorized_keys"
  chown "$SYNC_USER:$SYNC_USER" "$SYNC_USER_HOME/.ssh/authorized_keys"
  chmod 600 "$SYNC_USER_HOME/.ssh/authorized_keys"
fi

# --- Workspace dir ---
mkdir -p "${DATA_DIR}/workspace"
chown -R "$SYNC_USER:$SYNC_USER" "${DATA_DIR}/workspace" 2>/dev/null || true

# --- Host job: apply managed SSH keys to sync user ---
# Generates a helper script that copies MANAGED_KEYS_FILE over the sync user's
# authorized_keys. The heredoc delimiter is UNQUOTED: $SYNC_USER/$DATA_DIR
# expand now (baked into the generated script), while \$-escaped variables
# expand when the generated script runs.
APPLY_KEYS_SCRIPT="/usr/local/bin/aifabrix-apply-sync-keys.sh"
if [ ! -f "$APPLY_KEYS_SCRIPT" ]; then
  cat > "$APPLY_KEYS_SCRIPT" << APPLY_EOF
#!/bin/sh
SYNC_USER="$SYNC_USER"
DATA_DIR="$DATA_DIR"
SYNC_HOME="\${DATA_DIR}/.sync-home"
MANAGED="\${DATA_DIR}/sync-authorized-keys"
if [ -f "\$MANAGED" ]; then
  cp "\$MANAGED" "\$SYNC_HOME/.ssh/authorized_keys"
else
  touch "\$SYNC_HOME/.ssh/authorized_keys"
fi
chown "\$SYNC_USER:\$SYNC_USER" "\$SYNC_HOME/.ssh/authorized_keys"
chmod 600 "\$SYNC_HOME/.ssh/authorized_keys"
if [ -d "\${DATA_DIR}/workspace" ]; then
  chown -R "\$SYNC_USER:\$SYNC_USER" "\${DATA_DIR}/workspace"
fi
APPLY_EOF
  chmod 755 "$APPLY_KEYS_SCRIPT"
fi
# Run the key-apply helper every 2 minutes as root.
if [ -d /etc/cron.d ] && [ ! -f /etc/cron.d/aifabrix-sync-keys ]; then
  echo "*/2 * * * * root $APPLY_KEYS_SCRIPT" > /etc/cron.d/aifabrix-sync-keys
  chmod 644 /etc/cron.d/aifabrix-sync-keys
fi

echo "Setup complete. Ensure manual prerequisites (SSL at $SSL_DIR: wildcard.crt, wildcard.key; DNS $DEV_DOMAIN) are done; see SETUP.md."
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
/**
 * Create or copy SQLite backup from server data (builder.db or JSON files).
 * Schema matches builder-server (users, pin_tokens, secrets, ssh_keys).
 * Never log key material or secret values.
 */
import type { BackupConfig } from './config.js';
/**
 * Build a backup SQLite database at `outPath` from legacy JSON exports.
 *
 * @param outPath - Filesystem path for the new SQLite file.
 * @param jsonFiles - Raw JSON file CONTENTS (strings, not paths); every key is optional
 *   and skipped when absent.
 * @param _config - Backup configuration (accepted but currently unused by the implementation).
 */
export declare function createBackupDbFromJson(outPath: string, jsonFiles: {
    users?: string;
    tokens?: string;
    secrets?: string;
    sshKeys?: string;
}, _config: BackupConfig): void;
/**
 * Copy all rows from an existing builder.db (opened read-only) into a fresh
 * backup database created at `outPath`.
 */
export declare function copyBuilderDbAsBackup(builderDbPath: string, outPath: string): void;
|
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Create or copy SQLite backup from server data (builder.db or JSON files).
|
|
3
|
+
* Schema matches builder-server (users, pin_tokens, secrets, ssh_keys).
|
|
4
|
+
* Never log key material or secret values.
|
|
5
|
+
*/
|
|
6
|
+
import Database from 'better-sqlite3';
|
|
7
|
+
const SCHEMA_SQL = `
|
|
8
|
+
CREATE TABLE IF NOT EXISTS users (
|
|
9
|
+
id TEXT PRIMARY KEY,
|
|
10
|
+
name TEXT NOT NULL,
|
|
11
|
+
email TEXT NOT NULL,
|
|
12
|
+
created_at TEXT NOT NULL,
|
|
13
|
+
cert_valid_not_after TEXT,
|
|
14
|
+
groups TEXT
|
|
15
|
+
);
|
|
16
|
+
CREATE TABLE IF NOT EXISTS pin_tokens (
|
|
17
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
18
|
+
user_id TEXT NOT NULL,
|
|
19
|
+
pin_hash TEXT NOT NULL,
|
|
20
|
+
expires_at TEXT NOT NULL,
|
|
21
|
+
consumed INTEGER NOT NULL DEFAULT 0,
|
|
22
|
+
created_at TEXT NOT NULL
|
|
23
|
+
);
|
|
24
|
+
CREATE TABLE IF NOT EXISTS secrets (
|
|
25
|
+
key TEXT PRIMARY KEY,
|
|
26
|
+
iv TEXT NOT NULL,
|
|
27
|
+
auth_tag TEXT NOT NULL,
|
|
28
|
+
cipher TEXT NOT NULL
|
|
29
|
+
);
|
|
30
|
+
CREATE TABLE IF NOT EXISTS ssh_keys (
|
|
31
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
32
|
+
user_id TEXT NOT NULL,
|
|
33
|
+
public_key TEXT NOT NULL,
|
|
34
|
+
label TEXT,
|
|
35
|
+
fingerprint TEXT,
|
|
36
|
+
created_at TEXT,
|
|
37
|
+
UNIQUE(user_id, fingerprint)
|
|
38
|
+
);
|
|
39
|
+
`.trim();
|
|
40
|
+
/**
 * Build a backup SQLite database at `outPath` from legacy JSON file contents.
 *
 * Each key of `jsonFiles` holds the raw text of the corresponding legacy JSON
 * file; absent keys are skipped. Both camelCase and snake_case field names are
 * accepted in the source records. Never log key material or secret values.
 *
 * @param {string} outPath - Path for the new SQLite file.
 * @param {{users?: string, tokens?: string, secrets?: string, sshKeys?: string}} jsonFiles
 *   Raw JSON file contents (strings, not paths).
 * @param {object} _config - Backup config (accepted for interface compatibility; unused).
 * @throws {SyntaxError} If any provided JSON string fails to parse.
 */
export function createBackupDbFromJson(outPath, jsonFiles, _config) {
    const db = new Database(outPath);
    try {
        db.exec(SCHEMA_SQL);
        if (jsonFiles.users) {
            const data = JSON.parse(jsonFiles.users);
            // Accept either { users: [...] } or a bare array.
            const users = data.users ?? (Array.isArray(data) ? data : []);
            const stmt = db.prepare(`INSERT INTO users (id, name, email, created_at, cert_valid_not_after, groups) VALUES (?, ?, ?, ?, ?, ?)`);
            for (const u of users) {
                const id = String(u.id ?? '');
                const name = String(u.name ?? '');
                const email = String(u.email ?? '');
                const createdAt = String(u.createdAt ?? u.created_at ?? new Date().toISOString());
                const certValidNotAfter = u.certValidNotAfter != null ? String(u.certValidNotAfter) : null;
                // groups is stored as a JSON-encoded string column.
                const groups = u.groups != null ? JSON.stringify(u.groups) : null;
                stmt.run(id, name, email, createdAt, certValidNotAfter, groups);
            }
        }
        if (jsonFiles.tokens) {
            const data = JSON.parse(jsonFiles.tokens);
            const pins = data.pins ?? (Array.isArray(data) ? data : []);
            const stmt = db.prepare(`INSERT INTO pin_tokens (user_id, pin_hash, expires_at, consumed, created_at) VALUES (?, ?, ?, ?, ?)`);
            for (const p of pins) {
                const userId = String(p.userId ?? p.user_id ?? '');
                const pinHash = String(p.pinHash ?? p.pin_hash ?? '');
                const expiresAt = String(p.expiresAt ?? p.expires_at ?? '');
                // Fixed: original read `(p.consumed ?? p.consumed)` -- a no-op
                // double read of the same property.
                const consumed = p.consumed === true ? 1 : 0;
                const createdAt = String(p.createdAt ?? p.created_at ?? new Date().toISOString());
                stmt.run(userId, pinHash, expiresAt, consumed, createdAt);
            }
        }
        if (jsonFiles.secrets) {
            const data = JSON.parse(jsonFiles.secrets);
            const secrets = data.secrets ?? {};
            const stmt = db.prepare(`INSERT INTO secrets (key, iv, auth_tag, cipher) VALUES (?, ?, ?, ?)`);
            for (const [key, val] of Object.entries(secrets)) {
                // Accept camelCase (authTag) or snake_case (auth_tag) records;
                // anything else is silently skipped, as before.
                if (val && typeof val.iv === 'string' && typeof val.authTag === 'string' && typeof val.cipher === 'string') {
                    stmt.run(key, val.iv, val.authTag, val.cipher);
                }
                else if (val && 'iv' in val && 'auth_tag' in val && 'cipher' in val) {
                    stmt.run(key, val.iv, val.auth_tag, val.cipher);
                }
            }
        }
        if (jsonFiles.sshKeys) {
            const data = JSON.parse(jsonFiles.sshKeys);
            const byUser = data.byUser ?? {};
            const stmt = db.prepare(`INSERT INTO ssh_keys (user_id, public_key, label, fingerprint, created_at) VALUES (?, ?, ?, ?, ?)`);
            for (const [userId, entries] of Object.entries(byUser)) {
                const list = Array.isArray(entries) ? entries : [];
                for (const e of list) {
                    const publicKey = String(e.publicKey ?? e.public_key ?? '');
                    const label = e.label != null ? String(e.label) : null;
                    const fingerprint = e.fingerprint != null ? String(e.fingerprint) : null;
                    // Fixed precedence bug: the original expression parsed as
                    // `(e.createdAt ?? (e.created_at != null)) ? ... : null`,
                    // which mishandled snake_case-only and empty-string values.
                    const createdRaw = e.createdAt ?? e.created_at;
                    const createdAt = createdRaw != null ? String(createdRaw) : null;
                    stmt.run(userId, publicKey, label, fingerprint, createdAt);
                }
            }
        }
    }
    finally {
        // Always release the SQLite handle, even when parsing or an insert throws.
        db.close();
    }
}
|
|
101
|
+
/**
 * Copy all rows from an existing builder.db into a fresh backup database.
 *
 * Opens the source read-only, creates `outPath` with the standard schema, and
 * copies the four known tables row by row. Tables missing from older source
 * databases are skipped (best-effort per table).
 *
 * @param {string} builderDbPath - Path to the source builder.db (opened readonly).
 * @param {string} outPath - Path for the new backup SQLite file.
 */
export function copyBuilderDbAsBackup(builderDbPath, outPath) {
    const db = new Database(builderDbPath, { readonly: true });
    const backup = new Database(outPath);
    try {
        backup.exec(SCHEMA_SQL);
        const tables = ['users', 'pin_tokens', 'secrets', 'ssh_keys'];
        for (const table of tables) {
            try {
                const rows = db.prepare(`SELECT * FROM ${table}`).all();
                if (rows.length === 0)
                    continue;
                // Read values by column name rather than relying on
                // Object.values() ordering matching the column list.
                const cols = Object.keys(rows[0]);
                const placeholders = cols.map(() => '?').join(', ');
                const stmt = backup.prepare(`INSERT INTO ${table} (${cols.join(', ')}) VALUES (${placeholders})`);
                // One transaction per table: atomic and far faster than
                // per-row autocommit.
                const insertAll = backup.transaction((all) => {
                    for (const row of all) {
                        stmt.run(...cols.map((c) => row[c]));
                    }
                });
                insertAll(rows);
            }
            catch {
                // table might not exist in older source databases; skip it.
            }
        }
    }
    finally {
        // Close both handles even if schema creation or a prepare throws.
        db.close();
        backup.close();
    }
}
|