@arcote.tech/arc-cli 0.5.1 → 0.5.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,279 @@
1
+ import { existsSync, readFileSync, writeFileSync } from "fs";
2
+ import { join } from "path";
3
+
4
+ // ---------------------------------------------------------------------------
5
+ // deploy.arc.json — single source of truth for deployment configuration.
6
+ //
7
+ // Design principles:
8
+ // - One file, zero duplicates. No separate terraform.tfvars, inventory.ini etc.
9
+ // - No inline secrets: tokens come from env vars referenced by name.
10
+ // - Optional provision section: when present, CLI runs terraform+ansible to
11
+ // create the host; when absent, CLI assumes target.host is already reachable.
12
+ // ---------------------------------------------------------------------------
13
+
14
/** SSH endpoint and filesystem layout of the machine arc deploys to. */
export interface DeployTarget {
  /** Hostname or IP address reachable over SSH. */
  host: string;
  /** SSH user. Default applied by validation: "deploy". */
  user: string;
  /** SSH port. Default applied by validation: 22. */
  port: number;
  /** Remote directory for the arc stack. Default: /opt/arc */
  remoteDir: string;
  /** Optional explicit SSH key path (otherwise ssh-agent / ~/.ssh/config is used). */
  sshKey?: string;
}

/** One deployable environment (e.g. "prod", "staging"). */
export interface DeployEnv {
  /** Subdomain or full domain routed by Caddy. */
  domain: string;
  /** Extra env vars passed to the arc container. */
  envVars?: Record<string, string>;
}

/** Caddy (reverse proxy / TLS) settings shared by all environments. */
export interface DeployCaddy {
  /** Email for Let's Encrypt. Use "internal" to use self-signed certs. */
  email: string;
}

/** Terraform inputs used when the CLI provisions the host itself. */
export interface DeployProvisionTerraform {
  /** Only Hetzner Cloud is supported; validation rejects anything else. */
  provider: "hcloud";
  /** Provider server type (e.g. "cx22"). */
  serverType: string;
  /** Provider datacenter/location slug. */
  location: string;
  /** OS image. Default applied by validation: "ubuntu-22.04". */
  image: string;
  /** Name of environment variable holding the Hetzner Cloud API token. */
  tokenEnv: string;
  /** Path to public SSH key uploaded to the provider. Default: ~/.ssh/id_ed25519.pub */
  sshPublicKey?: string;
}

/** Ansible hardening options applied after the host exists. */
export interface DeployProvisionAnsible {
  /** Custom SSH port to move the daemon to. */
  sshPort?: number;
  /** Additional source IPs allowed through the firewall. */
  extraAllowedIps?: string[];
}

/** Optional provisioning section: present = CLI creates the host. */
export interface DeployProvision {
  terraform: DeployProvisionTerraform;
  ansible?: DeployProvisionAnsible;
}

/** Root shape of deploy.arc.json (see loadDeployConfig / validateDeployConfig). */
export interface DeployConfig {
  target: DeployTarget;
  /** Keyed by env name; names must match [a-z][a-z0-9-]* (enforced by validation). */
  envs: Record<string, DeployEnv>;
  caddy: DeployCaddy;
  provision?: DeployProvision;
}
63
+
64
+ export const DEPLOY_CONFIG_FILE = "deploy.arc.json";
65
+
66
+ // ---------------------------------------------------------------------------
67
+ // I/O
68
+ // ---------------------------------------------------------------------------
69
+
70
+ export function deployConfigPath(rootDir: string): string {
71
+ return join(rootDir, DEPLOY_CONFIG_FILE);
72
+ }
73
+
74
+ export function deployConfigExists(rootDir: string): boolean {
75
+ return existsSync(deployConfigPath(rootDir));
76
+ }
77
+
78
+ /**
79
+ * Load deploy.arc.json, expand `${VAR}` references against process.env,
80
+ * and validate shape. Throws with a precise error on any issue.
81
+ */
82
+ export function loadDeployConfig(rootDir: string): DeployConfig {
83
+ const path = deployConfigPath(rootDir);
84
+ if (!existsSync(path)) {
85
+ throw new Error(`Missing ${DEPLOY_CONFIG_FILE} at ${path}`);
86
+ }
87
+ const raw = readFileSync(path, "utf-8");
88
+ let parsed: unknown;
89
+ try {
90
+ parsed = JSON.parse(raw);
91
+ } catch (e) {
92
+ throw new Error(`Invalid JSON in ${DEPLOY_CONFIG_FILE}: ${(e as Error).message}`);
93
+ }
94
+ const expanded = expandEnvVars(parsed, process.env);
95
+ return validateDeployConfig(expanded);
96
+ }
97
+
98
+ export function saveDeployConfig(rootDir: string, cfg: DeployConfig): void {
99
+ writeFileSync(
100
+ deployConfigPath(rootDir),
101
+ JSON.stringify(cfg, null, 2) + "\n",
102
+ );
103
+ }
104
+
105
+ // ---------------------------------------------------------------------------
106
+ // ${VAR} expansion — recursive, strings only
107
+ // ---------------------------------------------------------------------------
108
+
109
+ const VAR_REGEX = /\$\{([A-Z0-9_]+)\}|\$([A-Z0-9_]+)/g;
110
+
111
+ export function expandEnvVars<T>(value: T, env: NodeJS.ProcessEnv): T {
112
+ if (typeof value === "string") {
113
+ return value.replace(VAR_REGEX, (_, a, b) => env[a ?? b] ?? "") as T;
114
+ }
115
+ if (Array.isArray(value)) {
116
+ return value.map((v) => expandEnvVars(v, env)) as T;
117
+ }
118
+ if (value && typeof value === "object") {
119
+ const out: Record<string, unknown> = {};
120
+ for (const [k, v] of Object.entries(value)) {
121
+ out[k] = expandEnvVars(v, env);
122
+ }
123
+ return out as T;
124
+ }
125
+ return value;
126
+ }
127
+
128
+ // ---------------------------------------------------------------------------
129
+ // Validation — strict, descriptive errors, no any
130
+ // ---------------------------------------------------------------------------
131
+
132
/**
 * Validate an already-parsed (and env-expanded) deploy.arc.json document.
 *
 * Applies defaults: target.user="deploy", target.port=22,
 * target.remoteDir="/opt/arc", provision.terraform.image="ubuntu-22.04".
 *
 * @param input Unknown value, typically the result of JSON.parse + expandEnvVars.
 * @returns A fully-typed DeployConfig with defaults filled in.
 * @throws Error naming the offending dotted key path on any shape violation.
 */
export function validateDeployConfig(input: unknown): DeployConfig {
  if (!isObject(input)) throw cfgErr("root", "object");
  // Required top-level sections; each throws with its key name if missing/mistyped.
  const target = requireObject(input, "target");
  const envs = requireObject(input, "envs");
  const caddy = requireObject(input, "caddy");

  const validated: DeployConfig = {
    target: {
      host: requireString(target, "target.host"),
      user: optionalString(target, "target.user") ?? "deploy",
      port: optionalNumber(target, "target.port") ?? 22,
      remoteDir: optionalString(target, "target.remoteDir") ?? "/opt/arc",
      sshKey: optionalString(target, "target.sshKey"),
    },
    envs: {},
    caddy: {
      email: requireString(caddy, "caddy.email"),
    },
  };

  const envKeys = Object.keys(envs);
  if (envKeys.length === 0) {
    throw new Error("deploy.arc.json: envs must contain at least one environment");
  }
  for (const name of envKeys) {
    // Env names double as path segments / container name parts, so keep them
    // to lowercase DNS-label-ish characters. (This check runs before the
    // requireObject below, so dotted names never confuse the key-path helpers.)
    if (!/^[a-z][a-z0-9-]*$/.test(name)) {
      throw new Error(`deploy.arc.json: env name "${name}" must match [a-z][a-z0-9-]*`);
    }
    const env = requireObject(envs, `envs.${name}`);
    const domain = requireString(env, `envs.${name}.domain`);
    // Loose sanity check only: at least one dot and a 2+ letter TLD.
    if (!/^[a-z0-9.-]+\.[a-z]{2,}$/i.test(domain)) {
      throw new Error(
        `deploy.arc.json: envs.${name}.domain "${domain}" doesn't look like a domain`,
      );
    }
    const envVarsRaw = (env as Record<string, unknown>).envVars;
    let envVars: Record<string, string> | undefined;
    if (envVarsRaw !== undefined) {
      if (!isObject(envVarsRaw)) throw cfgErr(`envs.${name}.envVars`, "object");
      // Copy entry-by-entry so each non-string value is reported by key.
      envVars = {};
      for (const [k, v] of Object.entries(envVarsRaw)) {
        if (typeof v !== "string") {
          throw new Error(
            `deploy.arc.json: envs.${name}.envVars.${k} must be a string`,
          );
        }
        envVars[k] = v;
      }
    }
    validated.envs[name] = { domain, envVars };
  }

  // provision is entirely optional: absent means the host already exists.
  const provision = (input as Record<string, unknown>).provision;
  if (provision !== undefined) {
    if (!isObject(provision)) throw cfgErr("provision", "object");
    const tf = requireObject(provision, "provision.terraform");
    const providerVal = requireString(tf, "provision.terraform.provider");
    // Only Hetzner Cloud is implemented; reject early with the offending value.
    if (providerVal !== "hcloud") {
      throw new Error(
        `deploy.arc.json: provision.terraform.provider must be "hcloud" (got "${providerVal}")`,
      );
    }
    const terraform: DeployProvisionTerraform = {
      provider: "hcloud",
      serverType: requireString(tf, "provision.terraform.serverType"),
      location: requireString(tf, "provision.terraform.location"),
      image: optionalString(tf, "provision.terraform.image") ?? "ubuntu-22.04",
      tokenEnv: requireString(tf, "provision.terraform.tokenEnv"),
      sshPublicKey: optionalString(tf, "provision.terraform.sshPublicKey"),
    };
    let ansible: DeployProvisionAnsible | undefined;
    const ansibleRaw = (provision as Record<string, unknown>).ansible;
    if (ansibleRaw !== undefined) {
      if (!isObject(ansibleRaw)) throw cfgErr("provision.ansible", "object");
      const allowed = (ansibleRaw as Record<string, unknown>).extraAllowedIps;
      let extraAllowedIps: string[] | undefined;
      if (allowed !== undefined) {
        if (!Array.isArray(allowed)) {
          throw cfgErr("provision.ansible.extraAllowedIps", "string[]");
        }
        // map() both validates element types (with index in the error) and copies.
        extraAllowedIps = allowed.map((v, i) => {
          if (typeof v !== "string") {
            throw cfgErr(`provision.ansible.extraAllowedIps[${i}]`, "string");
          }
          return v;
        });
      }
      ansible = {
        sshPort: optionalNumber(ansibleRaw, "provision.ansible.sshPort"),
        extraAllowedIps,
      };
    }
    validated.provision = { terraform, ansible };
  }

  return validated;
}
229
+
230
+ // ---------------------------------------------------------------------------
231
+ // Validator helpers
232
+ // ---------------------------------------------------------------------------
233
+
234
+ function isObject(v: unknown): v is Record<string, unknown> {
235
+ return typeof v === "object" && v !== null && !Array.isArray(v);
236
+ }
237
+
238
+ function requireObject(
239
+ parent: Record<string, unknown>,
240
+ key: string,
241
+ ): Record<string, unknown> {
242
+ const bare = key.includes(".") ? key.split(".").pop()! : key;
243
+ const v = parent[bare];
244
+ if (!isObject(v)) throw cfgErr(key, "object");
245
+ return v;
246
+ }
247
+
248
+ function requireString(parent: Record<string, unknown>, key: string): string {
249
+ const bare = key.includes(".") ? key.split(".").pop()! : key;
250
+ const v = parent[bare];
251
+ if (typeof v !== "string" || v.length === 0) throw cfgErr(key, "non-empty string");
252
+ return v;
253
+ }
254
+
255
+ function optionalString(
256
+ parent: Record<string, unknown>,
257
+ key: string,
258
+ ): string | undefined {
259
+ const bare = key.includes(".") ? key.split(".").pop()! : key;
260
+ const v = parent[bare];
261
+ if (v === undefined) return undefined;
262
+ if (typeof v !== "string") throw cfgErr(key, "string");
263
+ return v;
264
+ }
265
+
266
+ function optionalNumber(
267
+ parent: Record<string, unknown>,
268
+ key: string,
269
+ ): number | undefined {
270
+ const bare = key.includes(".") ? key.split(".").pop()! : key;
271
+ const v = parent[bare];
272
+ if (v === undefined) return undefined;
273
+ if (typeof v !== "number") throw cfgErr(key, "number");
274
+ return v;
275
+ }
276
+
277
+ function cfgErr(path: string, expected: string): Error {
278
+ return new Error(`deploy.arc.json: ${path} must be ${expected}`);
279
+ }
@@ -0,0 +1,92 @@
1
+ import type { DeployConfig, DeployTarget } from "./config";
2
+ import { assertExec, canSsh, sshExec } from "./ssh";
3
+
4
+ // ---------------------------------------------------------------------------
5
+ // Introspect the remote host to decide whether bootstrap is needed.
6
+ // Classifies into a small enum; bootstrap.ts maps this to actions.
7
+ // ---------------------------------------------------------------------------
8
+
9
/**
 * Classification of the remote host as observed over SSH.
 * Discriminated on `kind`; produced by detectRemoteState().
 */
export type RemoteState =
  /** SSH unreachable or target.host is a placeholder from survey. */
  | { kind: "unreachable"; reason: string }
  /** SSH works but Docker is missing — needs Ansible. */
  | { kind: "no-docker" }
  /** Docker is there but the arc stack is not deployed. */
  | { kind: "no-stack" }
  /** Stack is running — state marker present, some services up. */
  | {
      kind: "ready";
      /** Env names derived from running "arc-" compose services (prefix stripped). */
      runningEnvs: string[];
      /** Parsed marker file, or null when missing/unparseable. */
      marker: RemoteStateMarker | null;
    };

/** Written to /opt/arc/.arc-state.json after each successful bootstrap/deploy. */
export interface RemoteStateMarker {
  /** CLI version that performed the last bootstrap/deploy. */
  cliVersion: string;
  /** Hash of the deploy config at that time. */
  configHash: string;
  /** ISO timestamp of the last update. */
  updatedAt: string;
}

// NOTE(review): this path is fixed to /opt/arc even though target.remoteDir is
// configurable — confirm whether it should derive from remoteDir instead.
export const STATE_MARKER_PATH = "/opt/arc/.arc-state.json";
31
+
32
/**
 * Probe the remote host over SSH and classify it into a RemoteState.
 *
 * Probe order: placeholder host check → SSH reachability → docker binary →
 * compose stack presence → state marker file. Each failed probe short-circuits
 * to the corresponding state.
 *
 * @param cfg Validated deploy configuration (target.host/remoteDir are used).
 * @returns The detected RemoteState; never throws for expected probe failures.
 */
export async function detectRemoteState(
  cfg: DeployConfig,
): Promise<RemoteState> {
  // Survey/init writes a placeholder host until terraform fills it in.
  if (cfg.target.host === "PENDING_TERRAFORM" || !cfg.target.host) {
    return { kind: "unreachable", reason: "target.host not yet set" };
  }

  if (!(await canSsh(cfg.target))) {
    return { kind: "unreachable", reason: "ssh connection failed" };
  }

  // POSIX-portable presence check for the docker binary.
  const dockerCheck = await sshExec(cfg.target, "command -v docker", {
    quiet: true,
  });
  if (dockerCheck.exitCode !== 0) {
    return { kind: "no-docker" };
  }

  const composeDir = `${cfg.target.remoteDir}`;
  // NOTE(review): the trailing `|| true` forces exit code 0, so the
  // exitCode !== 0 branch below is effectively dead — the empty-stdout check
  // is what detects a missing stack. Confirm that is the intended behavior.
  const psCheck = await sshExec(
    cfg.target,
    `test -f ${composeDir}/docker-compose.yml && cd ${composeDir} && docker compose ps --format '{{.Service}}' || true`,
    { quiet: true },
  );
  if (psCheck.exitCode !== 0 || psCheck.stdout.trim() === "") {
    return { kind: "no-stack" };
  }

  // Keep only compose services following the "arc-<env>" naming convention
  // and strip the prefix to recover env names.
  const running = psCheck.stdout
    .split("\n")
    .map((l) => l.trim())
    .filter((l) => l.startsWith("arc-"))
    .map((l) => l.replace(/^arc-/, ""));

  // The marker is best-effort: a missing or corrupt file yields marker=null
  // rather than an error, since the stack itself is demonstrably running.
  const markerRaw = await sshExec(cfg.target, `cat ${STATE_MARKER_PATH}`, {
    quiet: true,
  });
  let marker: RemoteStateMarker | null = null;
  if (markerRaw.exitCode === 0) {
    try {
      // assumes the file content matches RemoteStateMarker — not re-validated here
      marker = JSON.parse(markerRaw.stdout) as RemoteStateMarker;
    } catch {
      marker = null;
    }
  }

  return { kind: "ready", runningEnvs: running, marker };
}
80
+
81
+ /** Write the state marker file on the remote host. */
82
+ export async function writeStateMarker(
83
+ target: DeployTarget,
84
+ marker: RemoteStateMarker,
85
+ ): Promise<void> {
86
+ const json = JSON.stringify(marker, null, 2);
87
+ // Heredoc avoids any shell escaping surprises with the JSON content.
88
+ await assertExec(
89
+ target,
90
+ `sudo tee ${STATE_MARKER_PATH} > /dev/null <<'JSON'\n${json}\nJSON`,
91
+ );
92
+ }
@@ -0,0 +1,202 @@
1
import { existsSync, readdirSync, readFileSync } from "fs";
import { join, relative } from "path";
import type { BuildManifest, ModuleDescriptor } from "@arcote.tech/platform";
import type { DeployConfig } from "./config";
import { openTunnel, rsyncDir, scpUpload } from "./ssh";
import type { WorkspaceInfo } from "../platform/shared";
7
+
8
+ // ---------------------------------------------------------------------------
9
+ // Hot-swap logic.
10
+ //
11
+ // Flow per env:
12
+ // 1. rsync the whole project directory to /opt/arc/<env>/ so the running
13
+ // container's /app volume sees new code. (This is the authoritative
14
+ // source — node_modules, package.json, .arc/platform/ all live here.)
15
+ // 2. Open an SSH tunnel to the Caddy loopback listener (127.0.0.1:2019).
16
+ // 3. GET /env/<env>/api/deploy/manifest → remote BuildManifest.
17
+ // 4. diffManifests(local, remote) → list of changed module files + shell/styles flags.
18
+ // 5. POST each changed module (+ shell + styles) as multipart uploads.
19
+ // 6. POST the new manifest to /api/deploy/manifest — server writes
20
+ // manifest.json and SSE-notifies clients to reimport.
21
+ //
22
+ // Because rsync in step 1 already moved the bytes, the POSTs are redundant
23
+ // for disk writes — but they force the running server to pick up new hashes
24
+ // and push an SSE event so connected browser clients reload without F5.
25
+ // ---------------------------------------------------------------------------
26
+
27
/** Everything syncEnv needs to hot-swap one environment. */
export interface SyncInputs {
  /** Validated deployment configuration. */
  cfg: DeployConfig;
  /** Environment name — must be a key of cfg.envs. */
  env: string;
  /** Workspace layout (modulesDir, shellDir, arcDir, …). */
  ws: WorkspaceInfo;
  /** Path to the project root. */
  projectDir: string;
}

/** Summary of what a sync actually changed, for CLI reporting. */
export interface SyncOutcome {
  /** The environment that was synced. */
  env: string;
  /** Names of modules whose hash differed from the remote manifest. */
  changedModules: readonly string[];
  /** Whether the shell bundle was re-uploaded. */
  shellChanged: boolean;
  /** Whether the styles files were re-uploaded. */
  stylesChanged: boolean;
}
41
+
42
+ // ---------------------------------------------------------------------------
43
+ // Pure diff — exported for tests
44
+ // ---------------------------------------------------------------------------
45
+
46
/** Result of comparing a local build manifest against the remote one. */
export interface ManifestDiff {
  /** Local modules whose hash differs from (or is absent in) the remote manifest. */
  changedModules: ModuleDescriptor[];
  /** True when the shell hashes differ. */
  shellChanged: boolean;
  /** True when the styles hashes differ. */
  stylesChanged: boolean;
}
51
+
52
+ export function diffManifests(
53
+ local: BuildManifest,
54
+ remote: BuildManifest,
55
+ ): ManifestDiff {
56
+ const remoteByName = new Map(remote.modules.map((m) => [m.name, m]));
57
+ const changedModules = local.modules.filter(
58
+ (m) => remoteByName.get(m.name)?.hash !== m.hash,
59
+ );
60
+ return {
61
+ changedModules: [...changedModules],
62
+ shellChanged: local.shellHash !== remote.shellHash,
63
+ stylesChanged: local.stylesHash !== remote.stylesHash,
64
+ };
65
+ }
66
+
67
+ // ---------------------------------------------------------------------------
68
+ // Sync driver
69
+ // ---------------------------------------------------------------------------
70
+
71
/**
 * Hot-swap one environment on the remote host.
 *
 * Rsyncs the project, diffs the local build manifest against the remote one
 * via an SSH tunnel, POSTs only the changed artifacts, then publishes the new
 * manifest so the running server notifies connected clients over SSE.
 *
 * @param inputs Config, env name, workspace layout, and project root.
 * @returns Summary of what changed (modules, shell, styles).
 * @throws Error on unknown env, missing local build, or any failed HTTP step.
 */
export async function syncEnv(inputs: SyncInputs): Promise<SyncOutcome> {
  const { cfg, env, ws, projectDir } = inputs;
  // Fail fast on a typo'd env name before touching the remote host.
  const envConfig = cfg.envs[env];
  if (!envConfig) throw new Error(`Unknown env: ${env}`);

  // 1. Rsync the project to the host. Excludes dev-only dirs.
  const remotePath = `${cfg.target.remoteDir}/${env}`;
  await rsyncDir(cfg.target, projectDir, remotePath);

  // 2. Read local manifest
  const localManifestPath = join(ws.modulesDir, "manifest.json");
  if (!existsSync(localManifestPath)) {
    throw new Error(
      `Local build missing at ${localManifestPath}. Run arc platform build first.`,
    );
  }
  // assumes manifest.json matches BuildManifest — not re-validated here
  const localManifest = JSON.parse(
    readFileSync(localManifestPath, "utf-8"),
  ) as BuildManifest;

  // 3. Open SSH tunnel to Caddy's loopback admin listener
  // Per-env port offset keeps parallel syncs from fighting over one local port
  // (NOTE(review): two env names can still hash to the same offset — confirm
  // acceptable for the expected number of envs).
  const localPort = 15500 + hashEnvToOffset(env);
  const tunnel = await openTunnel(cfg.target, localPort, "127.0.0.1", 2019);

  try {
    const base = `http://127.0.0.1:${localPort}/env/${env}`;

    // 4. Fetch remote manifest
    const remoteManifestRes = await fetch(`${base}/api/deploy/manifest`);
    if (!remoteManifestRes.ok) {
      throw new Error(
        `Failed to fetch remote manifest: ${remoteManifestRes.status}`,
      );
    }
    const remoteManifest = (await remoteManifestRes.json()) as BuildManifest;

    const diff = diffManifests(localManifest, remoteManifest);

    // 5. Upload shell (if changed) — contains every file under .arc/platform/shell
    if (diff.shellChanged) {
      const shellFiles = collectFiles(ws.shellDir);
      const form = new FormData();
      for (const absPath of shellFiles) {
        // Field name and filename are both the shellDir-relative path so the
        // server can reconstruct the directory layout.
        const rel = relative(ws.shellDir, absPath);
        form.append(rel, new Blob([readFileSync(absPath)]), rel);
      }
      const res = await fetch(`${base}/api/deploy/shell`, {
        method: "POST",
        body: form,
      });
      if (!res.ok)
        throw new Error(`Shell upload failed: ${res.status} ${await res.text()}`);
    }

    // 5b. Upload changed styles files alongside shell request if styles changed
    // NOTE(review): styles are POSTed to the /api/deploy/shell endpoint, not a
    // dedicated styles route — confirm the server routes these by field name.
    if (diff.stylesChanged) {
      const form = new FormData();
      for (const name of ["styles.css", "theme.css"] as const) {
        const p = join(ws.arcDir, name);
        if (existsSync(p)) {
          form.append(name, new Blob([readFileSync(p)]), name);
        }
      }
      const res = await fetch(`${base}/api/deploy/shell`, {
        method: "POST",
        body: form,
      });
      if (!res.ok)
        throw new Error(`Styles upload failed: ${res.status} ${await res.text()}`);
    }

    // 5c. Upload changed modules
    if (diff.changedModules.length > 0) {
      const form = new FormData();
      for (const mod of diff.changedModules) {
        const p = join(ws.modulesDir, mod.file);
        form.append(mod.file, new Blob([readFileSync(p)]), mod.file);
      }
      const res = await fetch(`${base}/api/deploy/modules`, {
        method: "POST",
        body: form,
      });
      if (!res.ok)
        throw new Error(
          `Modules upload failed: ${res.status} ${await res.text()}`,
        );
    }

    // 6. Post the new manifest so the server flips getManifest() + SSE
    const res = await fetch(`${base}/api/deploy/manifest`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(localManifest),
    });
    if (!res.ok)
      throw new Error(
        `Manifest update failed: ${res.status} ${await res.text()}`,
      );

    return {
      env,
      changedModules: diff.changedModules.map((m) => m.name),
      shellChanged: diff.shellChanged,
      stylesChanged: diff.stylesChanged,
    };
  } finally {
    // Always tear down the tunnel, even when an upload step threw.
    tunnel.close();
  }
}
180
+
181
+ // ---------------------------------------------------------------------------
182
+ // Helpers
183
+ // ---------------------------------------------------------------------------
184
+
185
+ function collectFiles(dir: string): string[] {
186
+ if (!existsSync(dir)) return [];
187
+ const { readdirSync } = require("fs") as typeof import("fs");
188
+ const out: string[] = [];
189
+ for (const entry of readdirSync(dir, { withFileTypes: true })) {
190
+ const p = join(dir, entry.name);
191
+ if (entry.isDirectory()) out.push(...collectFiles(p));
192
+ else if (entry.isFile()) out.push(p);
193
+ }
194
+ return out;
195
+ }
196
+
197
+ /** Deterministic per-env tunnel port offset so parallel syncs don't collide. */
198
+ function hashEnvToOffset(env: string): number {
199
+ let h = 0;
200
+ for (const ch of env) h = (h * 31 + ch.charCodeAt(0)) >>> 0;
201
+ return h % 100;
202
+ }